etro 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (103)
  1. package/.env +2 -0
  2. package/.github/workflows/nodejs.yml +27 -0
  3. package/CHANGELOG.md +109 -0
  4. package/CODE_OF_CONDUCT.md +77 -0
  5. package/CONTRIBUTING.md +155 -0
  6. package/LICENSE +674 -0
  7. package/README.md +57 -0
  8. package/dist/etro.js +3397 -0
  9. package/docs/effect.js.html +1215 -0
  10. package/docs/event.js.html +145 -0
  11. package/docs/index.html +81 -0
  12. package/docs/index.js.html +92 -0
  13. package/docs/layer.js.html +888 -0
  14. package/docs/module-effect-GaussianBlurComponent.html +345 -0
  15. package/docs/module-effect.Brightness.html +339 -0
  16. package/docs/module-effect.Channels.html +319 -0
  17. package/docs/module-effect.ChromaKey.html +611 -0
  18. package/docs/module-effect.Contrast.html +339 -0
  19. package/docs/module-effect.EllipticalMask.html +200 -0
  20. package/docs/module-effect.GaussianBlur.html +202 -0
  21. package/docs/module-effect.GaussianBlurHorizontal.html +242 -0
  22. package/docs/module-effect.GaussianBlurVertical.html +242 -0
  23. package/docs/module-effect.Pixelate.html +330 -0
  24. package/docs/module-effect.Shader.html +1227 -0
  25. package/docs/module-effect.Stack.html +406 -0
  26. package/docs/module-effect.Transform.Matrix.html +193 -0
  27. package/docs/module-effect.Transform.html +1174 -0
  28. package/docs/module-effect.html +148 -0
  29. package/docs/module-event.html +473 -0
  30. package/docs/module-index.html +186 -0
  31. package/docs/module-layer-Media.html +1116 -0
  32. package/docs/module-layer-MediaMixin.html +164 -0
  33. package/docs/module-layer.Audio.html +1188 -0
  34. package/docs/module-layer.Base.html +629 -0
  35. package/docs/module-layer.Image.html +1421 -0
  36. package/docs/module-layer.Text.html +1731 -0
  37. package/docs/module-layer.Video.html +1938 -0
  38. package/docs/module-layer.Visual.html +1698 -0
  39. package/docs/module-layer.html +137 -0
  40. package/docs/module-movie.html +3118 -0
  41. package/docs/module-util.Color.html +702 -0
  42. package/docs/module-util.Font.html +395 -0
  43. package/docs/module-util.html +845 -0
  44. package/docs/movie.js.html +689 -0
  45. package/docs/scripts/collapse.js +20 -0
  46. package/docs/scripts/linenumber.js +25 -0
  47. package/docs/scripts/nav.js +12 -0
  48. package/docs/scripts/polyfill.js +4 -0
  49. package/docs/scripts/prettify/Apache-License-2.0.txt +202 -0
  50. package/docs/scripts/prettify/lang-css.js +2 -0
  51. package/docs/scripts/prettify/prettify.js +28 -0
  52. package/docs/scripts/search.js +83 -0
  53. package/docs/styles/jsdoc.css +671 -0
  54. package/docs/styles/prettify.css +79 -0
  55. package/docs/util.js.html +503 -0
  56. package/eslint.conf.js +28 -0
  57. package/eslint.test-conf.js +4 -0
  58. package/examples/application/readme-screenshot.html +86 -0
  59. package/examples/application/video-player.html +131 -0
  60. package/examples/application/webcam.html +28 -0
  61. package/examples/introduction/audio.html +52 -0
  62. package/examples/introduction/effects.html +56 -0
  63. package/examples/introduction/export.html +70 -0
  64. package/examples/introduction/functions.html +35 -0
  65. package/examples/introduction/hello-world1.html +33 -0
  66. package/examples/introduction/hello-world2.html +32 -0
  67. package/examples/introduction/keyframes.html +67 -0
  68. package/examples/introduction/media.html +55 -0
  69. package/examples/introduction/text.html +27 -0
  70. package/jsdoc.conf.json +3 -0
  71. package/karma.conf.js +60 -0
  72. package/package.json +63 -0
  73. package/private-todo.txt +70 -0
  74. package/rename-file.sh +18 -0
  75. package/rename-versions.sh +14 -0
  76. package/rename.sh +22 -0
  77. package/rollup.config.js +31 -0
  78. package/screenshots/2019-08-17_0.png +0 -0
  79. package/scripts/gen-effect-samples.html +99 -0
  80. package/scripts/save-effect-samples.js +43 -0
  81. package/spec/assets/effect/gaussian-blur-horizontal.png +0 -0
  82. package/spec/assets/effect/gaussian-blur-vertical.png +0 -0
  83. package/spec/assets/effect/original.png +0 -0
  84. package/spec/assets/effect/pixelate.png +0 -0
  85. package/spec/assets/effect/transform/multiply.png +0 -0
  86. package/spec/assets/effect/transform/rotate.png +0 -0
  87. package/spec/assets/effect/transform/scale-fraction.png +0 -0
  88. package/spec/assets/effect/transform/scale.png +0 -0
  89. package/spec/assets/effect/transform/translate-fraction.png +0 -0
  90. package/spec/assets/effect/transform/translate.png +0 -0
  91. package/spec/assets/layer/audio.wav +0 -0
  92. package/spec/assets/layer/image.jpg +0 -0
  93. package/spec/effect.spec.js +352 -0
  94. package/spec/event.spec.js +25 -0
  95. package/spec/layer.spec.js +128 -0
  96. package/spec/movie.spec.js +154 -0
  97. package/spec/util.spec.js +285 -0
  98. package/src/effect.js +1265 -0
  99. package/src/event.js +78 -0
  100. package/src/index.js +23 -0
  101. package/src/layer.js +875 -0
  102. package/src/movie.js +636 -0
  103. package/src/util.js +487 -0
package/dist/etro.js ADDED
@@ -0,0 +1,3397 @@
1
+ var etro = (function () {
2
+ 'use strict';
3
+
4
+ /**
5
+ * @module event
6
+ */
7
+
8
+ const listeners = new WeakMap();
9
+
10
/**
 * A hierarchical event-type identifier, e.g. "movie.change.layer".
 */
class TypeId {
  /**
   * @param {string} id - dot-separated type id
   */
  constructor (id) {
    this.parts = id.split('.');
  }

  /**
   * Checks whether this type is equal to, or a subtype of, `other`
   * (i.e. `other`'s segments are a prefix of this type's segments).
   *
   * @param {TypeId} other
   * @return {boolean}
   */
  contains (other) {
    // BUG FIX: the original compared the nonexistent `length` property of TypeId
    // (`undefined > undefined` is always false), making this guard dead code.
    // Compare the number of id segments instead.
    if (other.parts.length > this.parts.length) {
      return false
    }

    for (let i = 0; i < other.parts.length; i++) {
      if (other.parts[i] !== this.parts[i]) {
        return false
      }
    }
    return true
  }

  /**
   * @return {string} the dot-separated id
   */
  toString () {
    return this.parts.join('.')
  }
}
32
+
33
/**
 * Registers a listener for events of the given type on `target`.
 * (The original doc comment said "Emits an event", which described `publish`, not this.)
 *
 * @param {object} target - a Etro object
 * @param {string} type - the id of the type (can contain subtypes, such as "type.subtype")
 * @param {function} listener - called with the event object when a matching event is published
 */
function subscribe (target, type, listener) {
  const entry = { type: new TypeId(type), listener };
  const existing = listeners.get(target);
  if (existing) {
    existing.push(entry);
  } else {
    listeners.set(target, [entry]);
  }
}
49
+
50
/**
 * Emits an event to all listeners whose subscribed type matches `type`.
 *
 * @param {object} target - a Etro object
 * @param {string} type - the id of the type (can contain subtypes, such as "type.subtype")
 * @param {object} event - any additional event data (mutated: `target` and `type` are attached)
 * @return {object|null} the published event, or null if `target` has no listeners
 */
function publish (target, type, event) {
  event.target = target; // could be a proxy
  event.type = type;

  if (!listeners.has(target)) {
    return null // no event fired
  }

  const typeId = new TypeId(type);

  // Snapshot the matching listeners first, then dispatch, so listeners added
  // during dispatch are not called for this event.
  const matching = listeners
    .get(target)
    .filter(item => typeId.contains(item.type))
    .map(item => item.listener);

  matching.forEach(listener => listener(event));

  return event
}
82
+
83
// Public API of the event module (frozen to prevent modification).
var event = /*#__PURE__*/Object.freeze({
  subscribe,
  publish
});
87
+
88
+ /**
89
+ * @module util
90
+ */
91
+
92
/**
 * Merges `options` with `destObj.getDefaultOptions()` and copies the merged properties
 * onto `destObj`, without overwriting properties `destObj` already has.
 *
 * @param {object} options - user options; every key must exist in the defaults
 * @param {object} destObj - the object to apply the options to
 * @return {undefined}
 * @throws {Error} if `options` contains a key not present in the defaults
 * @todo Make methods like getDefaultOptions private
 */
function applyOptions (options, destObj) {
  const defaults = destObj.getDefaultOptions();

  // Reject any option that has no corresponding default.
  for (const key in options) {
    if (!Object.prototype.hasOwnProperty.call(defaults, key)) {
      throw new Error("Invalid option: '" + key + "'")
    }
  }

  // Merge, then copy everything that isn't already set on the destination.
  const merged = { ...defaults, ...options };
  for (const key in merged) {
    if (!(key in destObj)) {
      destObj[key] = merged[key];
    }
  }
}
120
+
121
// https://stackoverflow.com/a/8024294/3783155
/**
 * Collects the names of all own and inherited properties of `obj`
 * (walking the prototype chain; may contain duplicates).
 *
 * @param {object} obj
 * @param {boolean} excludeObjectClass - don't add properties of the <code>Object</code> prototype
 * @return {string[]} the collected property names
 * @private
 */
function getAllPropertyNames (obj, excludeObjectClass) {
  const names = [];
  let current = obj;
  while (current) {
    names.push(...Object.getOwnPropertyNames(current));
    current = Object.getPrototypeOf(current);
    // Stop before Object.prototype's members when requested.
    if (current && excludeObjectClass && current.constructor.name === 'Object') {
      break
    }
  }
  return names
}
135
+
136
/**
 * Determines whether `property` should be treated as a keyframe set.
 *
 * @return {boolean} <code>true</code> if <code>property</code> is a non-array, non-empty
 * object whose property keys (own and inherited, excluding those of <code>Object</code>)
 * are all numeric or <code>"interpolate"</code> or <code>"interpolationKeys"</code>;
 * <code>false</code> otherwise.
 */
function isKeyFrames (property) {
  if (typeof property !== 'object' || property === null || Array.isArray(property)) {
    return false
  }
  // Includes non-enumerable and inherited properties (except those of `Object`).
  const keys = getAllPropertyNames(property, true);
  for (const key of keys) {
    const special = key === 'interpolate' || key === 'interpolationKeys';
    // Object keys are always strings; a keyframe key must convert back to a number.
    if (isNaN(key) && !special) {
      return false
    }
  }
  // An empty plain object is not treated as a keyframe set.
  // https://stackoverflow.com/a/32108184/3783155
  const isEmpty = property.constructor === Object && Object.entries(property).length === 0;
  return !isEmpty
}
160
+
161
// Per-movie, per-element cache of computed property values;
// must be cleared at the start of each frame.
const valCache = new WeakMap();

/**
 * Stores `value` under (element.movie, element, path) and returns it.
 */
function cacheValue (element, path, value) {
  let movieCache = valCache.get(element.movie);
  if (!movieCache) {
    movieCache = new WeakMap();
    valCache.set(element.movie, movieCache);
  }

  let elementCache = movieCache.get(element);
  if (!elementCache) {
    elementCache = {};
    movieCache.set(element, elementCache);
  }

  elementCache[path] = value;
  return value
}

/**
 * @return {boolean} whether a value is cached for `path` on `element`
 */
function hasCachedValue (element, path) {
  const movieCache = valCache.get(element.movie);
  const elementCache = movieCache && movieCache.get(element);
  return !!elementCache && path in elementCache
}

/**
 * @return {*} the cached value for `path` on `element` (assumes one exists)
 */
function getCachedValue (element, path) {
  return valCache.get(element.movie).get(element)[path]
}

/**
 * Drops every cached value belonging to `movie`.
 */
function clearCachedValues (movie) {
  valCache.delete(movie);
}
188
+
189
/**
 * Computes the current value of `element`'s property at `path` for movie time `time`:
 * keyframe sets are interpolated (see `valKeyFrame`), function-valued properties are
 * called as `property(element, time)`, and any other value is returned as-is. The
 * result is passed through `element.propertyFilters[path]` (if such a filter exists)
 * and cached for the current frame (see `cacheValue` / `clearCachedValues`).
 *
 * @param {object} element - the object to which the property belongs
 * @param {string} path - dot-separated path of the property on `element`
 * @param {number} time - time to calculate keyframes for, if necessary
 * @return {*} the resolved value
 *
 * Note that only values used in keyframes that are numbers or objects (including
 * arrays) are interpolated. All other values are taken sequentially with no
 * interpolation.
 *
 * @todo Is this function efficient?
 *
 * @typedef {Object} module:util.KeyFrames
 * @property {function} interpolate - the function to interpolate between keyframes, defaults to
 * {@link module:util.linearInterp}
 * @property {string[]} interpolationKeys - keys to interpolate for objects, defaults to all
 * own enumerable properties
 */
function val (element, path, time) {
  if (hasCachedValue(element, path)) {
    return getCachedValue(element, path)
  }

  // get property of element at path
  const pathParts = path.split('.');
  let property = element;
  while (pathParts.length > 0) {
    property = property[pathParts.shift()];
  }
  // optional per-path post-processing function
  const process = element.propertyFilters[path];

  let value;
  if (isKeyFrames(property)) {
    value = valKeyFrame(property, time);
  } else if (typeof property === 'function') {
    value = property(element, time); // TODO? add more args
  } else {
    value = property; // simple value
  }
  return cacheValue(element, path, process ? process(value) : value)
}
234
+
235
/**
 * Evaluates a keyframe set at `time`: locates the nearest keyframes at-or-before and
 * at-or-after `time` and interpolates between them. Only numbers and objects are
 * interpolated; any other value type uses the earlier keyframe's value unchanged.
 *
 * @param {module:util.KeyFrames} property - map of time-to-value pairs (plus optional
 * `interpolate` and `interpolationKeys` entries)
 * @param {number} time - the time to evaluate at
 * @return {*} the (possibly interpolated) value
 * @throws {Error} if `time` is undefined, if no keyframe exists at or before `time`,
 * if an interpolatable value has no keyframe at or after `time`, or if the two
 * surrounding values have mismatched types
 */
function valKeyFrame (property, time) {
  if (time === undefined) {
    throw new Error('|time| is undefined or null')
  }
  // Track the closest keyframe at or below `time` and the closest at or above it.
  let lowerTime = 0;
  let upperTime = Infinity;
  let lowerValue = null;
  let upperValue = null;
  for (const rawKey in property) {
    const keyValue = property[rawKey];
    const keyTime = +rawKey; // object keys are strings; convert back to number
    // (non-numeric keys such as 'interpolate' become NaN and fail both checks below)

    if (lowerTime <= keyTime && keyTime <= time) {
      lowerValue = keyValue;
      lowerTime = keyTime;
    }
    if (time <= keyTime && keyTime <= upperTime) {
      upperValue = keyValue;
      upperTime = keyTime;
    }
  }
  // TODO: support custom interpolation for 'other' types
  if (lowerValue === null) {
    throw new Error(`No keyframes located before or at time ${time}.`)
  }
  // Non-numeric, non-object values are never interpolated, so the upper keyframe is irrelevant.
  if (typeof lowerValue !== 'number' && typeof lowerValue !== 'object') {
    return lowerValue
  }
  if (upperValue === null) {
    throw new Error(`No keyframes located after or at time ${time}.`)
  }
  if (typeof lowerValue !== typeof upperValue) {
    throw new Error('Type mismatch in keyframe values')
  }
  // A keyframe exactly at |time| needs no interpolation (also prevents division by zero).
  if (upperTime === lowerTime) {
    return upperValue
  }
  const percentProgress = (time - lowerTime) / (upperTime - lowerTime);
  const interpolate = property.interpolate || linearInterp;
  return interpolate(lowerValue, upperValue, percentProgress, property.interpolationKeys)
}
280
+
281
+ /* export function floorInterp(x1, x2, t, objectKeys) {
282
+ // https://stackoverflow.com/a/25835337/3783155 (TODO: preserve getters/setters, etc?)
283
+ return !objectKeys ? x1 : objectKeys.reduce((a, x) => {
284
+ if (x1.hasOwnProperty(x)) a[x] = o[x]; // ignore x2
285
+ return a;
286
+ }, Object.create(Object.getPrototypeOf(x1)));
287
+ } */
288
+
289
/**
 * Linearly interpolates between `x1` and `x2` by progress `t`. Numbers are blended
 * directly; objects (including arrays) are interpolated key-by-key, recursively.
 * Values of any other type are returned unchanged (flat/floor interpolation).
 *
 * @param {(number|object|*)} x1 - start value
 * @param {(number|object|*)} x2 - end value
 * @param {number} t - progress from `x1` (0) to `x2` (1)
 * @param {string[]} [objectKeys] - for objects, the keys to interpolate; defaults to
 * `x1`'s own enumerable keys
 * @return {(number|object|*)} the interpolated value
 * @throws {Error} on type or prototype mismatch between `x1` and `x2`
 */
function linearInterp (x1, x2, t, objectKeys) {
  if (typeof x1 !== typeof x2) {
    throw new Error('Type mismatch')
  }
  if (typeof x1 !== 'number' && typeof x1 !== 'object') {
    return x1 // flat interpolation (floor)
  }
  if (typeof x1 === 'object') { // to work with objects (including arrays)
    // TODO: make this code DRY
    if (Object.getPrototypeOf(x1) !== Object.getPrototypeOf(x2)) {
      throw new Error('Prototype mismatch')
    }
    const int = Object.create(Object.getPrototypeOf(x1)); // preserve prototype of objects
    // BUG FIX: the original computed `Object.keys(x1) || objectKeys`, which always
    // used `Object.keys(x1)` (arrays are truthy even when empty), so `objectKeys`
    // (a keyframe set's `interpolationKeys`) was silently ignored. Prefer the
    // caller-supplied key list when given.
    const keys = objectKeys || Object.keys(x1);
    for (let i = 0; i < keys.length; i++) {
      const key = keys[i];
      // only take the union of properties
      // eslint-disable-next-line no-prototype-builtins
      if (!x1.hasOwnProperty(key) || !x2.hasOwnProperty(key)) {
        continue
      }
      int[key] = linearInterp(x1[key], x2[key], t);
    }
    return int
  }
  return (1 - t) * x1 + t * x2
}
317
+
318
/**
 * Cosine-eases between `x1` and `x2` by progress `t`. Numbers are blended with a
 * cosine weight; objects (including arrays) are interpolated key-by-key, recursively.
 * Values of any other type are returned unchanged (flat/floor interpolation).
 *
 * @param {(number|object|*)} x1 - start value
 * @param {(number|object|*)} x2 - end value
 * @param {number} t - progress from `x1` (0) to `x2` (1)
 * @param {string[]} [objectKeys] - for objects, the keys to interpolate; defaults to
 * `x1`'s own enumerable keys
 * @return {(number|object|*)} the interpolated value
 * @throws {Error} on type or prototype mismatch between `x1` and `x2`
 */
function cosineInterp (x1, x2, t, objectKeys) {
  if (typeof x1 !== typeof x2) {
    throw new Error('Type mismatch')
  }
  if (typeof x1 !== 'number' && typeof x1 !== 'object') {
    return x1 // flat interpolation (floor)
  }
  if (typeof x1 === 'object' && typeof x2 === 'object') { // to work with objects (including arrays)
    if (Object.getPrototypeOf(x1) !== Object.getPrototypeOf(x2)) {
      throw new Error('Prototype mismatch')
    }
    const int = Object.create(Object.getPrototypeOf(x1)); // preserve prototype of objects
    // BUG FIX: the original computed `Object.keys(x1) || objectKeys`, which always
    // used `Object.keys(x1)` (arrays are truthy even when empty), so `objectKeys`
    // (a keyframe set's `interpolationKeys`) was silently ignored. Prefer the
    // caller-supplied key list when given.
    const keys = objectKeys || Object.keys(x1);
    for (let i = 0; i < keys.length; i++) {
      const key = keys[i];
      // only take the union of properties
      // eslint-disable-next-line no-prototype-builtins
      if (!x1.hasOwnProperty(key) || !x2.hasOwnProperty(key)) {
        continue
      }
      int[key] = cosineInterp(x1[key], x2[key], t);
    }
    return int
  }
  const cos = Math.cos(Math.PI / 2 * t);
  return cos * x1 + (1 - cos) * x2
}
346
+
347
/**
 * An rgba color, for proper interpolation and shader effects
 */
class Color {
  /**
   * @param {number} r - red channel
   * @param {number} g - green channel
   * @param {number} b - blue channel
   * @param {number} [a=1.0] - alpha channel
   */
  constructor (r, g, b, a = 1.0) {
    /** @type number */
    this.r = r;
    /** @type number */
    this.g = g;
    /** @type number */
    this.b = b;
    /** @type number */
    this.a = a;
  }

  /**
   * Converts to a css color string
   * @return {string} e.g. "rgba(255, 0, 0, 1)"
   */
  toString () {
    const { r, g, b, a } = this;
    return `rgba(${r}, ${g}, ${b}, ${a})`
  }
}
375
+
376
// 1x1 scratch canvas used to let the browser parse arbitrary css color strings.
const parseColorCanvas = document.createElement('canvas');
parseColorCanvas.width = parseColorCanvas.height = 1;
const parseColorCtx = parseColorCanvas.getContext('2d');
/**
 * Converts a css color string to a {@link module:util.Color} object representation.
 * @param {string} str
 * @return {module:util.Color} the parsed color
 */
function parseColor (str) {
  // TODO - find a better way to cope with the fact that invalid
  // values of "col" are ignored
  // Paint one pixel with the color and read it back.
  parseColorCtx.clearRect(0, 0, 1, 1);
  parseColorCtx.fillStyle = str;
  parseColorCtx.fillRect(0, 0, 1, 1);
  // getImageData alpha is 0-255; Color's alpha is 0-1, hence the division.
  const data = parseColorCtx.getImageData(0, 0, 1, 1).data;
  return new Color(data[0], data[1], data[2], data[3] / 255)
}
393
+
394
/**
 * A font, for proper interpolation
 */
class Font {
  /**
   * @param {number} size
   * @param {string} sizeUnit - e.g. "px" or "em"
   * @param {string} family
   * @param {string} [style='normal']
   * @param {string} [variant='normal']
   * @param {string} [weight='normal']
   * @param {string} [stretch='normal']
   * @param {string} [lineHeight='normal']
   */
  constructor (size, sizeUnit, family, style = 'normal', variant = 'normal',
    weight = 'normal', stretch = 'normal', lineHeight = 'normal') {
    this.size = size;
    this.sizeUnit = sizeUnit;
    this.family = family;
    this.style = style;
    this.variant = variant;
    this.weight = weight;
    this.stretch = stretch;
    this.lineHeight = lineHeight;
  }

  /**
   * Converts to css font syntax (omitting components that are 'normal')
   * @see https://developer.mozilla.org/en-US/docs/Web/CSS/font
   * @return {string}
   */
  toString () {
    const parts = [];
    if (this.style !== 'normal') parts.push(this.style);
    if (this.variant !== 'normal') parts.push(this.variant);
    if (this.weight !== 'normal') parts.push(this.weight);
    if (this.stretch !== 'normal') parts.push(this.stretch);
    parts.push(`${this.size}${this.sizeUnit}`);
    if (this.lineHeight !== 'normal') parts.push(this.lineHeight);
    parts.push(this.family);
    return parts.join(' ')
  }
}
432
+
433
// Detached element used to let the browser expand the css `font` shorthand.
const parseFontEl = document.createElement('div');
/**
 * Converts a css font string to a {@link module:util.Font} object representation.
 * @param {string} str
 * @return {module:util.Font} the parsed font
 */
function parseFont (str) {
  parseFontEl.setAttribute('style', `font: ${str}`); // assign css string to html element
  const {
    fontSize, fontFamily, fontStyle, fontVariant, fontWeight, fontStretch, lineHeight
  } = parseFontEl.style;
  parseFontEl.removeAttribute('style');

  const size = parseFloat(fontSize);
  const sizeUnit = fontSize.substring(size.toString().length);
  // BUG FIX: Font's constructor is (size, sizeUnit, family, style, variant, weight,
  // stretch, lineHeight); the original passed lineHeight in the `stretch` slot and
  // never set `lineHeight`. Pass fontStretch and lineHeight in their proper slots.
  // NOTE(review): fontStretch may be '' when unset, so fall back to 'normal' to
  // match Font's default — confirm against browser behavior.
  return new Font(size, sizeUnit, fontFamily, fontStyle, fontVariant, fontWeight,
    fontStretch || 'normal', lineHeight)
}
450
+
451
+ /*
452
+ * Attempts to solve the diamond inheritance problem using mixins
453
+ * See {@link http://javascriptweblog.wordpress.com/2011/05/31/a-fresh-look-at-javascript-mixins/}<br>
454
+ *
455
+ * <strong>Note that the caller has to explicitly update the class value and as well as the class's property
456
+ * <code>constructor</code> to its prototype's constructor.</strong><br>
457
+ *
458
+ * This throws an error when composing functions with return values; unless if the composed function is a
459
+ * constructor, which is handled specially.
460
+ *
461
+ * Note that all properties must be functions for this to work as expected.
462
+ *
463
+ * If the destination and source have the methods with the same name (key), assign a new function
464
+ * that calls both with the given arguments. The arguments list passed to each subfunction will be the
465
+ * argument list that was called to the composite function.
466
+ *
467
+ * This function only works with functions, getters and setters.
468
+ *
469
+ * TODO: make a lot more robust
470
+ * TODO: rethink my ways... this is evil
471
+ */
472
+ /* export function extendProto(destination, source) {
473
+ for (let name in source) {
474
+ const extendMethod = (sourceDescriptor, which) => {
475
+ let sourceFn = sourceDescriptor[which],
476
+ origDestDescriptor = Object.getOwnPropertyDescriptor(destination, name),
477
+ origDestFn = origDestDescriptor ? origDestDescriptor[which] : undefined;
478
+ let destFn = !origDestFn ? sourceFn : function compositeMethod() { // `function` or `()` ?
479
+ try {
480
+ // |.apply()| because we're seperating the method from the object, so return the value
481
+ // of |this| back to the function
482
+ let r1 = origDestFn.apply(this, arguments),
483
+ r2 = sourceFn.apply(this, arguments);
484
+ if (r1 || r2) throw "Return value in composite method"; // null will slip by ig
485
+ } catch (e) {
486
+ if (e.toString() === "TypeError: class constructors must be invoked with |new|") {
487
+ let inst = new origDestFn(...arguments);
488
+ sourceFn.apply(inst, arguments);
489
+ return inst;
490
+ } else throw e;
491
+ }
492
+ };
493
+
494
+ let destDescriptor = {...sourceDescriptor}; // shallow clone
495
+ destDescriptor[which] = destFn;
496
+ Object.defineProperty(destination, name, destDescriptor);
497
+ };
498
+
499
+ let descriptor = Object.getOwnPropertyDescriptor(source, name);
500
+ if (descriptor) { // if hasOwnProperty
501
+ if (descriptor.get) extendMethod(descriptor, 'get');
502
+ if (descriptor.set) extendMethod(descriptor, 'set');
503
+ if (descriptor.value) extendMethod(descriptor, 'value');
504
+ }
505
+ }
506
+ } */
507
+
508
// TODO: remove this function
/**
 * Applies `mapper` to every pixel of a region of `canvas` and (optionally) writes
 * the result back to the context.
 *
 * @param {function} mapper - called as `mapper(data, i)`, where `i` is the index of
 * the pixel's red component within the RGBA data array
 * @param {HTMLCanvasElement} canvas - supplies the default region size
 * @param {CanvasRenderingContext2D} ctx - context the pixels are read from / written to
 * @param {number} [x=0] - left edge of the region
 * @param {number} [y=0] - top edge of the region
 * @param {number} [width=canvas.width] - region width
 * @param {number} [height=canvas.height] - region height
 * @param {boolean} [flush=true] - whether to write the mapped pixels back
 */
function mapPixels (mapper, canvas, ctx, x, y, width, height, flush = true) {
  const left = x || 0;
  const top = y || 0;
  const w = width || canvas.width;
  const h = height || canvas.height;
  const frame = ctx.getImageData(left, top, w, h);
  // Each pixel occupies 4 consecutive entries (r, g, b, a).
  for (let i = 0; i < frame.data.length; i += 4) {
    mapper(frame.data, i);
  }
  if (flush) {
    ctx.putImageData(frame, left, top);
  }
}
522
+
523
/**
 * <p>Emits "change" event when public properties updated, recursively
 * <p>Must be called before any watchable properties are set, and only once in the prototype chain
 *
 * @param {object} target - object to watch
 * @return {Proxy} a proxy of `target` that publishes `<target.type>.change.modify`
 * events when watchable properties are assigned
 */
function watchPublic (target) {
  // Builds the dotted path of `prop` relative to the root proxy.
  const getPath = (receiver, prop) =>
    (receiver === proxy ? '' : (paths.get(receiver) + '.')) + prop;
  const callback = function (prop, val, receiver) {
    // Public API property updated, emit 'modify' event.
    publish(proxy, `${target.type}.change.modify`, { property: getPath(receiver, prop), newValue: val });
  };
  // A property is watchable unless it is underscore-prefixed ("private")
  // or listed in the target's publicExcludes.
  const check = prop => !(prop.startsWith('_') || target.publicExcludes.includes(prop));

  const paths = new WeakMap(); // the path to each child property (each is a unique proxy)

  const handler = {
    set (obj, prop, val, receiver) {
      // Recurse: wrap object-valued properties in a proxy so nested writes are also observed
      if (typeof val === 'object' && val !== null && !paths.has(val) && check(prop)) {
        val = new Proxy(val, handler);
        paths.set(val, getPath(receiver, prop));
      }

      const was = prop in obj;
      // set property or attribute
      // Search prototype chain for the closest setter
      let objProto = obj;
      while ((objProto = Object.getPrototypeOf(objProto))) {
        const propDesc = Object.getOwnPropertyDescriptor(objProto, prop);
        if (propDesc && propDesc.set) {
          propDesc.set.call(receiver, val); // call setter, supplying proxy as this (fixes event bugs)
          break
        }
      }
      if (!objProto) { // couldn't find setter; set value on instance
        obj[prop] = val;
      }
      // Check if it already existed and if it's a valid property to watch, if on root object
      if (obj !== target || (was && check(prop))) {
        callback(prop, val, receiver);
      }
      return true
    }
  };

  const proxy = new Proxy(target, handler);
  return proxy
}
573
+
574
// Public API of the util module (frozen to prevent modification).
var util = /*#__PURE__*/Object.freeze({
  applyOptions: applyOptions,
  clearCachedValues: clearCachedValues,
  val: val,
  linearInterp: linearInterp,
  cosineInterp: cosineInterp,
  Color: Color,
  parseColor: parseColor,
  Font: Font,
  parseFont: parseFont,
  mapPixels: mapPixels,
  watchPublic: watchPublic
});
587
+
588
+ /**
589
+ * @module layer
590
+ * @todo Add aligning options, like horizontal and vertical align modes
591
+ */
592
+
593
/**
 * A layer is a piece of content for the movie
 */
class Base {
  /**
   * Creates a new empty layer
   *
   * @param {number} startTime - when to start the layer on the movie's timeline
   * @param {number} duration - how long the layer should last on the movie's timeline
   * @param {object} [options] - no options, here for consistency
   */
  constructor (startTime, duration, options = {}) { // rn, options isn't used but I'm keeping it here
    const newThis = watchPublic(this); // proxy that will be returned by constructor
    // Don't send updates when initializing, so use this instead of newThis:
    applyOptions(options, this); // no options rn, but just to stick to protocol

    this._startTime = startTime;
    this._duration = duration;

    this._active = false; // whether newThis layer is currently being rendered
    this.enabled = true;

    this._movie = null;

    // Propagate 'layer.change.*' events on this layer up to the attached
    // movie as 'movie.change.layer.*'
    subscribe(newThis, 'layer.change', event => {
      const typeOfChange = event.type.substring(event.type.lastIndexOf('.') + 1);
      const type = `movie.change.layer.${typeOfChange}`;
      publish(newThis._movie, type, { ...event, target: newThis._movie, type });
    });

    return newThis
  }

  /**
   * Called when this layer is added to a movie
   * @param {object} movie
   */
  attach (movie) {
    this._movie = movie;
  }

  /**
   * Called when this layer is removed from its movie
   */
  detach () {
    this._movie = null;
  }

  /**
   * Called when the layer is activated
   */
  start () {}

  /**
   * Called when the movie renders and the layer is active
   */
  render () {}

  /**
   * Called when the layer is deactivated
   */
  stop () {}

  // Alias for `movie` (the layer's container)
  get parent () {
    return this._movie
  }

  /**
   * If the attached movie's playback position is in this layer
   * @type boolean
   */
  get active () {
    return this._active
  }

  /**
   * When the layer starts on the movie's timeline
   * @type number
   */
  get startTime () {
    return this._startTime
  }

  set startTime (val) {
    this._startTime = val;
  }

  /**
   * The current time of the movie relative to this layer
   * @type number
   */
  get currentTime () {
    return this._movie ? this._movie.currentTime - this.startTime
      : undefined
  }

  /**
   * How long the layer lasts on the movie's timeline
   * @type number
   */
  get duration () {
    return this._duration
  }

  set duration (val) {
    this._duration = val;
  }

  // The movie this layer is attached to (null when detached)
  get movie () {
    return this._movie
  }

  // Base layers accept no options; subclasses override and extend this
  getDefaultOptions () {
    return {}
  }
}
// id for events (independent of instance, but easy to access when on prototype chain)
Base.prototype.type = 'layer';
// property names excluded from watchPublic 'modify' events
Base.prototype.publicExcludes = [];
// per-path post-processing functions applied by `util.val`
Base.prototype.propertyFilters = {};
705
+
706
+ /** Any layer that renders to a canvas */
707
+ class Visual extends Base {
708
/**
 * Creates a visual layer
 *
 * @param {number} startTime - when to start the layer on the movie's timeline
 * @param {number} duration - how long the layer should last on the movie's timeline
 * @param {object} [options] - various optional arguments
 * @param {number} [options.width=null] - the width of the entire layer
 * @param {number} [options.height=null] - the height of the entire layer
 * @param {number} [options.x=0] - the offset of the layer relative to the movie
 * @param {number} [options.y=0] - the offset of the layer relative to the movie
 * @param {string} [options.background=null] - the background color of the layer, or <code>null</code>
 * for a transparent background
 * @param {object} [options.border=null] - the layer's outline, or <code>null</code> for no outline
 * @param {string} [options.border.color] - the outline's color; required for a border
 * @param {string} [options.border.thickness=1] - the outline's weight
 * @param {number} [options.opacity=1] - the layer's opacity; <code>1</code> for full opacity
 * and <code>0</code> for full transparency
 */
constructor (startTime, duration, options = {}) {
  super(startTime, duration, options);
  // only validate extra if not subclassed, because if subclassed, there will be extraneous options
  applyOptions(options, this);

  // Off-screen canvas this layer renders into each frame
  this._canvas = document.createElement('canvas');
  this._cctx = this.canvas.getContext('2d');

  // The effects array is proxied so that adding/removing an effect
  // automatically attaches/detaches it to/from this layer.
  this._effectsBack = [];
  const that = this;
  this._effects = new Proxy(this._effectsBack, {
    apply: function (target, thisArg, argumentsList) {
      return thisArg[target].apply(this, argumentsList)
    },
    deleteProperty: function (target, property) {
      const value = target[property];
      value.detach();
      delete target[property];
      return true
    },
    set: function (target, property, value, receiver) {
      target[property] = value;
      if (!isNaN(property)) { // if property is an number (index)
        value.attach(that);
      }
      return true
    }
  });
}
755
+
756
/**
 * Render visual output
 *
 * Template method: runs the three render phases in order.
 * @param {number} reltime - the time relative to this layer's start
 */
render (reltime) {
  this.beginRender(reltime);
  this.doRender(reltime);
  this.endRender(reltime);
}
764
+
765
// Sizes the canvas for this frame and applies the layer's opacity.
beginRender (reltime) {
  // if this.width or this.height is null, that means "take all available screen space", so set it to
  // this._movie.width or this._movie.height, respectively
  const w = val(this, 'width', reltime) || val(this._movie, 'width', this.startTime + reltime);
  const h = val(this, 'height', reltime) || val(this._movie, 'height', this.startTime + reltime);
  this.canvas.width = w;
  this.canvas.height = h;
  this.cctx.globalAlpha = val(this, 'opacity', reltime);
}
774
+
775
+ doRender (reltime) {
776
+ // if this.width or this.height is null, that means "take all available screen space", so set it to
777
+ // this._move.width or this._movie.height, respectively
778
+ // canvas.width & canvas.height are already interpolated
779
+ if (this.background) {
780
+ this.cctx.fillStyle = val(this, 'background', reltime);
781
+ this.cctx.fillRect(0, 0, this.canvas.width, this.canvas.height); // (0, 0) relative to layer
782
+ }
783
+ if (this.border && this.border.color) {
784
+ this.cctx.strokeStyle = val(this, 'border.color', reltime);
785
+ this.cctx.lineWidth = val(this, 'border.thickness', reltime) || 1; // this is optional.. TODO: integrate this with defaultOptions
786
+ }
787
+ }
788
+
789
+ endRender (reltime) {
790
+ const w = val(this, 'width', reltime) || val(this._movie, 'width', this.startTime + reltime);
791
+ const h = val(this, 'height', reltime) || val(this._movie, 'height', this.startTime + reltime);
792
+ if (w * h > 0) {
793
+ this._applyEffects();
794
+ }
795
+ // else InvalidStateError for drawing zero-area image in some effects, right?
796
+ }
797
+
798
+ _applyEffects () {
799
+ for (let i = 0; i < this.effects.length; i++) {
800
+ const effect = this.effects[i];
801
+ if (effect.enabled) {
802
+ effect.apply(this, this._movie.currentTime - this.startTime); // pass relative time
803
+ }
804
+ }
805
+ }
806
+
807
+ /**
808
+ * Convienence method for <code>effects.push()</code>
809
+ * @param {BaseEffect} effect
810
+ * @return {module:layer.Visual} the layer (for chaining)
811
+ */
812
+ addEffect (effect) {
813
+ this.effects.push(effect); return this
814
+ }
815
+
816
/**
 * The intermediate rendering canvas (not the movie's output canvas)
 * @type HTMLCanvasElement
 */
get canvas () {
  return this._canvas
}
823
+
824
/**
 * The 2D rendering context of {@link module:layer.Visual#canvas}
 * @type CanvasRenderingContext2D
 */
get cctx () {
  return this._cctx
}
831
+
832
/**
 * The effects applied to this layer. Exposed as a proxy so that effects are
 * attached/detached automatically when added to or removed from the array.
 * @type effect.Base[]
 */
get effects () {
  return this._effects // private (because it's a proxy)
}
838
+
839
// Returns this layer's option defaults, extending the base layer's defaults.
getDefaultOptions () {
  return {
    ...Base.prototype.getDefaultOptions(),
    /**
     * @name module:layer.Visual#x
     * @type number
     * @desc The horizontal offset of the layer relative to the movie
     */
    x: 0,
    /**
     * @name module:layer.Visual#y
     * @type number
     * @desc The vertical offset of the layer relative to the movie
     */
    y: 0,
    /**
     * @name module:layer.Visual#width
     * @type number
     * @desc The layer's width, or <code>null</code> to use the movie's width
     */
    width: null,
    /**
     * @name module:layer.Visual#height
     * @type number
     * @desc The layer's height, or <code>null</code> to use the movie's height
     */
    height: null,
    /**
     * @name module:layer.Visual#background
     * @type string
     * @desc The css color code for the background, or <code>null</code> for transparency
     */
    background: null,
    /**
     * @name module:layer.Visual#border
     * @type string
     * @desc The css border style, or <code>null</code> for no border
     */
    border: null,
    /**
     * @name module:layer.Visual#opacity
     * @type number
     * @desc <code>1</code> for fully opaque, <code>0</code> for fully transparent
     */
    opacity: 1
  }
}
883
+ }
884
// Exclude these accessors from public watch/update events.
Visual.prototype.publicExcludes = Base.prototype.publicExcludes.concat(['canvas', 'cctx', 'effects']);
Visual.prototype.propertyFilters = {
  // Bug fix: propertyFilters lives on Base.prototype (like publicExcludes
  // above), not on the Base constructor itself; spreading `Base.propertyFilters`
  // silently dropped the inherited filters.
  ...Base.prototype.propertyFilters,
  // If no layer width was provided, fall back to the movie's width.
  width: function (width) {
    return width !== undefined ? width : this._movie.width
  },
  // If no layer height was provided, fall back to the movie's height.
  height: function (height) {
    return height !== undefined ? height : this._movie.height
  }
};
894
+
895
class Text extends Visual {
  // TODO: is textX necessary? it seems inconsistent, because you can't define width/height directly for a text layer
  /**
   * Creates a new text layer
   *
   * @param {number} startTime
   * @param {number} duration
   * @param {string} text - the text to display
   * @param {object} [options] - various optional arguments
   * @param {number} [options.x=0] - the horizontal position of the layer (relative to the movie)
   * @param {number} [options.y=0] - the vertical position of the layer (relative to the movie)
   * @param {string} [options.background=null] - the background color of the layer, or <code>null</code>
   *  for a transparent background
   * @param {object} [options.border=null] - the layer's outline, or <code>null</code> for no outline
   * @param {string} [options.border.color] - the outline's color; required for a border
   * @param {string} [options.border.thickness=1] - the outline's weight
   * @param {number} [options.opacity=1] - the layer's opacity; <code>1</code> for full opacity
   *  and <code>0</code> for full transparency
   * @param {string} [options.font="10px sans-serif"]
   * @param {string} [options.color="#fff"]
   * @param {number} [options.textX=0] - the text's horizontal offset relative to the layer
   * @param {number} [options.textY=0] - the text's vertical offset relative to the layer
   * @param {number} [options.maxWidth=null] - the maximum width of a line of text
   * @param {string} [options.textAlign="start"] - horizontal align
   * @param {string} [options.textBaseline="top"] - vertical align
   * @param {string} [options.textDirection="ltr"] - the text direction
   *
   * @todo add padding options
   */
  constructor (startTime, duration, text, options = {}) {
    // default to no (transparent) background
    super(startTime, duration, { background: null, ...options }); // fill in zeros in |doRender|
    applyOptions(options, this);

    /**
     * @type string
     */
    this.text = text;

    // this._prevText = undefined;
    // // because the canvas context rounds font size, but we need to be more accurate
    // // rn, this doesn't make a difference, because we can only measure metrics by integer font sizes
    // this._lastFont = undefined;
    // this._prevMaxWidth = undefined;
  }

  doRender (reltime) {
    super.doRender(reltime);
    const text = val(this, 'text', reltime); const font = val(this, 'font', reltime);
    const maxWidth = this.maxWidth ? val(this, 'maxWidth', reltime) : undefined;
    // // properties that affect metrics
    // if (this._prevText !== text || this._prevFont !== font || this._prevMaxWidth !== maxWidth)
    //   this._updateMetrics(text, font, maxWidth);

    this.cctx.font = font;
    this.cctx.fillStyle = val(this, 'color', reltime);
    this.cctx.textAlign = val(this, 'textAlign', reltime);
    this.cctx.textBaseline = val(this, 'textBaseline', reltime);
    // Bug fix: the CanvasRenderingContext2D property is `direction`, not
    // `textDirection`; the previous assignment was silently ignored.
    this.cctx.direction = val(this, 'textDirection', reltime);
    this.cctx.fillText(
      text, val(this, 'textX', reltime), val(this, 'textY', reltime),
      maxWidth
    );

    // Cache the metric-affecting properties for the (disabled) metric update above.
    this._prevText = text;
    this._prevFont = font;
    this._prevMaxWidth = maxWidth;
  }

  // _updateMetrics(text, font, maxWidth) {
  //   // TODO calculate / measure for non-integer font.size values
  //   let metrics = Text._measureText(text, font, maxWidth);
  //   // TODO: allow user-specified/overwritten width/height
  //   this.width = /*this.width || */metrics.width;
  //   this.height = /*this.height || */metrics.height;
  // }

  // TODO: implement setters and getters that update dimensions!

  /* static _measureText(text, font, maxWidth) {
    // TODO: fix too much bottom padding
    const s = document.createElement("span");
    s.textContent = text;
    s.style.font = font;
    s.style.padding = "0";
    if (maxWidth) s.style.maxWidth = maxWidth;
    document.body.appendChild(s);
    const metrics = {width: s.offsetWidth, height: s.offsetHeight};
    document.body.removeChild(s);
    return metrics;
  } */

  getDefaultOptions () {
    return {
      ...Visual.prototype.getDefaultOptions(),
      background: null,
      /**
       * @name module:layer.Text#font
       * @type string
       * @desc The css font to render with
       */
      font: '10px sans-serif',
      /**
       * @name module:layer.Text#color
       * @type string
       * @desc The css color to render with
       */
      color: '#fff',
      /**
       * @name module:layer.Text#textX
       * @type number
       * @desc Horizontal offset of the text relative to the layer
       */
      textX: 0,
      /**
       * @name module:layer.Text#textY
       * @type number
       * @desc Vertical offset of the text relative to the layer
       */
      textY: 0,
      /**
       * @name module:layer.Text#maxWidth
       * @type number
       * @desc The maximum width of a line of text, or <code>null</code> for no limit
       */
      maxWidth: null,
      /**
       * @name module:layer.Text#textAlign
       * @type string
       * @desc The horizontal alignment
       * @see [<code>CanvasRenderingContext2D#textAlign</code>]{@link https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D/textAlign}
       */
      textAlign: 'start',
      /**
       * @name module:layer.Text#textBaseline
       * @type string
       * @desc The vertical alignment
       * @see [<code>CanvasRenderingContext2D#textBaseline</code>]{@link https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D/textBaseline}
       */
      textBaseline: 'top',
      /**
       * @name module:layer.Text#textDirection
       * @type string
       * @see [<code>CanvasRenderingContext2D#direction</code>]{@link https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D/direction}
       */
      textDirection: 'ltr'
    }
  }
}
1045
+
1046
class Image extends Visual {
  /**
   * Creates a new image layer
   *
   * @param {number} startTime
   * @param {number} duration
   * @param {HTMLImageElement} image
   * @param {object} [options]
   * @param {number} [options.x=0] - the horizontal offset of the layer relative to the movie
   * @param {number} [options.y=0] - the vertical offset of the layer relative to the movie
   * @param {string} [options.background=null] - the background color of the layer, or <code>null</code>
   *  for transparency
   * @param {object} [options.border=null] - the layer's outline, or <code>null</code> for no outline
   * @param {string} [options.border.color] - the outline's color; required for a border
   * @param {string} [options.border.thickness=1] - the outline's weight
   * @param {number} [options.opacity=1] - the layer's opacity; <code>1</code> for full opacity
   *  and <code>0</code> for full transparency
   * @param {number} [options.clipX=0] - image source x
   * @param {number} [options.clipY=0] - image source y
   * @param {number} [options.clipWidth=undefined] - image source width, or <code>undefined</code> to fill the entire layer
   * @param {number} [options.clipHeight=undefined] - image source height, or <code>undefined</code> to fill the entire layer
   * @param {number} [options.imageX=0] - offset of the image relative to the layer
   * @param {number} [options.imageY=0] - offset of the image relative to the layer
   */
  constructor (startTime, duration, image, options = {}) {
    super(startTime, duration, options); // wait to set width & height
    applyOptions(options, this);
    // clipX... => how much to show of this.image
    // imageX... => how to project this.image onto the canvas
    this._image = image;

    // Fill in unspecified dimensions from the image's natural size.
    const load = () => {
      this.width = this.imageWidth = this.width || this.image.width;
      this.height = this.imageHeight = this.height || this.image.height;
      this.clipWidth = this.clipWidth || image.width;
      this.clipHeight = this.clipHeight || image.height;
    };
    if (image.complete) {
      // already loaded, so dimensions are available now
      load();
    } else {
      image.addEventListener('load', load);
    }
  }

  doRender (reltime) {
    super.doRender(reltime); // clear/fill background
    this.cctx.drawImage(
      this.image,
      val(this, 'clipX', reltime), val(this, 'clipY', reltime),
      val(this, 'clipWidth', reltime), val(this, 'clipHeight', reltime),
      // this.imageX and this.imageY are relative to layer
      val(this, 'imageX', reltime), val(this, 'imageY', reltime),
      val(this, 'imageWidth', reltime), val(this, 'imageHeight', reltime)
    );
  }

  /**
   * The raw html image element backing this layer
   * @type HTMLImageElement
   */
  get image () {
    return this._image
  }

  getDefaultOptions () {
    return {
      ...Visual.prototype.getDefaultOptions(),
      /**
       * @name module:layer.Image#clipX
       * @type number
       * @desc Image source x
       */
      clipX: 0,
      /**
       * @name module:layer.Image#clipY
       * @type number
       * @desc Image source y
       */
      clipY: 0,
      /**
       * @name module:layer.Image#clipWidth
       * @type number
       * @desc Image source width, or <code>undefined</code> to fill the entire layer
       */
      clipWidth: undefined,
      /**
       * @name module:layer.Image#clipHeight
       * @type number
       * @desc Image source height, or <code>undefined</code> to fill the entire layer
       */
      clipHeight: undefined,
      /**
       * @name module:layer.Image#imageX
       * @type number
       * @desc Horizontal offset of the image relative to the layer
       */
      imageX: 0,
      /**
       * @name module:layer.Image#imageY
       * @type number
       * @desc Vertical offset of the image relative to the layer
       */
      imageY: 0
    }
  }
}
1151
+
1152
// https://web.archive.org/web/20190111044453/http://justinfagnani.com/2015/12/21/real-mixins-with-javascript-classes/
/**
 * Video or audio
 * @mixin MediaMixin
 * @todo implement playback rate
 */
const MediaMixin = superclass => {
  if (superclass !== Base && superclass !== Visual) {
    throw new Error('Media can only extend Base and Visual')
  }

  class Media extends superclass {
    /**
     * @param {number} startTime
     * @param {HTMLVideoElement} media
     * @param {function} [onload] - called (bound to the layer) once the media's
     *  metadata is available; receives <code>(media, options)</code>
     * @param {object} [options]
     * @param {number} [options.mediaStartTime=0] - at what time in the audio the layer starts
     * @param {number} [options.duration=media.duration-options.mediaStartTime]
     * @param {boolean} [options.muted=false]
     * @param {number} [options.volume=1]
     * @param {number} [options.playbackRate=1]
     */
    constructor (startTime, media, onload, options = {}) {
      super(startTime, 0, options); // works with both Base and Visual
      // NOTE(review): _initialized is checked by the startTime/mediaStartTime
      // setters below but is never set to true in this view — TODO confirm it
      // is set elsewhere.
      this._initialized = false;
      this._media = media;
      this._mediaStartTime = options.mediaStartTime || 0;
      applyOptions(options, this);

      // Compute the layer's duration once the media's metadata is known.
      const load = () => {
        // TODO: && ?
        if ((options.duration || (media.duration - this.mediaStartTime)) < 0) {
          throw new Error('Invalid options.duration or options.mediaStartTime')
        }
        this.duration = options.duration || (media.duration - this.mediaStartTime);
        // onload will use `this`, and can't bind itself because it's before super()
        onload && onload.bind(this)(media, options);
      };
      if (media.readyState >= 2) {
        // this frame's data is available now
        load();
      } else {
        // when this frame's data is available
        media.addEventListener('canplay', load);
      }
      // Keep the layer's duration in sync if the media's duration changes.
      media.addEventListener('durationchange', () => {
        this.duration = options.duration || (media.duration - this.mediaStartTime);
      });

      // TODO: on unattach?
      // Reroute this layer's audio when the movie changes its audio destination
      // (e.g. while recording).
      subscribe(this, 'movie.audiodestinationupdate', event => {
        // reset destination
        this.source.disconnect();
        this.source.connect(event.destination);
      });
    }

    attach (movie) {
      super.attach(movie);

      // Keep the media element's playhead in sync with movie seeks that land
      // inside this layer's time range.
      subscribe(movie, 'movie.seek', e => {
        const time = e.movie.currentTime;
        if (time < this.startTime || time >= this.startTime + this.duration) {
          return
        }
        this.media.currentTime = time - this.startTime;
      });
      // connect to audiocontext
      this._source = movie.actx.createMediaElementSource(this.media);
      this.source.connect(movie.actx.destination);
    }

    // Begin media playback at the layer-relative time.
    start (reltime) {
      this.media.currentTime = reltime + this.mediaStartTime;
      this.media.play();
    }

    render (reltime) {
      super.render(reltime);
      // even interpolate here
      // TODO: implement Issue: Create built-in audio node to support built-in audio nodes, as this does nothing rn
      this.media.muted = val(this, 'muted', reltime);
      this.media.volume = val(this, 'volume', reltime);
      this.media.playbackRate = val(this, 'playbackRate', reltime);
    }

    // Pause media playback when the layer stops.
    stop () {
      this.media.pause();
    }

    /**
     * The raw html media element
     * @type HTMLMediaElement
     */
    get media () {
      return this._media
    }

    /**
     * The audio source node for the media
     * @type MediaStreamAudioSourceNode
     */
    get source () {
      return this._source
    }

    get startTime () {
      return this._startTime
    }

    set startTime (val) {
      this._startTime = val;
      // Re-sync the media playhead if playback is already under way.
      if (this._initialized) {
        const mediaProgress = this._movie.currentTime - this.startTime;
        this.media.currentTime = this.mediaStartTime + mediaProgress;
      }
    }

    set mediaStartTime (val) {
      this._mediaStartTime = val;
      // Re-sync the media playhead if playback is already under way.
      if (this._initialized) {
        const mediaProgress = this._movie.currentTime - this.startTime;
        this.media.currentTime = mediaProgress + this.mediaStartTime;
      }
    }

    /**
     * Where in the media the layer starts at
     * @type number
     */
    get mediaStartTime () {
      return this._mediaStartTime
    }

    getDefaultOptions () {
      return {
        ...superclass.prototype.getDefaultOptions(),
        /**
         * @name module:layer~Media#mediaStartTime
         * @type number
         * @desc Where in the media the layer starts at
         */
        mediaStartTime: 0,
        /**
         * @name module:layer~Media#duration
         * @type number
         */
        duration: undefined, // important to include undefined keys, for applyOptions
        /**
         * @name module:layer~Media#muted
         * @type boolean
         */
        muted: false,
        /**
         * @name module:layer~Media#volume
         * @type number
         */
        volume: 1,
        /**
         * @name module:layer~Media#playbackRate
         * @type number
         * @todo <strong>Implement</strong>
         */
        playbackRate: 1
      }
    }
  }
  return Media // custom mixin class
};
1321
+
1322
// use mixins instead of `extend`ing two classes (which doesn't work); see below class def
/**
 * @extends module:layer~Media
 */
class Video extends MediaMixin(Visual) {
  /**
   * Creates a new video layer
   *
   * @param {number} startTime
   * @param {HTMLVideoElement} media
   * @param {object} [options]
   * @param {number} [options.mediaStartTime=0] - at what time in the video the layer starts
   * @param {number} [options.duration=media.duration-options.mediaStartTime]
   * @param {boolean} [options.muted=false]
   * @param {number} [options.volume=1]
   * @param {number} [options.speed=1] - the audio's playback rate
   * @param {number} [options.clipX=0] - video source x
   * @param {number} [options.clipY=0] - video source y
   * @param {number} [options.clipWidth] - video source width; defaults to the video's natural width
   * @param {number} [options.clipHeight] - video source height; defaults to the video's natural height
   * @param {number} [options.mediaX=0] - video offset relative to the layer
   * @param {number} [options.mediaY=0] - video offset relative to the layer
   */
  constructor (startTime, media, options = {}) {
    // fill in the zeros once loaded
    super(startTime, media, function () {
      this.width = this.mediaWidth = options.width || media.videoWidth;
      this.height = this.mediaHeight = options.height || media.videoHeight;
      this.clipWidth = options.clipWidth || media.videoWidth;
      this.clipHeight = options.clipHeight || media.videoHeight;
    }, options);
    // clipX... => how much to show of this.media
    // mediaX... => how to project this.media onto the canvas
    applyOptions(options, this);
    if (this.duration === undefined) {
      this.duration = media.duration - this.mediaStartTime;
    }
  }

  doRender (reltime) {
    // Bug fix: forward `reltime` to the parent renderer; previously it was
    // called with no argument, so the background/border were evaluated at an
    // undefined time.
    super.doRender(reltime);
    this.cctx.drawImage(this.media,
      val(this, 'clipX', reltime), val(this, 'clipY', reltime),
      val(this, 'clipWidth', reltime), val(this, 'clipHeight', reltime),
      val(this, 'mediaX', reltime), val(this, 'mediaY', reltime), // relative to layer
      val(this, 'mediaWidth', reltime), val(this, 'mediaHeight', reltime));
  }

  getDefaultOptions () {
    return {
      ...Object.getPrototypeOf(this).getDefaultOptions(), // let's not call MediaMixin again
      /**
       * @name module:layer.Video#clipX
       * @type number
       * @desc Video source x
       */
      clipX: 0,
      /**
       * @name module:layer.Video#clipY
       * @type number
       * @desc Video source y
       */
      clipY: 0,
      /**
       * @name module:layer.Video#mediaX
       * @type number
       * @desc Video offset relative to layer
       */
      mediaX: 0,
      /**
       * @name module:layer.Video#mediaY
       * @type number
       * @desc Video offset relative to layer
       */
      mediaY: 0,
      /**
       * @name module:layer.Video#mediaWidth
       * @type number
       * @desc Video destination width
       */
      mediaWidth: undefined,
      /**
       * @name module:layer.Video#mediaHeight
       * @type number
       * @desc Video destination height
       */
      mediaHeight: undefined
    }
  }
}
1417
+
1418
/**
 * @extends module:layer~Media
 */
class Audio extends MediaMixin(Base) {
  /**
   * Creates an audio layer
   *
   * @param {number} startTime
   * @param {HTMLAudioElement} media
   * @param {object} [options]
   * @param {number} [options.mediaStartTime=0] - at what time in the audio the layer starts
   * @param {number} [options.duration=media.duration-options.mediaStartTime]
   * @param {boolean} [options.muted=false]
   * @param {number} [options.volume=1]
   * @param {number} [options.speed=1] - the audio's playback rate
   */
  constructor (startTime, media, options = {}) {
    // fill in the zero once loaded, no width or height (will raise error)
    super(startTime, media, null, options);
    applyOptions(options, this);
    if (this.duration === undefined) {
      this.duration = media.duration - this.mediaStartTime;
    }
  }

  getDefaultOptions () {
    return {
      ...Object.getPrototypeOf(this).getDefaultOptions(), // let's not call MediaMixin again
      /**
       * @name module:layer.Audio#mediaStartTime
       * @type number
       * @desc Where in the media to start playing when the layer starts
       */
      mediaStartTime: 0,
      // important to include undefined keys, for applyOptions
      duration: undefined
    }
  }
}
1459
+
1460
// Public namespace object for the layer module.
var layers = /*#__PURE__*/Object.freeze({
  Base,
  Visual,
  Text,
  Image,
  MediaMixin,
  Video,
  Audio
});
1469
+
1470
+ /**
1471
+ * @module movie
1472
+ */
1473
+
1474
+ /**
1475
+ * Contains all layers and movie information<br>
1476
+ * Implements a sub/pub system (adapted from https://gist.github.com/lizzie/4993046)
1477
+ *
1478
+ * @todo Implement event "durationchange", and more
1479
+ * @todo Add width and height options
1480
+ * @todo Make record option to make recording video output to the user while it's recording
1481
+ * @todo rename renderingFrame -> refreshing
1482
+ */
1483
+ class Movie {
1484
+ /**
1485
+ * Creates a new <code>Movie</code> instance (project)
1486
+ *
1487
+ * @param {HTMLCanvasElement} canvas - the canvas to display image data on
1488
+ * @param {object} [options] - various optional arguments
1489
+ * @param {BaseAudioContext} [options.audioContext=new AudioContext()]
1490
+ * @param {string} [options.background="#000"] - the background color of the movie,
1491
+ * or <code>null</code> for a transparent background
1492
+ * @param {boolean} [options.repeat=false] - whether to loop playback
1493
+ * @param {boolean} [options.autoRefresh=true] - whether to call `.refresh()` on init and when relevant layers
1494
+ * are added/removed
1495
+ */
1496
+ constructor (canvas, options = {}) {
1497
+ // TODO: move into multiple methods!
1498
+ // Rename audioContext -> _actx
1499
+ if ('audioContext' in options) {
1500
+ options._actx = options.audioContext;
1501
+ }
1502
+ delete options.audioContext; // TODO: move up a line :P
1503
+
1504
+ const newThis = watchPublic(this); // proxy that will be returned by constructor
1505
+ // Don't send updates when initializing, so use this instead of newThis:
1506
+ // output canvas
1507
+ this._canvas = canvas;
1508
+ // output canvas context
1509
+ this._cctx = canvas.getContext('2d'); // TODO: make private?
1510
+ applyOptions(options, this);
1511
+
1512
+ // proxy arrays
1513
+ const that = newThis;
1514
+
1515
+ this._effectsBack = [];
1516
+ this._effects = new Proxy(newThis._effectsBack, {
1517
+ apply: function (target, thisArg, argumentsList) {
1518
+ return thisArg[target].apply(newThis, argumentsList)
1519
+ },
1520
+ deleteProperty: function (target, property) {
1521
+ // Refresh screen when effect is removed, if the movie isn't playing already.
1522
+ const value = target[property];
1523
+ publish(that, 'movie.change.effect.remove', { effect: value });
1524
+ value.detach();
1525
+ delete target[property];
1526
+ return true
1527
+ },
1528
+ set: function (target, property, value) {
1529
+ if (!isNaN(property)) { // if property is an number (index)
1530
+ if (target[property]) {
1531
+ delete target[property]; // call deleteProperty
1532
+ }
1533
+ value.attach(that); // Attach effect to movie (first)
1534
+ // Refresh screen when effect is set, if the movie isn't playing already.
1535
+ publish(that, 'movie.change.effect.add', { effect: value });
1536
+ }
1537
+ target[property] = value;
1538
+ return true
1539
+ }
1540
+ });
1541
+
1542
+ this._layersBack = [];
1543
+ this._layers = new Proxy(newThis._layersBack, {
1544
+ apply: function (target, thisArg, argumentsList) {
1545
+ return thisArg[target].apply(newThis, argumentsList)
1546
+ },
1547
+ deleteProperty: function (target, property) {
1548
+ const oldDuration = this.duration;
1549
+ const value = target[property];
1550
+ value.detach(that);
1551
+ const current = that.currentTime >= value.startTime && that.currentTime < value.startTime + value.duration;
1552
+ if (current) {
1553
+ publish(that, 'movie.change.layer.remove', { layer: value });
1554
+ }
1555
+ publish(that, 'movie.change.duration', { oldDuration });
1556
+ delete target[property];
1557
+ return true
1558
+ },
1559
+ set: function (target, property, value) {
1560
+ const oldDuration = this.duration;
1561
+ target[property] = value;
1562
+ if (!isNaN(property)) { // if property is an number (index)
1563
+ value.attach(that); // Attach layer to movie (first)
1564
+ // Refresh screen when a relevant layer is added or removed
1565
+ const current = that.currentTime >= value.startTime && that.currentTime < value.startTime + value.duration;
1566
+ if (current) {
1567
+ publish(that, 'movie.change.layer.add', { layer: value });
1568
+ }
1569
+ publish(that, 'movie.change.duration', { oldDuration });
1570
+ }
1571
+ return true
1572
+ }
1573
+ });
1574
+ this._paused = true;
1575
+ this._ended = false;
1576
+ // to prevent multiple frame-rendering loops at the same time (see `render`)
1577
+ this._renderingFrame = false; // only applicable when rendering
1578
+ this._currentTime = 0;
1579
+
1580
+ this._mediaRecorder = null; // for recording
1581
+
1582
+ // NOTE: -1 works well in inequalities
1583
+ this._lastPlayed = -1; // the last time `play` was called
1584
+ this._lastPlayedOffset = -1; // what was `currentTime` when `play` was called
1585
+ // newThis._updateInterval = 0.1; // time in seconds between each "timeupdate" event
1586
+ // newThis._lastUpdate = -1;
1587
+
1588
+ if (newThis.autoRefresh) {
1589
+ newThis.refresh(); // render single frame on init
1590
+ }
1591
+
1592
+ // Subscribe to own event "change" (child events propogate up)
1593
+ subscribe(newThis, 'movie.change', () => {
1594
+ if (newThis.autoRefresh && !newThis.rendering) {
1595
+ newThis.refresh();
1596
+ }
1597
+ });
1598
+
1599
+ // Subscribe to own event "ended"
1600
+ subscribe(newThis, 'movie.ended', () => {
1601
+ if (newThis.recording) {
1602
+ newThis._mediaRecorder.requestData(); // I shouldn't have to call newThis right? err
1603
+ newThis._mediaRecorder.stop();
1604
+ }
1605
+ });
1606
+
1607
+ return newThis
1608
+ }
1609
+
1610
+ /**
1611
+ * Plays the movie
1612
+ * @return {Promise} fulfilled when done playing, never fails
1613
+ */
1614
+ play () {
1615
+ return new Promise((resolve, reject) => {
1616
+ if (!this.paused) {
1617
+ throw new Error('Already playing')
1618
+ }
1619
+
1620
+ this._paused = this._ended = false;
1621
+ this._lastPlayed = performance.now();
1622
+ this._lastPlayedOffset = this.currentTime;
1623
+
1624
+ if (!this._renderingFrame) {
1625
+ // Not rendering (and not playing), so play
1626
+ this._render(undefined, resolve);
1627
+ }
1628
+ // Stop rendering frame if currently doing so, because playing has higher priority.
1629
+ this._renderingFrame = false; // this will effect the next _render call
1630
+
1631
+ publish(this, 'movie.play', {});
1632
+ })
1633
+ }
1634
+
1635
+ // TEST: *support recording that plays back with audio!*
1636
+ // TODO: figure out a way to record faster than playing (i.e. not in real time)
1637
+ // TODO: improve recording performance to increase frame rate?
1638
+ /**
1639
+ * Plays the movie in the background and records it
1640
+ *
1641
+ * @param {number} framerate
1642
+ * @param {object} [options]
1643
+ * @param {boolean} [options.video=true] - whether to include video in recording
1644
+ * @param {boolean} [options.audio=true] - whether to include audio in recording
1645
+ * @param {object} [options.mediaRecorderOptions=undefined] - options to pass to the <code>MediaRecorder</code>
1646
+ * constructor
1647
+ * @return {Promise} resolves when done recording, rejects when internal media recorder errors
1648
+ */
1649
+ record (framerate, options = {}) {
1650
+ if (options.video === options.audio === false) {
1651
+ throw new Error('Both video and audio cannot be disabled')
1652
+ }
1653
+
1654
+ if (!this.paused) {
1655
+ throw new Error('Cannot record movie while already playing or recording')
1656
+ }
1657
+ return new Promise((resolve, reject) => {
1658
+ // https://developers.google.com/web/updates/2016/01/mediarecorder
1659
+ const canvasCache = this.canvas;
1660
+ // record on a temporary canvas context
1661
+ this._canvas = document.createElement('canvas');
1662
+ this.canvas.width = canvasCache.width;
1663
+ this.canvas.height = canvasCache.height;
1664
+ this._cctx = this.canvas.getContext('2d');
1665
+
1666
+ const recordedChunks = []; // frame blobs
1667
+ // combine image + audio, or just pick one
1668
+ let tracks = [];
1669
+ if (options.video !== false) {
1670
+ const visualStream = this.canvas.captureStream(framerate);
1671
+ tracks = tracks.concat(visualStream.getTracks());
1672
+ }
1673
+ // Check if there's a layer that's an instance of a Media mixin (Audio or Video)
1674
+ const hasMediaTracks = this.layers.some(layer => layer instanceof Audio || layer instanceof Video);
1675
+ // If no media tracks present, don't include an audio stream, because Chrome doesn't record silence
1676
+ // when an audio stream is present.
1677
+ if (hasMediaTracks && options.audio !== false) {
1678
+ const audioDestination = this.actx.createMediaStreamDestination();
1679
+ const audioStream = audioDestination.stream;
1680
+ tracks = tracks.concat(audioStream.getTracks());
1681
+ this.publishToLayers('movie.audiodestinationupdate', { movie: this, destination: audioDestination });
1682
+ }
1683
+ const stream = new MediaStream(tracks);
1684
+ const mediaRecorder = new MediaRecorder(stream, options.mediaRecorderOptions);
1685
+ // TODO: publish to movie, not layers
1686
+ mediaRecorder.ondataavailable = event => {
1687
+ // if (this._paused) reject(new Error("Recording was interrupted"));
1688
+ if (event.data.size > 0) {
1689
+ recordedChunks.push(event.data);
1690
+ }
1691
+ };
1692
+ mediaRecorder.onstop = () => {
1693
+ this._ended = true;
1694
+ this._canvas = canvasCache;
1695
+ this._cctx = this.canvas.getContext('2d');
1696
+ this.publishToLayers(
1697
+ 'movie.audiodestinationupdate',
1698
+ { movie: this, destination: this.actx.destination }
1699
+ );
1700
+ this._mediaRecorder = null;
1701
+ // construct super-blob
1702
+ // this is the exported video as a blob!
1703
+ resolve(new Blob(recordedChunks, { type: 'video/webm' }/*, {"type" : "audio/ogg; codecs=opus"} */));
1704
+ };
1705
+ mediaRecorder.onerror = reject;
1706
+
1707
+ mediaRecorder.start();
1708
+ this._mediaRecorder = mediaRecorder;
1709
+ this.play();
1710
+ publish(this, 'movie.record', { options });
1711
+ })
1712
+ }
1713
+
1714
+ /**
1715
+ * Stops the movie, without reseting the playback position
1716
+ * @return {Movie} the movie (for chaining)
1717
+ */
1718
+ pause () {
1719
+ this._paused = true;
1720
+ // disable all layers
1721
+ for (let i = 0; i < this.layers.length; i++) {
1722
+ const layer = this.layers[i];
1723
+ layer.stop(this.currentTime - layer.startTime);
1724
+ layer._active = false;
1725
+ }
1726
+ publish(this, 'movie.pause', {});
1727
+ return this
1728
+ }
1729
+
1730
+ /**
1731
+ * Stops playback and resets the playback position
1732
+ * @return {Movie} the movie (for chaining)
1733
+ */
1734
+ stop () {
1735
+ this.pause();
1736
+ this.currentTime = 0; // use setter?
1737
+ return this
1738
+ }
1739
+
1740
/**
 * Renders one frame and, while playing, schedules the next one.
 *
 * @param {number} [timestamp=performance.now()] - the frame timestamp (supplied by requestAnimationFrame)
 * @param {function} [done=undefined] - called when done playing or when the current frame is loaded
 * @private
 */
_render (timestamp = performance.now(), done = undefined) {
  clearCachedValues(this);

  if (!this.rendering) {
    // Neither playing nor refreshing a single frame, so stop the loop.
    done && done();
    return
  }

  this._updateCurrentTime(timestamp);
  // bad for performance? (remember, it's calling Array.reduce)
  const end = this.duration;
  const ended = this.currentTime >= end;
  if (ended) {
    publish(this, 'movie.ended', { movie: this, repeat: this.repeat });
    this._currentTime = 0; // don't use setter (it would trigger a refresh)
    publish(this, 'movie.timeupdate', { movie: this });
    // Reset the playback-position bookkeeping to zero.
    this._lastPlayed = performance.now();
    this._lastPlayedOffset = 0; // this.currentTime
    this._renderingFrame = false;
    // Recording never repeats, even when `repeat` is set.
    if (!this.repeat || this.recording) {
      this._ended = true;
      // disable all layers
      for (let i = 0; i < this.layers.length; i++) {
        const layer = this.layers[i];
        layer.stop(this.currentTime - layer.startTime);
        layer._active = false;
      }
    }
    done && done();
    return
  }

  // Do the actual render: background, then layers, then movie-level effects.
  this._renderBackground(timestamp);
  const frameFullyLoaded = this._renderLayers(timestamp);
  this._applyEffects();

  if (frameFullyLoaded) {
    publish(this, 'movie.loadeddata', { movie: this });
  }

  // When refreshing a single frame that hasn't fully loaded, keep re-rendering
  // until it loads; once loaded, stop silently (no event is published).
  if (this._renderingFrame && frameFullyLoaded) {
    this._renderingFrame = false;
    done && done();
    return
  }

  window.requestAnimationFrame(timestamp => {
    this._render(timestamp);
  }); // TODO: research performance cost
}
1799
+
1800
+ _updateCurrentTime (timestamp) {
1801
+ // if we're only instant-rendering (current frame only), it doens't matter if it's paused or not
1802
+ if (!this._renderingFrame) {
1803
+ // if ((timestamp - this._lastUpdate) >= this._updateInterval) {
1804
+ const sinceLastPlayed = (timestamp - this._lastPlayed) / 1000;
1805
+ this._currentTime = this._lastPlayedOffset + sinceLastPlayed; // don't use setter
1806
+ publish(this, 'movie.timeupdate', { movie: this });
1807
+ // this._lastUpdate = timestamp;
1808
+ // }
1809
+ }
1810
+ }
1811
+
1812
+ _renderBackground (timestamp) {
1813
+ this.cctx.clearRect(0, 0, this.canvas.width, this.canvas.height);
1814
+ if (this.background) { // TODO: check valued result
1815
+ this.cctx.fillStyle = val(this, 'background', timestamp);
1816
+ this.cctx.fillRect(0, 0, this.canvas.width, this.canvas.height);
1817
+ }
1818
+ }
1819
+
1820
/**
 * Draws every layer that intersects the current time onto the movie canvas,
 * starting/stopping layers as they enter/leave their time interval.
 *
 * @return {boolean} whether or not video frames are loaded
 * @param {number} [timestamp=performance.now()]
 * @private
 */
_renderLayers (timestamp) {
  let frameFullyLoaded = true;
  for (let i = 0; i < this.layers.length; i++) {
    const layer = this.layers[i];
    const reltime = this.currentTime - layer.startTime;
    // Cancel operation if layer disabled or outside layer time interval
    if (!layer.enabled ||
      // > or >= ?
      this.currentTime < layer.startTime || this.currentTime > layer.startTime + layer.duration) {
      // Outside the layer's time interval. If only rendering this frame
      // (refresh), we never "started" the layer, so only stop it during
      // real playback.
      if (layer.active && !this._renderingFrame) {
        // TODO: make a `deactivate()` method?
        layer.stop(reltime);
        layer._active = false;
      }
      continue
    }
    // if only rendering this frame, we are not "starting" the layer
    if (!layer.active && layer.enabled && !this._renderingFrame) {
      // TODO: make an `activate()` method?
      layer.start(reltime);
      layer._active = true;
    }

    // readyState >= 2 (HAVE_CURRENT_DATA) means the media has a displayable frame.
    if (layer.media) {
      frameFullyLoaded = frameFullyLoaded && layer.media.readyState >= 2;
    } // frame loaded
    layer.render(reltime); // pass relative time for convenience

    // If the layer has a visual component, composite it onto the movie canvas.
    if (layer.canvas) {
      // layer.canvas.width and layer.canvas.height should already be interpolated
      // Only draw if the layer has an area (else InvalidStateError from canvas).
      if (layer.canvas.width * layer.canvas.height > 0) {
        this.cctx.drawImage(layer.canvas,
          val(layer, 'x', reltime), val(layer, 'y', reltime), layer.canvas.width, layer.canvas.height
        );
      }
    }
  }

  return frameFullyLoaded
}
1871
+
1872
+ _applyEffects () {
1873
+ for (let i = 0; i < this.effects.length; i++) {
1874
+ const effect = this.effects[i];
1875
+ effect.apply(this, this.currentTime);
1876
+ }
1877
+ }
1878
+
1879
+ /**
1880
+ * Refreshes the screen (only use this if auto-refresh is disabled)
1881
+ * @return {Promise} - resolves when the frame is loaded
1882
+ */
1883
+ refresh () {
1884
+ if (this.rendering) {
1885
+ throw new Error('Cannot refresh frame while already rendering')
1886
+ }
1887
+
1888
+ return new Promise((resolve, reject) => {
1889
+ this._renderingFrame = true;
1890
+ this._render(undefined, resolve);
1891
+ })
1892
+ }
1893
+
1894
+ /**
1895
+ * Convienence method
1896
+ * @todo Make private
1897
+ */
1898
+ publishToLayers (type, event) {
1899
+ for (let i = 0; i < this.layers.length; i++) {
1900
+ publish(this.layers[i], type, event);
1901
+ }
1902
+ }
1903
+
1904
/**
 * If the movie is playing, recording or refreshing
 * @type boolean
 */
get rendering () {
  return !this.paused || this._renderingFrame
}

/**
 * If the movie is refreshing the current frame
 * @type boolean
 */
get renderingFrame () {
  return this._renderingFrame
}

/**
 * If the movie is recording
 * @type boolean
 */
get recording () {
  // Set while a MediaRecorder is active in record(); cleared in its onstop.
  return !!this._mediaRecorder
}
1927
+
1928
/**
 * The combined duration of all layers
 * @type number
 */
get duration () { // TODO: dirty flag?
  // The end of the latest-ending layer, or 0 when there are no layers.
  return this.layers.reduce((end, layer) => Math.max(layer.startTime + layer.duration, end), 0)
}

/**
 * The layers that make up the movie
 * @type layer.Base[]
 */
get layers () {
  return this._layers
}

// (proxy)
/**
 * Convenience method for <code>layers.push()</code>
 * @param {BaseLayer} layer
 * @return {Movie} the movie (for chaining)
 */
addLayer (layer) {
  this.layers.push(layer); return this
}

/**
 * The movie-level effects
 * @type effect.Base[]
 */
get effects () {
  return this._effects // private (because it's a proxy)
}

/**
 * Convenience method for <code>effects.push()</code>
 * @param {BaseEffect} effect
 * @return {Movie} the movie (for chaining)
 */
addEffect (effect) {
  this.effects.push(effect); return this
}
1968
+
1969
/**
 * Whether playback is paused
 * @type boolean
 */
get paused () {
  return this._paused
}

/**
 * If the playback position is at the end of the movie
 * @type boolean
 */
get ended () {
  return this._ended
}

/**
 * The current playback position, in seconds
 * @type number
 */
get currentTime () {
  return this._currentTime
}
1991
+
1992
+ /**
1993
+ * Sets the current playback position. This is a more powerful version of `set currentTime`.
1994
+ *
1995
+ * @param {number} time - the new cursor's time value in seconds
1996
+ * @param {boolean} [refresh=true] - whether to render a single frame to match new time or not
1997
+ * @return {Promise} resolves when the current frame is rendered if <code>refresh</code> is true,
1998
+ * otherwise resolves immediately
1999
+ *
2000
+ * @todo Refresh ionly f auto-refreshing is enabled
2001
+ */
2002
+ setCurrentTime (time, refresh = true) {
2003
+ return new Promise((resolve, reject) => {
2004
+ this._currentTime = time;
2005
+ publish(this, 'movie.seek', {});
2006
+ if (refresh) {
2007
+ // pass promise callbacks to `refresh`
2008
+ this.refresh().then(resolve).catch(reject);
2009
+ } else {
2010
+ resolve();
2011
+ }
2012
+ })
2013
+ }
2014
+
2015
// Seeks to `time` and re-renders one frame there (fire-and-forget; use
// setCurrentTime() to await the render).
set currentTime (time) {
  this._currentTime = time;
  publish(this, 'movie.seek', {});
  this.refresh(); // render single frame to match new time
}
2020
+
2021
/**
 * The rendering canvas
 * @type HTMLCanvasElement
 */
get canvas () {
  return this._canvas
}

/**
 * The rendering canvas's 2D context
 * @type CanvasRenderingContext2D
 */
get cctx () {
  return this._cctx
}

/**
 * The audio context to which audio is played
 * @type BaseAudioContext
 */
get actx () {
  return this._actx
}

/**
 * The width of the rendering canvas
 * @type number
 */
get width () {
  return this.canvas.width
}

/**
 * The height of the rendering canvas
 * @type number
 */
get height () {
  return this.canvas.height
}

// Resizes the rendering canvas.
set width (width) {
  this.canvas.width = width;
}

set height (height) {
  this.canvas.height = height;
}

// A movie is its own `movie`, so layers and effects can reach it uniformly.
get movie () {
  return this
}
2072
+
2073
/**
 * The default option values applied in the constructor.
 */
getDefaultOptions () {
  return {
    // A fresh audio context per movie (private option, underscore-prefixed).
    _actx: new AudioContext(),
    /**
     * @name module:movie#background
     * @type string
     * @desc The css color for the background, or <code>null</code> for transparency
     */
    background: '#000',
    /**
     * @name module:movie#repeat
     * @type boolean
     */
    repeat: false,
    /**
     * @name module:movie#autoRefresh
     * @type boolean
     * @desc Whether to refresh when changes are made that would affect the current frame
     */
    autoRefresh: true
  }
}
2095
+ }
2096
+
2097
// Event-type id (independent of instance, but easy to access when on prototype chain)
Movie.prototype.type = 'movie';
// Properties excluded from public change-watching — presumably consumed by
// watchPublic; confirm against its implementation.
// TODO: refactor so we don't need to explicitly exclude some of these
Movie.prototype.publicExcludes = ['canvas', 'cctx', 'actx', 'layers', 'effects'];
Movie.prototype.propertyFilters = {};
2102
+
2103
+ /**
2104
+ * @module effect
2105
+ *
2106
+ * @todo Investigate why an effect might run once in the beginning even if its layer isn't at the beginning
2107
+ * @todo Add audio effect support
2108
+ * @todo Move shader source to external files
2109
+ */
2110
+
2111
/**
 * Any effect that modifies the visual contents of a layer.
 *
 * <em>Note: At this time, simply use the <code>actx</code> property of the movie to add audio nodes to a
 * layer's media. TODO: add more audio support, including more types of audio nodes, probably in a
 * different module.</em>
 */
class Base$1 {
  constructor () {
    // Wrap the instance so public property changes emit events; the proxy is
    // what the constructor returns.
    const self = watchPublic(this);

    self.enabled = true;
    self._target = null;

    // Forward modification events up to the attached target.
    subscribe(self, 'effect.change.modify', event => {
      const target = self._target;
      if (!target) {
        return
      }
      const type = `${target.type}.change.effect.modify`;
      publish(target, type, { ...event, target, source: self, type });
    });

    return self
  }

  attach (target) {
    this._target = target;
  }

  detach () {
    this._target = null;
  }

  // subclasses must implement apply
  /**
   * Apply this effect to a target at the given time
   *
   * @param {module:movie|module:layer.Base} target
   * @param {number} reltime - the movie's current time relative to the layer (will soon be replaced with an instance getter)
   * @abstract
   */
  apply (target, reltime) {
    throw new Error('No overriding method found or super.apply was called')
  }

  /**
   * The current time of the target
   * @type number
   */
  get currentTime () {
    if (!this._target) {
      return undefined
    }
    return this._target.currentTime
  }

  get parent () {
    return this._target
  }

  get movie () {
    if (!this._target) {
      return undefined
    }
    return this._target.movie
  }
}
// id for events (independent of instance, but easy to access when on prototype chain)
Base$1.prototype.type = 'effect';
Base$1.prototype.publicExcludes = [];
Base$1.prototype.propertyFilters = {};
2177
+
2178
/**
 * A sequence of effects to apply, treated as one effect. This can be useful for defining reused effect sequences as one effect.
 */
class Stack extends Base$1 {
  constructor (effects) {
    super();

    this._effectsBack = [];
    // BUGFIX: the original traps were `function` expressions, so `this`
    // inside them was the handler object (never the Stack), making
    // `this._target` always undefined in the published events. Arrow
    // functions capture the Stack instance.
    this._effects = new Proxy(this._effectsBack, {
      // (An `apply` trap never fires for an array target; kept for parity.)
      apply: (target, thisArg, argumentsList) => {
        return thisArg[target].apply(this, argumentsList)
      },
      deleteProperty: (target, property) => {
        const value = target[property];
        // Detach the removed effect from this stack's target.
        publish(value, 'effect.detach', { effectTarget: this._target });
        delete target[property];
        return true
      },
      set: (target, property, value) => {
        if (!isNaN(property)) { // if property is a number (index)
          if (target[property]) {
            // NOTE: deleting on the raw target bypasses the deleteProperty
            // trap, so no detach event is published here (pre-existing
            // behavior).
            delete target[property];
          }
          publish(value, 'effect.attach', { effectTarget: this._target }); // Attach effect to movie (first)
        }
        target[property] = value;
        return true
      }
    });
    effects.forEach(effect => this.effects.push(effect));
  }

  attach (movie) {
    super.attach(movie);
    // Re-attach every child effect to the new target.
    this.effects.forEach(effect => {
      effect.detach();
      effect.attach(movie);
    });
  }

  detach () {
    super.detach();
    this.effects.forEach(effect => {
      effect.detach();
    });
  }

  /**
   * Applies each effect in the stack, in order.
   *
   * @param {module:movie|module:layer.Base} target
   * @param {number} reltime
   */
  apply (target, reltime) {
    for (let i = 0; i < this.effects.length; i++) {
      const effect = this.effects[i];
      effect.apply(target, reltime);
    }
  }

  /**
   * @type module:effect.Base[]
   */
  get effects () {
    return this._effects
  }

  /**
   * Convenience method for chaining
   * @param {module:effect.Base} effect - the effect to append
   */
  addEffect (effect) {
    this.effects.push(effect);
    return this
  }
}
2248
+
2249
+ /**
2250
+ * A hardware-accelerated pixel mapping
2251
+ * @todo can `v_TextureCoord` be replaced by `gl_FragUV`
2252
+ */
2253
+ class Shader extends Base$1 {
2254
/**
 * @param {string} fragmentSrc - GLSL fragment shader source (defaults to an identity shader)
 * @param {object} [userUniforms={}] - map of uniform name -> type string (e.g. '1f') or options object
 * @param {object[]} [userTextures=[]] - extra textures, keyed by name
 * @param {object} [sourceTextureOptions={}] - WebGL texture options for the source canvas
 */
constructor (fragmentSrc = Shader._IDENTITY_FRAGMENT_SOURCE, userUniforms = {}, userTextures = [], sourceTextureOptions = {}) {
  super();
  // TODO: split up into multiple methods

  // Compile the program first; the texture/attrib/uniform setup below
  // depends on it.
  const gl = this._initGl();
  this._program = Shader._initShaderProgram(gl, Shader._VERTEX_SOURCE, fragmentSrc);
  this._buffers = Shader._initRectBuffers(gl);

  // NOTE: _initTextures may add entries to userUniforms (createUniform),
  // so it must run before _initUniforms.
  this._initTextures(userUniforms, userTextures, sourceTextureOptions);
  this._initAttribs();
  this._initUniforms(userUniforms);
}
2272
+
2273
+ _initGl () {
2274
+ this._canvas = document.createElement('canvas');
2275
+ const gl = this._canvas.getContext('webgl');
2276
+ if (gl === null) {
2277
+ throw new Error('Unable to initialize WebGL. Your browser or machine may not support it.')
2278
+ }
2279
+ this._gl = gl;
2280
+ return gl
2281
+ }
2282
+
2283
/**
 * Records per-texture options and auto-creates a '1i' uniform per texture.
 * Note: mutates `userUniforms` when a texture has `createUniform` enabled.
 */
_initTextures (userUniforms, userTextures, sourceTextureOptions) {
  const gl = this._gl;
  const maxTextures = gl.getParameter(gl.MAX_TEXTURE_IMAGE_UNITS);
  // NOTE(review): `.length` is only meaningful if userTextures is an array,
  // but below it is iterated like a name->options map — confirm intent.
  if (userTextures.length > maxTextures) {
    console.warn('Too many textures!');
  }
  this._userTextures = {};
  for (const name in userTextures) {
    const userOptions = userTextures[name];
    // Apply default options.
    const options = { ...Shader._DEFAULT_TEXTURE_OPTIONS, ...userOptions };

    if (options.createUniform) {
      // Automatically create a uniform with the same name as this texture, that points to it.
      // This is an easy way for the user to use custom textures, without having to define multiple properties in the effect object.
      if (userUniforms[name]) {
        throw new Error(`Texture - uniform naming conflict: ${name}!`)
      }
      // Add this as a "user uniform".
      userUniforms[name] = '1i'; // texture pointer
    }
    this._userTextures[name] = options;
  }
  this._sourceTextureOptions = { ...Shader._DEFAULT_TEXTURE_OPTIONS, ...sourceTextureOptions };
}
2308
+
2309
/**
 * Looks up the vertex attribute locations used by the program.
 */
_initAttribs () {
  const gl = this._gl;
  this._attribLocations = {
    textureCoord: gl.getAttribLocation(this._program, 'a_TextureCoord')
    // NOTE(review): no `vertexPosition` entry is created here, yet
    // _enablePositionAttrib reads this._attribLocations.vertexPosition
    // (undefined). It appears to work anyway — confirm against the vertex
    // shader before adding an a_VertexPosition lookup.
  };
}
2316
+
2317
+ _initUniforms (userUniforms) {
2318
+ const gl = this._gl;
2319
+ this._uniformLocations = {
2320
+ // modelViewMatrix: gl.getUniformLocation(this._program, "u_ModelViewMatrix"),
2321
+ source: gl.getUniformLocation(this._program, 'u_Source'),
2322
+ size: gl.getUniformLocation(this._program, 'u_Size')
2323
+ };
2324
+ // The options value can just be a string equal to the type of the variable, for syntactic sugar.
2325
+ // If this is the case, convert it to a real options object.
2326
+ this._userUniforms = {};
2327
+ for (const name in userUniforms) {
2328
+ const val = userUniforms[name];
2329
+ this._userUniforms[name] = typeof val === 'string' ? { type: val } : val;
2330
+ }
2331
+ for (const unprefixed in userUniforms) {
2332
+ // property => u_Property
2333
+ const prefixed = 'u_' + unprefixed.charAt(0).toUpperCase() + (unprefixed.length > 1 ? unprefixed.slice(1) : '');
2334
+ this._uniformLocations[unprefixed] = gl.getUniformLocation(this._program, prefixed);
2335
+ }
2336
+ }
2337
+
2338
+ // Not needed, right?
2339
+ /* watchWebGLOptions() {
2340
+ const pubChange = () => {
2341
+ this.publish("change", {});
2342
+ };
2343
+ for (let name in this._userTextures) {
2344
+ watch(this, name, pubChange);
2345
+ }
2346
+ for (let name in this._userUniforms) {
2347
+ watch(this, name, pubChange);
2348
+ }
2349
+ } */
2350
+
2351
/**
 * Runs the shader over the target's canvas and writes the result back to it.
 *
 * @param {module:movie|module:layer.Base} target - anything exposing canvas/cctx
 * @param {number} reltime - current time, relative to the target
 */
apply (target, reltime) {
  const gl = this._gl;
  // Match our framebuffer size to the target before drawing.
  this._checkDimensions(target);
  this._refreshGl();

  this._enablePositionAttrib();
  this._enableTexCoordAttrib();
  this._prepareTextures(target, reltime);

  gl.useProgram(this._program);

  // Uniforms must be set after useProgram.
  this._prepareUniforms(target, reltime);

  this._draw(target);
}
2366
+
2367
+ _checkDimensions (target) {
2368
+ const gl = this._gl;
2369
+ // TODO: Change target.canvas.width => target.width and see if it breaks anything.
2370
+ if (this._canvas.width !== target.canvas.width || this._canvas.height !== target.canvas.height) { // (optimization)
2371
+ this._canvas.width = target.canvas.width;
2372
+ this._canvas.height = target.canvas.height;
2373
+
2374
+ gl.viewport(0, 0, target.canvas.width, target.canvas.height);
2375
+ }
2376
+ }
2377
+
2378
/**
 * Resets per-frame GL state: clear color, blending, depth test, then clears.
 */
_refreshGl () {
  const gl = this._gl;
  gl.clearColor(0, 0, 0, 1); // clear to black; fragments can be made transparent with the blendfunc below
  // gl.clearDepth(1.0); // clear everything
  gl.blendFuncSeparate(gl.SRC_ALPHA, gl.SRC_ALPHA, gl.ONE, gl.ZERO); // idk why I can't multiply rgb by zero
  gl.enable(gl.BLEND);
  gl.disable(gl.DEPTH_TEST); // gl.depthFunc(gl.LEQUAL);

  gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
}
2388
+
2389
/**
 * Binds the quad's position buffer and enables the position attribute.
 */
_enablePositionAttrib () {
  const gl = this._gl;
  // Tell WebGL how to pull out the positions from buffer
  const numComponents = 2;
  const type = gl.FLOAT; // the data in the buffer is 32bit floats
  const normalize = false; // don't normalize
  const stride = 0; // how many bytes to get from one set of values to the next
  // 0 = use type and numComponents above
  const offset = 0; // how many bytes inside the buffer to start from
  gl.bindBuffer(gl.ARRAY_BUFFER, this._buffers.position);
  // NOTE(review): this._attribLocations.vertexPosition is never assigned in
  // _initAttribs, so it is undefined here. The original notes it "works
  // without it" — confirm against the vertex shader before changing.
  gl.vertexAttribPointer(
    this._attribLocations.vertexPosition,
    numComponents,
    type,
    normalize,
    stride,
    offset);
  gl.enableVertexAttribArray(
    this._attribLocations.vertexPosition);
}
2409
+
2410
+ _enableTexCoordAttrib () {
2411
+ const gl = this._gl;
2412
+ // tell webgl how to pull out the texture coordinates from buffer
2413
+ const numComponents = 2; // every coordinate composed of 2 values (uv)
2414
+ const type = gl.FLOAT; // the data in the buffer is 32 bit float
2415
+ const normalize = false; // don't normalize
2416
+ const stride = 0; // how many bytes to get from one set to the next
2417
+ const offset = 0; // how many bytes inside the buffer to start from
2418
+ gl.bindBuffer(gl.ARRAY_BUFFER, this._buffers.textureCoord);
2419
+ gl.vertexAttribPointer(this._attribLocations.textureCoord, numComponents, type, normalize, stride, offset);
2420
+ gl.enableVertexAttribArray(this._attribLocations.textureCoord);
2421
+ }
2422
+
2423
/**
 * Uploads the source canvas to texture unit 0 and each user texture to the
 * units after Shader.INTERNAL_TEXTURE_UNITS.
 */
_prepareTextures (target, reltime) {
  const gl = this._gl;
  // TODO: figure out which properties should be private / public

  // Tell WebGL we want to affect texture unit 0
  // Call `activeTexture` before `_loadTexture` so it won't be bound to the last active texture.
  gl.activeTexture(gl.TEXTURE0);
  this._inputTexture = Shader._loadTexture(gl, target.canvas, this._sourceTextureOptions);
  // Bind the texture to texture unit 0
  gl.bindTexture(gl.TEXTURE_2D, this._inputTexture);

  let i = 0;
  for (const name in this._userTextures) {
    const options = this._userTextures[name];
    // Call `activeTexture` before `_loadTexture` so it won't be bound to the last active texture.
    // TODO: investigate better implementation of `_loadTexture`
    gl.activeTexture(gl.TEXTURE0 + (Shader.INTERNAL_TEXTURE_UNITS + i)); // use the fact that TEXTURE0, TEXTURE1, ... are continuous
    // Re-uploaded every frame so time-varying sources stay current.
    const preparedTex = Shader._loadTexture(gl, val(this, name, reltime), options);
    gl.bindTexture(gl[options.target], preparedTex);
    i++;
  }
}
2445
+
2446
+ _prepareUniforms (target, reltime) {
2447
+ const gl = this._gl;
2448
+ // Set the shader uniforms
2449
+
2450
+ // Tell the shader we bound the texture to texture unit 0
2451
+ // All base (Shader class) uniforms are optional
2452
+ if (this._uniformLocations.source) {
2453
+ gl.uniform1i(this._uniformLocations.source, 0);
2454
+ }
2455
+
2456
+ // All base (Shader class) uniforms are optional
2457
+ if (this._uniformLocations.size) {
2458
+ gl.uniform2iv(this._uniformLocations.size, [target.canvas.width, target.canvas.height]);
2459
+ }
2460
+
2461
+ for (const unprefixed in this._userUniforms) {
2462
+ const options = this._userUniforms[unprefixed];
2463
+ const value = val(this, unprefixed, reltime);
2464
+ const preparedValue = this._prepareValue(value, options.type, reltime, options);
2465
+ const location = this._uniformLocations[unprefixed];
2466
+ gl['uniform' + options.type](location, preparedValue); // haHA JavaScript (`options.type` is "1f", for instance)
2467
+ }
2468
+ gl.uniform1i(this._uniformLocations.test, 0);
2469
+ }
2470
+
2471
+ _draw (target) {
2472
+ const gl = this._gl;
2473
+
2474
+ const offset = 0;
2475
+ const vertexCount = 4;
2476
+ gl.drawArrays(gl.TRIANGLE_STRIP, offset, vertexCount);
2477
+
2478
+ // clear the target, in case the effect outputs transparent pixels
2479
+ target.cctx.clearRect(0, 0, target.canvas.width, target.canvas.height);
2480
+ // copy internal image state onto target
2481
+ target.cctx.drawImage(this._canvas, 0, 0);
2482
+ }
2483
+
2484
/**
 * Converts a value of a standard type for javascript to a standard type for GLSL
 * @param value - the raw value to prepare
 * @param {string} outputType - the WebGL type of |value|; example: <code>1f</code> for a float
 * @param {number} reltime - current time, relative to the target
 * @param {object} [options] - Optional config
 */
_prepareValue (value, outputType, reltime, options = {}) {
  // Fallback for missing color components (e.g. {r, g} without b).
  const def = options.defaultFloatComponent || 0;
  if (outputType === '1i') {
    /*
     * Textures are passed to the shader by both providing the texture (with texImage2D)
     * and setting the |sampler| uniform equal to the index of the texture.
     * In etro shader effects, the subclass passes the names of all the textures ot this base class,
     * along with all the names of uniforms. By default, corresponding uniforms (with the same name) are
     * created for each texture for ease of use. You can also define different texture properties in the
     * javascript effect by setting it identical to the property with the passed texture name.
     * In WebGL, it will be set to the same integer texture unit.
     *
     * To do this, test if |value| is identical to a texture.
     * If so, set it to the texture's index, so the shader can use it.
     */
    let i = 0;
    for (const name in this._userTextures) {
      const testValue = val(this, name, reltime);
      if (value === testValue) {
        value = Shader.INTERNAL_TEXTURE_UNITS + i; // after the internal texture units
      }
      i++;
    }
  }

  if (outputType === '3fv') {
    // allow 4-component vectors; TODO: why?
    if (Array.isArray(value) && (value.length === 3 || value.length === 4)) {
      return value
    }
    // kind of loose so this can be changed if needed
    if (typeof value === 'object') {
      return [
        value.r !== undefined ? value.r : def,
        value.g !== undefined ? value.g : def,
        value.b !== undefined ? value.b : def
      ]
    }

    throw new Error(`Invalid type: ${outputType} or value: ${value}`)
  }

  if (outputType === '4fv') {
    if (Array.isArray(value) && value.length === 4) {
      return value
    }
    // kind of loose so this can be changed if needed
    if (typeof value === 'object') {
      return [
        value.r !== undefined ? value.r : def,
        value.g !== undefined ? value.g : def,
        value.b !== undefined ? value.b : def,
        value.a !== undefined ? value.a : def
      ]
    }

    throw new Error(`Invalid type: ${outputType} or value: ${value}`)
  }

  // All other types (e.g. '1f') pass through unchanged.
  return value
}
2552
+ }
2553
+ // Shader.prototype.getpublicExcludes = () =>
2554
/**
 * Creates the vertex buffers for the full-screen quad.
 */
Shader._initRectBuffers = gl => {
  // Clip-space corners of the output quad, and the matching input UVs
  // (note the flipped v axis).
  const rects = {
    position: [
      // the screen/canvas (output)
      -1.0, 1.0,
      1.0, 1.0,
      -1.0, -1.0,
      1.0, -1.0
    ],
    textureCoord: [
      // the texture/canvas (input)
      0.0, 0.0,
      1.0, 0.0,
      0.0, 1.0,
      1.0, 1.0
    ]
  };
  const buffers = {};
  for (const name in rects) {
    buffers[name] = Shader._initBuffer(gl, rects[name]);
  }
  return buffers
};
2575
/**
 * Creates a WebGL buffer containing the given vertex data
 * (used for the quad covering the screen).
 */
Shader._initBuffer = (gl, data) => {
  const buffer = gl.createBuffer();
  // Select the buffer, then upload the data as 32-bit floats for static drawing.
  gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
  gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(data), gl.STATIC_DRAW);
  return buffer
};
2588
/**
 * Creates a webgl texture from the source.
 * @param {object} [options] - optional WebGL config for texture
 * @param {number} [options.target=gl.TEXTURE_2D]
 * @param {number} [options.level=0]
 * @param {number} [options.internalFormat=gl.RGBA]
 * @param {number} [options.srcFormat=gl.RGBA]
 * @param {number} [options.srcType=gl.UNSIGNED_BYTE]
 * @param {number} [options.wrapS=gl.CLAMP_TO_EDGE]
 * @param {number} [options.wrapT=gl.CLAMP_TO_EDGE]
 * @param {number} [options.minFilter=gl.LINEAR]
 * @param {number} [options.magFilter=gl.LINEAR]
 */
Shader._loadTexture = (gl, source, options = {}) => {
  options = { ...Shader._DEFAULT_TEXTURE_OPTIONS, ...options }; // Apply default options, just in case.
  // Options store WebGL constant *names* (strings), because the user can't
  // access `gl` when creating them; translate each name to its enum here.
  const target = gl[options.target];
  const level = options.level; // mipmap level is a plain number, not an enum
  const internalFormat = gl[options.internalFormat];
  const srcFormat = gl[options.srcFormat];
  const srcType = gl[options.srcType];
  const wrapS = gl[options.wrapS];
  const wrapT = gl[options.wrapT];
  const minFilter = gl[options.minFilter];
  const magFilter = gl[options.magFilter];
  // TODO: figure out how wrap-s and wrap-t interact with mipmaps

  const tex = gl.createTexture();
  gl.bindTexture(target, tex);

  // TODO: figure out how this works with layer width/height

  // TODO: support 3d textures (change texImage2D)
  // set to `source`
  gl.texImage2D(target, level, internalFormat, srcFormat, srcType, source);

  // WebGL1 has different requirements for power of 2 images
  // vs non power of 2 images so check if the image is a
  // power of 2 in both dimensions.
  // Get dimensions by using the fact that all valid inputs for
  // texImage2D must have `width` and `height` properties except
  // videos, which have `videoWidth` and `videoHeight` instead
  // and `ArrayBufferView`, which is one dimensional (so don't
  // worry about mipmaps)
  // Fixed: read the dimensions from `source` (the image/video/canvas), not
  // from `target`, which is a numeric WebGL enum and is never a video
  // element nor has a `width` — the old code always took the mipmap-less
  // branch.
  const w = source instanceof HTMLVideoElement ? source.videoWidth : source.width;
  const h = source instanceof HTMLVideoElement ? source.videoHeight : source.height;
  if ((w && isPowerOf2(w)) && (h && isPowerOf2(h))) {
    // Yes, it's a power of 2. All wrap modes are valid. Generate mips.
    gl.texParameteri(target, gl.TEXTURE_WRAP_S, wrapS);
    gl.texParameteri(target, gl.TEXTURE_WRAP_T, wrapT);
    gl.generateMipmap(target);
  } else {
    // No, it's not a power of 2. Turn off mips and set
    // wrapping to clamp to edge
    if (wrapS !== gl.CLAMP_TO_EDGE || wrapT !== gl.CLAMP_TO_EDGE) {
      console.warn('Wrap mode is not CLAMP_TO_EDGE for a non-power-of-two texture. Defaulting to CLAMP_TO_EDGE');
    }
    gl.texParameteri(target, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
    gl.texParameteri(target, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
    gl.texParameteri(target, gl.TEXTURE_MIN_FILTER, minFilter);
    gl.texParameteri(target, gl.TEXTURE_MAG_FILTER, magFilter);
  }

  return tex
};
2657
// True iff `value` is a power of two. Fixed: the original used the logical
// `&&` operator instead of bitwise `&`, so it returned true only for 1
// (and falsy 0), breaking the mipmap branch in Shader._loadTexture.
// Callers guard against 0 separately (`w && isPowerOf2(w)`).
const isPowerOf2 = value => (value & (value - 1)) === 0;
2658
// https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/Tutorial/Adding_2D_content_to_a_WebGL_context
// Compiles both shader stages, links them into a program, and returns it
// (or null, with a warning, if linking fails).
Shader._initShaderProgram = (gl, vertexSrc, fragmentSrc) => {
  const program = gl.createProgram();
  gl.attachShader(program, Shader._loadShader(gl, gl.VERTEX_SHADER, vertexSrc));
  gl.attachShader(program, Shader._loadShader(gl, gl.FRAGMENT_SHADER, fragmentSrc));
  gl.linkProgram(program);

  // check program creation status
  if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
    console.warn('Unable to link shader program: ' + gl.getProgramInfoLog(program));
    return null
  }

  return program
};
2676
// Compiles a single vertex or fragment shader of the given `type` from
// `source`. Returns the shader, or null (after deleting it and warning)
// when compilation fails.
Shader._loadShader = (gl, type, source) => {
  const shader = gl.createShader(type);
  gl.shaderSource(shader, source);
  gl.compileShader(shader);

  // check compile status (fixed typo in the warning message: 'occured')
  if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
    console.warn('An error occurred compiling shader: ' + gl.getShaderInfoLog(shader));
    gl.deleteShader(shader);
    return null
  }

  return shader
};
2690
/**
 * WebGL texture units consumed by <code>Shader</code>
 */
Shader.INTERNAL_TEXTURE_UNITS = 1;
// Defaults merged into per-texture options by Shader._loadTexture. Values
// are WebGL constant *names* (strings), since `gl` is not available when
// these are declared; they are resolved via gl[name] at load time.
Shader._DEFAULT_TEXTURE_OPTIONS = {
  createUniform: true,
  target: 'TEXTURE_2D',
  level: 0,
  internalFormat: 'RGBA',
  srcFormat: 'RGBA',
  srcType: 'UNSIGNED_BYTE',
  minFilter: 'LINEAR',
  magFilter: 'LINEAR',
  wrapS: 'CLAMP_TO_EDGE',
  wrapT: 'CLAMP_TO_EDGE'
};
// Vertex shader shared by all Shader effects: passes the full-screen quad
// through unchanged and forwards the texture coordinate to the fragment
// stage.
Shader._VERTEX_SOURCE = `
  attribute vec4 a_VertexPosition;
  attribute vec2 a_TextureCoord;

  varying highp vec2 v_TextureCoord;

  void main() {
    // no need for projection or model-view matrices, since we're just rendering a rectangle
    // that fills the screen (see position values)
    gl_Position = a_VertexPosition;
    v_TextureCoord = a_TextureCoord;
  }
`;
// Fragment shader that copies the source texture verbatim.
Shader._IDENTITY_FRAGMENT_SOURCE = `
  precision mediump float;

  uniform sampler2D u_Source;

  varying highp vec2 v_TextureCoord;

  void main() {
    gl_FragColor = texture2D(u_Source, v_TextureCoord);
  }
`;
2730
+
2731
+ /* COLOR & TRANSPARENCY */
2732
+ // TODO: move shader source code to external .js files (with exports)
2733
+
2734
/**
 * Changes the brightness
 */
class Brightness extends Shader {
  /**
   * @param {number} [brightness=0] - the value to add to each pixel's channels [-255, 255]
   */
  constructor (brightness = 0.0) {
    // Fragment shader: add u_Brightness/255 to each color channel, clamped
    // to [0, 1]; alpha is preserved.
    super(`
      precision mediump float;

      uniform sampler2D u_Source;
      uniform float u_Brightness;

      varying highp vec2 v_TextureCoord;

      void main() {
        vec4 color = texture2D(u_Source, v_TextureCoord);
        vec3 rgb = clamp(color.rgb + u_Brightness / 255.0, 0.0, 1.0);
        gl_FragColor = vec4(rgb, color.a);
      }
    `, {
      brightness: '1f' // uniform type: single float
    });
    /**
     * The value to add to each pixel's channels [-255, 255]
     * @type number
     */
    this.brightness = brightness;
  }
}
2765
+
2766
/**
 * Changes the contrast
 */
class Contrast extends Shader {
  /**
   * @param {number} [contrast=1] - the contrast multiplier
   */
  constructor (contrast = 1.0) {
    // Fragment shader: scale each channel's distance from mid-gray (0.5)
    // by u_Contrast, clamped to [0, 1]; alpha is preserved.
    super(`
      precision mediump float;

      uniform sampler2D u_Source;
      uniform float u_Contrast;

      varying highp vec2 v_TextureCoord;

      void main() {
        vec4 color = texture2D(u_Source, v_TextureCoord);
        vec3 rgb = clamp(u_Contrast * (color.rgb - 0.5) + 0.5, 0.0, 1.0);
        gl_FragColor = vec4(rgb, color.a);
      }
    `, {
      contrast: '1f' // uniform type: single float
    });
    /**
     * The contrast multiplier
     * @type number
     */
    this.contrast = contrast;
  }
}
2797
+
2798
/**
 * Multiplies each channel by a different number
 */
class Channels extends Shader {
  /**
   * @param {module:util.Color} factors - channel factors, each defaulting to 1
   */
  constructor (factors = {}) {
    // Fragment shader: component-wise multiply the pixel (including alpha)
    // by u_Factors, clamped to [0, 1].
    super(`
      precision mediump float;

      uniform sampler2D u_Source;
      uniform vec4 u_Factors;

      varying highp vec2 v_TextureCoord;

      void main() {
        vec4 color = texture2D(u_Source, v_TextureCoord);
        gl_FragColor = clamp(u_Factors * color, 0.0, 1.0);
      }
    `, {
      // 4-float vector uniform; unspecified r/g/b/a components fall back
      // to 1 (identity) via defaultFloatComponent.
      factors: { type: '4fv', defaultFloatComponent: 1 }
    });

    /**
     * Channel factors, each defaulting to 1
     * @type module:util.Color
     */
    this.factors = factors;
  }
}
2829
+
2830
/**
 * Reduces alpha for pixels which are close to a specified target color
 */
class ChromaKey extends Shader {
  /**
   * @param {module:util.Color} [target={r: 0, g: 0, b: 0}] - the color to remove
   * @param {number} [threshold=0] - how much error is allowed
   * @param {boolean} [interpolate=false] - true value to interpolate the alpha channel,
   *  or false value for no smoothing (i.e. 255 or 0 alpha)
   * @param {number} [smoothingSharpness=0] - a modifier to lessen the smoothing range, if applicable
   * @todo Use <code>smoothingSharpness</code>
   */
  constructor (target = { r: 0, g: 0, b: 0 }, threshold = 0, interpolate = false/*, smoothingSharpness=0 */) {
    // Fragment shader: compare each pixel against u_Target (given in 0-255
    // channel units) and either zero the alpha inside the threshold
    // (all-or-nothing mode) or set alpha to the mean channel distance
    // (interpolated mode).
    super(`
      precision mediump float;

      uniform sampler2D u_Source;
      uniform vec3 u_Target;
      uniform float u_Threshold;
      uniform bool u_Interpolate;

      varying highp vec2 v_TextureCoord;

      void main() {
        vec4 color = texture2D(u_Source, v_TextureCoord);
        float alpha = color.a;
        vec3 dist = abs(color.rgb - u_Target / 255.0);
        if (!u_Interpolate) {
          // Standard way that most video editors probably use (all-or-nothing method)
          float thresh = u_Threshold / 255.0;
          bool transparent = dist.r <= thresh && dist.g <= thresh && dist.b <= thresh;
          if (transparent)
            alpha = 0.0;
        } else {
          /*
            better way IMHO:
            Take the average of the absolute differences between the pixel and the target for each channel
          */
          float transparency = (dist.r + dist.g + dist.b) / 3.0;
          // TODO: custom or variety of interpolation methods
          alpha = transparency;
        }
        gl_FragColor = vec4(color.rgb, alpha);
      }
    `, {
      target: '3fv', // 3-float vector uniform (r, g, b)
      threshold: '1f',
      interpolate: '1i' // bool is passed as an int uniform
    });
    /**
     * The color to remove
     * @type module:util.Color
     */
    this.target = target;
    /**
     * How much error is allowed
     * @type number
     */
    this.threshold = threshold;
    /**
     * True value to interpolate the alpha channel,
     * or false value for no smoothing (i.e. 255 or 0 alpha)
     * @type boolean
     */
    this.interpolate = interpolate;
    // this.smoothingSharpness = smoothingSharpness;
  }
}
2898
+
2899
+ /* BLUR */
2900
+
2901
/**
 * Applies a Gaussian blur
 *
 * @todo Improve performance
 * @todo Make sure this is truly gaussian even though it doesn't require a standard deviation
 */
class GaussianBlur extends Stack {
  /**
   * @param {number} radius - the blur radius, in pixels
   */
  constructor (radius) {
    // Divide into two shader effects (use the fact that gaussian blurring can be split into components for performance benefits)
    super([
      new GaussianBlurHorizontal(radius),
      new GaussianBlurVertical(radius)
    ]);
  }
}
2916
+
2917
/**
 * Shared class for both horizontal and vertical gaussian blur classes.
 * @todo If radius == 0, don't affect the image (right now, the image goes black).
 */
class GaussianBlurComponent extends Shader {
  /**
   * @param {string} src - fragment src code specific to which component (horizontal or vertical)
   * @param {number} radius
   */
  constructor (src, radius) {
    // `shape` is the kernel texture; sample it with NEAREST filtering so
    // the discrete weights are read back exactly, without interpolation.
    super(src, {
      radius: '1i'
    }, {
      shape: { minFilter: 'NEAREST', magFilter: 'NEAREST' }
    });
    /**
     * @type number
     */
    this.radius = radius;
    // Last radius the kernel was generated for; `undefined` forces a
    // regeneration on the first call to `apply`.
    this._radiusCache = undefined;
  }

  apply (target, reltime) {
    const radiusVal = val(this, 'radius', reltime);
    // Only rebuild the kernel canvas when the (possibly animated) radius
    // actually changed since the last frame.
    if (radiusVal !== this._radiusCache) {
      // Regenerate gaussian distribution.
      this.shape = GaussianBlurComponent.render1DKernel(
        GaussianBlurComponent.gen1DKernel(radiusVal)
      ); // distribution canvas
    }
    this._radiusCache = radiusVal;

    super.apply(target, reltime);
  }
}
// Exclude the generated `shape` kernel canvas from Shader's public
// (user-facing) properties.
GaussianBlurComponent.prototype.publicExcludes = Shader.prototype.publicExcludes.concat(['shape']);
2953
/**
 * Render Gaussian kernel to a canvas for use in shader.
 * @param {number[]} kernel
 * @private
 *
 * @return {HTMLCanvasElement}
 */
GaussianBlurComponent.render1DKernel = kernel => {
  // TODO: Use Float32Array instead of canvas.
  // A width-N, height-1 canvas holds the N kernel weights.
  const canvas = document.createElement('canvas');
  canvas.width = kernel.length;
  canvas.height = 1; // 1-dimensional
  const ctx = canvas.getContext('2d');

  // Encode each weight in the red channel of one pixel; zero out green and
  // blue, and make the pixel fully opaque.
  const imageData = ctx.createImageData(canvas.width, canvas.height);
  kernel.forEach((weight, i) => {
    imageData.data[4 * i + 0] = 255 * weight;
    imageData.data[4 * i + 1] = 0;
    imageData.data[4 * i + 2] = 0;
    imageData.data[4 * i + 3] = 255;
  });
  ctx.putImageData(imageData, 0, 0);

  return canvas
};
2980
// Generates a normalized 1-D Gaussian kernel of width 2*radius + 1,
// approximated by a row of Pascal's triangle.
GaussianBlurComponent.gen1DKernel = radius => {
  const row = GaussianBlurComponent.genPascalRow(2 * radius + 1);
  // Normalize in place so the weights sum to 1.
  // (Plain loops instead of reduce/map to avoid their overhead.)
  let total = 0;
  for (let i = 0; i < row.length; i++) {
    total += row[i];
  }
  for (let i = 0; i < row.length; i++) {
    row[i] /= total;
  }
  return row
};
2992
// Computes the `index`-th row of Pascal's triangle (index 1 -> [1]),
// used as an integer approximation of a Gaussian distribution.
GaussianBlurComponent.genPascalRow = index => {
  if (index < 0) {
    throw new Error(`Invalid index ${index}`)
  }
  let row = [1];
  // Derive each row from the previous one.
  for (let n = 1; n < index; n++) {
    const next = new Array(row.length + 1);
    // The first and last entries of every row are 1.
    next[0] = 1;
    next[next.length - 1] = 1;
    // Interior entries are the sum of the two entries above them.
    for (let k = 1; k < next.length - 1; k++) {
      next[k] = row[k - 1] + row[k];
    }
    row = next;
  }
  return row
};
3009
+
3010
/**
 * Horizontal component of gaussian blur
 */
class GaussianBlurHorizontal extends GaussianBlurComponent {
  /**
   * @param {number} radius
   */
  constructor (radius) {
    // Fragment source: the shared 1-D kernel loop, sampling neighbors
    // along the x axis (see the vec2(i - u_Radius, 0.0) offset).
    super(`
      #define MAX_RADIUS 250

      precision mediump float;

      uniform sampler2D u_Source;
      uniform ivec2 u_Size; // pixel dimensions of input and output
      uniform sampler2D u_Shape; // pseudo one-dimension of blur distribution (would be 1D but webgl doesn't support it)
      uniform int u_Radius; // TODO: support floating-point radii

      varying highp vec2 v_TextureCoord;

      void main() {
        /*
         * Ideally, totalWeight should end up being 1, but due to rounding errors, it sometimes ends up less than 1
         * (I believe JS canvas stores values as integers, which rounds down for the majority of the Gaussian curve)
         * So, normalize by accumulating all the weights and dividing by that.
         */
        float totalWeight = 0.0;
        vec4 avg = vec4(0.0);
        // GLSL can only use constants in for-loop declaration, so start at zero, and stop before 2 * u_Radius + 1,
        // opposed to starting at -u_Radius and stopping _at_ +u_Radius.
        for (int i = 0; i < 2 * MAX_RADIUS + 1; i++) {
          if (i >= 2 * u_Radius + 1)
            break; // GLSL can only use constants in for-loop declaration, so we break here.
          // (2 * u_Radius + 1) is the width of u_Shape, by definition
          float weight = texture2D(u_Shape, vec2(float(i) / float(2 * u_Radius + 1), 0.5)).r; // TODO: use single-channel format
          totalWeight += weight;
          vec4 sample = texture2D(u_Source, v_TextureCoord + vec2(i - u_Radius, 0.0) / vec2(u_Size));
          avg += weight * sample;
        }
        gl_FragColor = avg / totalWeight;
      }
    `, radius);
  }
}
3054
+
3055
/**
 * Vertical component of gaussian blur
 */
class GaussianBlurVertical extends GaussianBlurComponent {
  /**
   * @param {number} radius
   */
  constructor (radius) {
    // Fragment source: identical to the horizontal pass except the sample
    // offset runs along the y axis (vec2(0.0, i - u_Radius)).
    super(`
      #define MAX_RADIUS 250

      precision mediump float;

      uniform sampler2D u_Source;
      uniform ivec2 u_Size; // pixel dimensions of input and output
      uniform sampler2D u_Shape; // pseudo one-dimension of blur distribution (would be 1D but webgl doesn't support it)
      uniform int u_Radius; // TODO: support floating-point radii

      varying highp vec2 v_TextureCoord;

      void main() {
        /*
         * Ideally, totalWeight should end up being 1, but due to rounding errors, it sometimes ends up less than 1
         * (I believe JS canvas stores values as integers, which rounds down for the majority of the Gaussian curve)
         * So, normalize by accumulating all the weights and dividing by that.
         */
        float totalWeight = 0.0;
        vec4 avg = vec4(0.0);
        // GLSL can only use constants in for-loop declaration, so start at zero, and stop before 2 * u_Radius + 1,
        // opposed to starting at -u_Radius and stopping _at_ +u_Radius.
        for (int i = 0; i < 2 * MAX_RADIUS + 1; i++) {
          if (i >= 2 * u_Radius + 1)
            break; // GLSL can only use constants in for-loop declaration, so we break here.
          // (2 * u_Radius + 1) is the width of u_Shape, by definition
          float weight = texture2D(u_Shape, vec2(float(i) / float(2 * u_Radius + 1), 0.5)).r; // TODO: use single-channel format
          totalWeight += weight;
          vec4 sample = texture2D(u_Source, v_TextureCoord + vec2(0.0, i - u_Radius) / vec2(u_Size));
          avg += weight * sample;
        }
        gl_FragColor = avg / totalWeight;
      }
    `, radius);
  }
}
3099
+
3100
/**
 * Makes the target look pixelated
 * @todo just resample with NEAREST interpolation? but how?
 */
class Pixelate extends Shader {
  /**
   * @param {number} pixelSize
   */
  constructor (pixelSize = 1) {
    // Fragment shader: snap each fragment to the center of its
    // pixelSize x pixelSize block and sample the source there.
    super(`
      precision mediump float;

      uniform sampler2D u_Source;
      uniform ivec2 u_Size;
      uniform int u_PixelSize;

      varying highp vec2 v_TextureCoord;

      void main() {
        int ps = u_PixelSize;

        // Snap to nearest block's center
        vec2 loc = vec2(u_Size) * v_TextureCoord; // pixel-space
        vec2 snappedLoc = float(ps) * floor(loc / float(ps));
        vec2 centeredLoc = snappedLoc + vec2(float(u_PixelSize) / 2.0 + 0.5);
        vec2 clampedLoc = clamp(centeredLoc, vec2(0.0), vec2(u_Size));
        gl_FragColor = texture2D(u_Source, clampedLoc / vec2(u_Size));
      }
    `, {
      pixelSize: '1i'
    });
    /**
     * @type number
     */
    this.pixelSize = pixelSize;
  }

  apply (target, reltime) {
    const ps = val(this, 'pixelSize', reltime);
    // Reject fractional and negative sizes before they reach the shader.
    // NOTE(review): ps === 0 passes this check but divides by zero in the
    // shader (`loc / float(ps)`) — confirm whether 0 should be rejected too.
    if (ps % 1 !== 0 || ps < 0) {
      throw new Error('Pixel size must be a nonnegative integer')
    }

    super.apply(target, reltime);
  }
}
3146
+
3147
+ // TODO: implement directional blur
3148
+ // TODO: implement radial blur
3149
+ // TODO: implement zoom blur
3150
+
3151
+ /* DISTORTION */
3152
/**
 * Transforms a layer or movie using a transformation matrix. Use {@link Transform.Matrix}
 * to either A) calculate those values based on a series of translations, scalings and rotations)
 * or B) input the matrix values directly, using the optional argument in the constructor.
 */
class Transform extends Base$1 {
  /**
   * @param {module:effect.Transform.Matrix} matrix - how to transform the target
   */
  constructor (matrix) {
    super();
    /**
     * How to transform the target
     * @type module:effect.Transform.Matrix
     */
    this.matrix = matrix;
    // Scratch objects reused across `apply` calls to avoid reallocating a
    // matrix and a canvas every frame.
    this._tmpMatrix = new Transform.Matrix();
    this._tmpCanvas = document.createElement('canvas');
    this._tmpCtx = this._tmpCanvas.getContext('2d');
  }

  apply (target, reltime) {
    // Resize the scratch canvas lazily, only when the target changed size.
    if (target.canvas.width !== this._tmpCanvas.width) {
      this._tmpCanvas.width = target.canvas.width;
    }
    if (target.canvas.height !== this._tmpCanvas.height) {
      this._tmpCanvas.height = target.canvas.height;
    }
    this._tmpMatrix.data = val(this, 'matrix.data', reltime); // use data, since that's the underlying storage

    // Draw the target through the user's transform onto the scratch canvas.
    this._tmpCtx.setTransform(
      this._tmpMatrix.a, this._tmpMatrix.b, this._tmpMatrix.c,
      this._tmpMatrix.d, this._tmpMatrix.e, this._tmpMatrix.f
    );
    this._tmpCtx.drawImage(target.canvas, 0, 0);
    // Reset the scratch context to the identity transform. Fixed: the old
    // code passed the nine entries of a 3x3 matrix, but canvas setTransform
    // takes six values (a, b, c, d, e, f), so the effective transform was
    // (1, 0, 0, 0, 1, 0) — a degenerate matrix, not identity.
    this._tmpCtx.setTransform(1, 0, 0, 1, 0, 0);
    target.cctx.clearRect(0, 0, target.canvas.width, target.canvas.height);
    target.cctx.drawImage(this._tmpCanvas, 0, 0);
  }
}
3193
/**
 * @class
 * A 3x3 matrix for storing 2d transformations
 */
Transform.Matrix = class Matrix {
  // The 9 entries are stored row-major in `data`, with the translation in
  // the third column (see `translate`): data[3 * row + column].
  constructor (data) {
    this.data = data || [
      1, 0, 0,
      0, 1, 0,
      0, 0, 1
    ];
  }

  // Resets this matrix to the identity, in place.
  identity () {
    for (let i = 0; i < this.data.length; i++) {
      this.data[i] = Transform.Matrix.IDENTITY.data[i];
    }

    return this
  }

  /**
   * Gets (and, when <code>val</code> is given, first sets) the entry at
   * column <code>x</code>, row <code>y</code>.
   * @param {number} x
   * @param {number} y
   * @param {number} [val]
   */
  cell (x, y, val) {
    if (val !== undefined) {
      this.data[3 * y + x] = val;
    }
    return this.data[3 * y + x]
  }

  /* For canvas context setTransform */
  // a: horizontal scaling (row 0, col 0)
  get a () {
    return this.data[0]
  }

  // b: vertical skewing (row 1, col 0)
  get b () {
    return this.data[3]
  }

  // c: horizontal skewing (row 0, col 1)
  get c () {
    return this.data[1]
  }

  // d: vertical scaling (row 1, col 1)
  get d () {
    return this.data[4]
  }

  // e: horizontal translation (row 0, col 2)
  get e () {
    return this.data[2]
  }

  // f: vertical translation (row 1, col 2)
  get f () {
    return this.data[5]
  }

  /**
   * Combines <code>this</code> with another matrix <code>other</code>,
   * storing the product in place. Note the operand order: each result
   * entry (col x, row y) is row y of <code>other</code> dotted with
   * column x of <code>this</code>, i.e. <code>this = other * this</code>.
   * Not reentrant — it uses the module-level TMP_MATRIX scratch.
   * @param other
   */
  multiply (other) {
    // copy to temporary matrix to avoid modifying `this` while reading from it
    // http://www.informit.com/articles/article.aspx?p=98117&seqNum=4
    for (let x = 0; x < 3; x++) {
      for (let y = 0; y < 3; y++) {
        let sum = 0;
        for (let i = 0; i < 3; i++) {
          sum += this.cell(x, i) * other.cell(i, y);
        }
        TMP_MATRIX.cell(x, y, sum);
      }
    }
    // copy data from TMP_MATRIX to this
    for (let i = 0; i < TMP_MATRIX.data.length; i++) {
      this.data[i] = TMP_MATRIX.data[i];
    }
    return this
  }

  /**
   * Applies a translation by (x, y), in place.
   * @param {number} x
   * @param {number} y
   */
  translate (x, y) {
    this.multiply(new Transform.Matrix([
      1, 0, x,
      0, 1, y,
      0, 0, 1
    ]));

    return this
  }

  /**
   * Applies a scaling by (x, y), in place.
   * @param {number} x
   * @param {number} y
   */
  scale (x, y) {
    this.multiply(new Transform.Matrix([
      x, 0, 0,
      0, y, 0,
      0, 0, 1
    ]));

    return this
  }

  /**
   * Applies a rotation, in place.
   * @param {number} a - the angle or rotation in radians
   */
  rotate (a) {
    const c = Math.cos(a); const s = Math.sin(a);
    this.multiply(new Transform.Matrix([
      c, s, 0,
      -s, c, 0,
      0, 0, 1
    ]));

    return this
  }
};
3316
/**
 * The identity matrix
 */
Transform.Matrix.IDENTITY = new Transform.Matrix();
// Module-level scratch matrix used by Matrix#multiply so it can avoid
// reading from and writing to the same matrix during multiplication.
const TMP_MATRIX = new Transform.Matrix();
3321
+
3322
/**
 * Preserves an ellipse of the layer and clears the rest
 * @todo Parent layer mask effects will make more complex masks easier
 */
class EllipticalMask extends Base$1 {
  /**
   * @param {number} x - center x of the ellipse
   * @param {number} y - center y of the ellipse
   * @param {number} radiusX
   * @param {number} radiusY
   * @param {number} [rotation=0] - rotation of the ellipse, in radians
   * @param {number} [startAngle=0]
   * @param {number} [endAngle=2 * Math.PI]
   * @param {boolean} [anticlockwise=false]
   */
  constructor (x, y, radiusX, radiusY, rotation = 0, startAngle = 0, endAngle = 2 * Math.PI, anticlockwise = false) {
    super();
    this.x = x;
    this.y = y;
    this.radiusX = radiusX;
    this.radiusY = radiusY;
    this.rotation = rotation;
    this.startAngle = startAngle;
    this.endAngle = endAngle;
    this.anticlockwise = anticlockwise;
    // for saving image data before clearing
    this._tmpCanvas = document.createElement('canvas');
    this._tmpCtx = this._tmpCanvas.getContext('2d');
  }

  apply (target, reltime) {
    const ctx = target.cctx; const canvas = target.canvas;
    // Fixed: evaluate every property through val(this, 'name', reltime).
    // Previously y, radiusY and endAngle used a stale call form,
    // val(this.prop, target, reltime), inconsistent with every other val()
    // call site in this file (e.g. GaussianBlurComponent, Pixelate).
    const x = val(this, 'x', reltime); const y = val(this, 'y', reltime);
    const radiusX = val(this, 'radiusX', reltime); const radiusY = val(this, 'radiusY', reltime);
    const rotation = val(this, 'rotation', reltime);
    const startAngle = val(this, 'startAngle', reltime); const endAngle = val(this, 'endAngle', reltime);
    const anticlockwise = val(this, 'anticlockwise', reltime);
    // Save the current frame so it can be redrawn inside the clip region.
    this._tmpCanvas.width = target.canvas.width;
    this._tmpCanvas.height = target.canvas.height;
    this._tmpCtx.drawImage(canvas, 0, 0);

    ctx.clearRect(0, 0, canvas.width, canvas.height);
    ctx.save(); // idk how to preserve clipping state without save/restore
    // create elliptical path and clip
    ctx.beginPath();
    ctx.ellipse(x, y, radiusX, radiusY, rotation, startAngle, endAngle, anticlockwise);
    ctx.closePath();
    ctx.clip();
    // render image with clipping state
    ctx.drawImage(this._tmpCanvas, 0, 0);
    ctx.restore();
  }
}
3365
+
3366
// Public members of the effect module; exposed below as `index.effect`.
var effects = /*#__PURE__*/Object.freeze({
  Base: Base$1,
  Stack: Stack,
  Shader: Shader,
  Brightness: Brightness,
  Contrast: Contrast,
  Channels: Channels,
  ChromaKey: ChromaKey,
  GaussianBlur: GaussianBlur,
  GaussianBlurHorizontal: GaussianBlurHorizontal,
  GaussianBlurVertical: GaussianBlurVertical,
  Pixelate: Pixelate,
  Transform: Transform,
  EllipticalMask: EllipticalMask
});
3381
+
3382
/**
 * The entry point
 * @module index
 */

var index = {
  Movie: Movie,
  layer: layers,
  effect: effects,
  event,
  // Expose every util export at the top level of the library's namespace.
  ...util
};
3394
+
3395
+ return index;
3396
+
3397
+ }());