@loaders.gl/video 4.0.0-alpha.5 → 4.0.0-alpha.6

This diff compares the contents of publicly available package versions as published to one of the supported registries, and is provided for informational purposes only.
Files changed (59)
  1. package/dist/bundle.js +2 -2
  2. package/dist/dist.min.js +23 -21
  3. package/dist/es5/bundle.js +6 -0
  4. package/dist/es5/bundle.js.map +1 -0
  5. package/dist/es5/gif-builder.js +200 -0
  6. package/dist/es5/gif-builder.js.map +1 -0
  7. package/dist/es5/index.js +21 -0
  8. package/dist/es5/index.js.map +1 -0
  9. package/dist/es5/lib/gifshot/gifshot-loader.js +67 -0
  10. package/dist/es5/lib/gifshot/gifshot-loader.js.map +1 -0
  11. package/dist/es5/lib/gifshot/gifshot.js +1924 -0
  12. package/dist/es5/lib/gifshot/gifshot.js.map +1 -0
  13. package/dist/es5/lib/parsers/parse-video.js +31 -0
  14. package/dist/es5/lib/parsers/parse-video.js.map +1 -0
  15. package/dist/es5/lib/utils/assert.js +12 -0
  16. package/dist/es5/lib/utils/assert.js.map +1 -0
  17. package/dist/es5/video-loader.js +28 -0
  18. package/dist/es5/video-loader.js.map +1 -0
  19. package/dist/esm/bundle.js +4 -0
  20. package/dist/esm/bundle.js.map +1 -0
  21. package/dist/esm/gif-builder.js +106 -0
  22. package/dist/esm/gif-builder.js.map +1 -0
  23. package/dist/esm/index.js +3 -0
  24. package/dist/esm/index.js.map +1 -0
  25. package/dist/esm/lib/gifshot/gifshot-loader.js +17 -0
  26. package/dist/esm/lib/gifshot/gifshot-loader.js.map +1 -0
  27. package/dist/esm/lib/gifshot/gifshot.js +1915 -0
  28. package/dist/esm/lib/gifshot/gifshot.js.map +1 -0
  29. package/dist/esm/lib/parsers/parse-video.js +7 -0
  30. package/dist/esm/lib/parsers/parse-video.js.map +1 -0
  31. package/dist/esm/lib/utils/assert.js +6 -0
  32. package/dist/esm/lib/utils/assert.js.map +1 -0
  33. package/dist/esm/video-loader.js +19 -0
  34. package/dist/esm/video-loader.js.map +1 -0
  35. package/dist/gif-builder.js +136 -114
  36. package/dist/index.js +10 -3
  37. package/dist/lib/gifshot/gifshot-loader.js +18 -15
  38. package/dist/lib/gifshot/gifshot.d.ts.map +1 -1
  39. package/dist/lib/gifshot/gifshot.js +2344 -2089
  40. package/dist/lib/parsers/parse-video.js +12 -6
  41. package/dist/lib/utils/assert.js +8 -5
  42. package/dist/video-loader.d.ts +1 -1
  43. package/dist/video-loader.d.ts.map +1 -1
  44. package/dist/video-loader.js +22 -14
  45. package/package.json +6 -6
  46. package/src/lib/gifshot/gifshot.ts +33 -32
  47. package/dist/bundle.js.map +0 -1
  48. package/dist/gif-builder.js.map +0 -1
  49. package/dist/index.js.map +0 -1
  50. package/dist/lib/gifshot/gifshot-loader.js.map +0 -1
  51. package/dist/lib/gifshot/gifshot.js.map +0 -1
  52. package/dist/lib/parsers/parse-video.js.map +0 -1
  53. package/dist/lib/utils/assert.js.map +0 -1
  54. package/dist/lib/utils/globals.d.ts +0 -10
  55. package/dist/lib/utils/globals.d.ts.map +0 -1
  56. package/dist/lib/utils/globals.js +0 -16
  57. package/dist/lib/utils/globals.js.map +0 -1
  58. package/dist/video-loader.js.map +0 -1
  59. package/src/lib/utils/globals.ts +0 -25
@@ -1,2184 +1,2439 @@
1
+ "use strict";
2
+ // @ts-nocheck
3
+ /* eslint-disable */
4
+ Object.defineProperty(exports, "__esModule", { value: true });
5
+ /* Copyrights for code authored by Yahoo Inc. is licensed under the following terms:
6
+ MIT License
7
+ Copyright 2017 Yahoo Inc.
8
+ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
9
+ The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
10
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
11
+ */
12
+ /*
13
+ utils.js
14
+ ========
15
+ */
16
+ /* Copyright 2017 Yahoo Inc.
17
+ * Copyrights licensed under the MIT License. See the accompanying LICENSE file for terms.
18
+ */
1
19
  var utils = {
2
- URL: window.URL || window.webkitURL || window.mozURL || window.msURL,
3
- getUserMedia: function () {
4
- const getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
5
- return getUserMedia ? getUserMedia.bind(navigator) : getUserMedia;
6
- }(),
7
- requestAnimFrame: window.requestAnimationFrame || window.webkitRequestAnimationFrame || window.mozRequestAnimationFrame || window.oRequestAnimationFrame || window.msRequestAnimationFrame,
8
- requestTimeout: function requestTimeout(callback, delay) {
9
- callback = callback || utils.noop;
10
- delay = delay || 0;
11
-
12
- if (!utils.requestAnimFrame) {
13
- return setTimeout(callback, delay);
14
- }
15
-
16
- const start = new Date().getTime();
17
- const handle = new Object();
18
- const requestAnimFrame = utils.requestAnimFrame;
19
-
20
- const loop = function loop() {
21
- const current = new Date().getTime();
22
- const delta = current - start;
23
- delta >= delay ? callback.call() : handle.value = requestAnimFrame(loop);
24
- };
25
-
26
- handle.value = requestAnimFrame(loop);
27
- return handle;
28
- },
29
- Blob: window.Blob || window.BlobBuilder || window.WebKitBlobBuilder || window.MozBlobBuilder || window.MSBlobBuilder,
30
- btoa: function () {
31
- const btoa = window.btoa || function (input) {
32
- let output = '';
33
- let i = 0;
34
- const l = input.length;
35
- const key = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=';
36
- let chr1 = void 0;
37
- let chr2 = void 0;
38
- let chr3 = void 0;
39
- let enc1 = void 0;
40
- let enc2 = void 0;
41
- let enc3 = void 0;
42
- let enc4 = void 0;
43
-
44
- while (i < l) {
45
- chr1 = input.charCodeAt(i++);
46
- chr2 = input.charCodeAt(i++);
47
- chr3 = input.charCodeAt(i++);
48
- enc1 = chr1 >> 2;
49
- enc2 = (chr1 & 3) << 4 | chr2 >> 4;
50
- enc3 = (chr2 & 15) << 2 | chr3 >> 6;
51
- enc4 = chr3 & 63;
52
-
53
- if (isNaN(chr2)) {
54
- enc3 = enc4 = 64;
55
- } else if (isNaN(chr3)) {
56
- enc4 = 64;
57
- }
58
-
59
- output = output + key.charAt(enc1) + key.charAt(enc2) + key.charAt(enc3) + key.charAt(enc4);
60
- }
61
-
62
- return output;
63
- };
64
-
65
- return btoa ? btoa.bind(window) : utils.noop;
66
- }(),
67
- isObject: function isObject(obj) {
68
- return obj && Object.prototype.toString.call(obj) === '[object Object]';
69
- },
70
- isEmptyObject: function isEmptyObject(obj) {
71
- return utils.isObject(obj) && !Object.keys(obj).length;
72
- },
73
- isArray: function isArray(arr) {
74
- return arr && Array.isArray(arr);
75
- },
76
- isFunction: function isFunction(func) {
77
- return func && typeof func === 'function';
78
- },
79
- isElement: function isElement(elem) {
80
- return elem && elem.nodeType === 1;
81
- },
82
- isString: function isString(value) {
83
- return typeof value === 'string' || Object.prototype.toString.call(value) === '[object String]';
84
- },
85
- isSupported: {
86
- canvas: function canvas() {
87
- const el = document.createElement('canvas');
88
- return el && el.getContext && el.getContext('2d');
20
+ URL: globalThis.URL || globalThis.webkitURL || globalThis.mozURL || globalThis.msURL,
21
+ getUserMedia: (function () {
22
+ if (!globalThis.navigator)
23
+ return globalThis.navigator;
24
+ const getUserMedia = globalThis.navigator.getUserMedia ||
25
+ globalThis.navigator.webkitGetUserMedia ||
26
+ globalThis.navigator.mozGetUserMedia ||
27
+ globalThis.navigator.msGetUserMedia;
28
+ return getUserMedia ? getUserMedia.bind(globalThis.navigator) : getUserMedia;
29
+ })(),
30
+ requestAnimFrame: globalThis.requestAnimationFrame ||
31
+ globalThis.webkitRequestAnimationFrame ||
32
+ globalThis.mozRequestAnimationFrame ||
33
+ globalThis.oRequestAnimationFrame ||
34
+ globalThis.msRequestAnimationFrame,
35
+ requestTimeout: function requestTimeout(callback, delay) {
36
+ callback = callback || utils.noop;
37
+ delay = delay || 0;
38
+ if (!utils.requestAnimFrame) {
39
+ return setTimeout(callback, delay);
40
+ }
41
+ const start = new Date().getTime();
42
+ const handle = new Object();
43
+ const requestAnimFrame = utils.requestAnimFrame;
44
+ const loop = function loop() {
45
+ const current = new Date().getTime();
46
+ const delta = current - start;
47
+ delta >= delay ? callback.call() : (handle.value = requestAnimFrame(loop));
48
+ };
49
+ handle.value = requestAnimFrame(loop);
50
+ return handle;
89
51
  },
90
- webworkers: function webworkers() {
91
- return window.Worker;
52
+ Blob: globalThis.Blob ||
53
+ globalThis.BlobBuilder ||
54
+ globalThis.WebKitBlobBuilder ||
55
+ globalThis.MozBlobBuilder ||
56
+ globalThis.MSBlobBuilder,
57
+ btoa: (function () {
58
+ const btoa = globalThis.btoa ||
59
+ function (input) {
60
+ let output = '';
61
+ let i = 0;
62
+ const l = input.length;
63
+ const key = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=';
64
+ let chr1 = void 0;
65
+ let chr2 = void 0;
66
+ let chr3 = void 0;
67
+ let enc1 = void 0;
68
+ let enc2 = void 0;
69
+ let enc3 = void 0;
70
+ let enc4 = void 0;
71
+ while (i < l) {
72
+ chr1 = input.charCodeAt(i++);
73
+ chr2 = input.charCodeAt(i++);
74
+ chr3 = input.charCodeAt(i++);
75
+ enc1 = chr1 >> 2;
76
+ enc2 = ((chr1 & 3) << 4) | (chr2 >> 4);
77
+ enc3 = ((chr2 & 15) << 2) | (chr3 >> 6);
78
+ enc4 = chr3 & 63;
79
+ if (isNaN(chr2)) {
80
+ enc3 = enc4 = 64;
81
+ }
82
+ else if (isNaN(chr3)) {
83
+ enc4 = 64;
84
+ }
85
+ output =
86
+ output + key.charAt(enc1) + key.charAt(enc2) + key.charAt(enc3) + key.charAt(enc4);
87
+ }
88
+ return output;
89
+ };
90
+ return btoa ? btoa.bind(globalThis) : utils.noop;
91
+ })(),
92
+ isObject: function isObject(obj) {
93
+ return obj && Object.prototype.toString.call(obj) === '[object Object]';
92
94
  },
93
- blob: function blob() {
94
- return utils.Blob;
95
+ isEmptyObject: function isEmptyObject(obj) {
96
+ return utils.isObject(obj) && !Object.keys(obj).length;
95
97
  },
96
- Uint8Array: function Uint8Array() {
97
- return window.Uint8Array;
98
+ isArray: function isArray(arr) {
99
+ return arr && Array.isArray(arr);
98
100
  },
99
- Uint32Array: function Uint32Array() {
100
- return window.Uint32Array;
101
+ isFunction: function isFunction(func) {
102
+ return func && typeof func === 'function';
101
103
  },
102
- videoCodecs: function () {
103
- const testEl = document.createElement('video');
104
- const supportObj = {
105
- mp4: false,
106
- h264: false,
107
- ogv: false,
108
- ogg: false,
109
- webm: false
110
- };
111
-
112
- try {
113
- if (testEl && testEl.canPlayType) {
114
- supportObj.mp4 = testEl.canPlayType('video/mp4; codecs="mp4v.20.8"') !== '';
115
- supportObj.h264 = (testEl.canPlayType('video/mp4; codecs="avc1.42E01E"') || testEl.canPlayType('video/mp4; codecs="avc1.42E01E, mp4a.40.2"')) !== '';
116
- supportObj.ogv = testEl.canPlayType('video/ogg; codecs="theora"') !== '';
117
- supportObj.ogg = testEl.canPlayType('video/ogg; codecs="theora"') !== '';
118
- supportObj.webm = testEl.canPlayType('video/webm; codecs="vp8, vorbis"') !== -1;
119
- }
120
- } catch (e) {}
121
-
122
- return supportObj;
123
- }()
124
- },
125
- noop: function noop() {},
126
- each: function each(collection, callback) {
127
- let x = void 0;
128
- let len = void 0;
129
-
130
- if (utils.isArray(collection)) {
131
- x = -1;
132
- len = collection.length;
133
-
134
- while (++x < len) {
135
- if (callback(x, collection[x]) === false) {
136
- break;
137
- }
138
- }
139
- } else if (utils.isObject(collection)) {
140
- for (x in collection) {
141
- if (collection.hasOwnProperty(x)) {
142
- if (callback(x, collection[x]) === false) {
143
- break;
144
- }
145
- }
146
- }
147
- }
148
- },
149
- normalizeOptions: function normalizeOptions(defaultOptions, userOptions) {
150
- if (!utils.isObject(defaultOptions) || !utils.isObject(userOptions) || !Object.keys) {
151
- return;
152
- }
153
-
154
- const newObj = {};
155
- utils.each(defaultOptions, function (key, val) {
156
- newObj[key] = defaultOptions[key];
157
- });
158
- utils.each(userOptions, function (key, val) {
159
- const currentUserOption = userOptions[key];
160
-
161
- if (!utils.isObject(currentUserOption)) {
162
- newObj[key] = currentUserOption;
163
- } else if (!defaultOptions[key]) {
164
- newObj[key] = currentUserOption;
165
- } else {
166
- newObj[key] = utils.normalizeOptions(defaultOptions[key], currentUserOption);
167
- }
168
- });
169
- return newObj;
170
- },
171
- setCSSAttr: function setCSSAttr(elem, attr, val) {
172
- if (!utils.isElement(elem)) {
173
- return;
174
- }
175
-
176
- if (utils.isString(attr) && utils.isString(val)) {
177
- elem.style[attr] = val;
178
- } else if (utils.isObject(attr)) {
179
- utils.each(attr, function (key, val) {
180
- elem.style[key] = val;
181
- });
182
- }
183
- },
184
- removeElement: function removeElement(node) {
185
- if (!utils.isElement(node)) {
186
- return;
187
- }
188
-
189
- if (node.parentNode) {
190
- node.parentNode.removeChild(node);
191
- }
192
- },
193
- createWebWorker: function createWebWorker(content) {
194
- if (!utils.isString(content)) {
195
- return {};
196
- }
197
-
198
- try {
199
- const blob = new utils.Blob([content], {
200
- type: 'text/javascript'
201
- });
202
- const objectUrl = utils.URL.createObjectURL(blob);
203
- const worker = new Worker(objectUrl);
204
- return {
205
- objectUrl,
206
- worker
207
- };
208
- } catch (e) {
209
- return "".concat(e);
210
- }
211
- },
212
- getExtension: function getExtension(src) {
213
- return src.substr(src.lastIndexOf('.') + 1, src.length);
214
- },
215
- getFontSize: function getFontSize() {
216
- const options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
217
-
218
- if (!document.body || options.resizeFont === false) {
219
- return options.fontSize;
220
- }
221
-
222
- const text = options.text;
223
- const containerWidth = options.gifWidth;
224
- let fontSize = parseInt(options.fontSize, 10);
225
- const minFontSize = parseInt(options.minFontSize, 10);
226
- const div = document.createElement('div');
227
- const span = document.createElement('span');
228
- div.setAttribute('width', containerWidth);
229
- div.appendChild(span);
230
- span.innerHTML = text;
231
- span.style.fontSize = "".concat(fontSize, "px");
232
- span.style.textIndent = '-9999px';
233
- span.style.visibility = 'hidden';
234
- document.body.appendChild(span);
235
-
236
- while (span.offsetWidth > containerWidth && fontSize >= minFontSize) {
237
- span.style.fontSize = "".concat(--fontSize, "px");
238
- }
239
-
240
- document.body.removeChild(span);
241
- return "".concat(fontSize, "px");
242
- },
243
- webWorkerError: false
104
+ isElement: function isElement(elem) {
105
+ return elem && elem.nodeType === 1;
106
+ },
107
+ isString: function isString(value) {
108
+ return typeof value === 'string' || Object.prototype.toString.call(value) === '[object String]';
109
+ },
110
+ isSupported: {
111
+ canvas: function canvas() {
112
+ const el = document.createElement('canvas');
113
+ return el && el.getContext && el.getContext('2d');
114
+ },
115
+ webworkers: function webworkers() {
116
+ return globalThis.Worker;
117
+ },
118
+ blob: function blob() {
119
+ return utils.Blob;
120
+ },
121
+ Uint8Array: function Uint8Array() {
122
+ return globalThis.Uint8Array;
123
+ },
124
+ Uint32Array: function Uint32Array() {
125
+ return globalThis.Uint32Array;
126
+ },
127
+ videoCodecs: (function () {
128
+ const testEl = document.createElement('video');
129
+ const supportObj = {
130
+ mp4: false,
131
+ h264: false,
132
+ ogv: false,
133
+ ogg: false,
134
+ webm: false
135
+ };
136
+ try {
137
+ if (testEl && testEl.canPlayType) {
138
+ // Check for MPEG-4 support
139
+ supportObj.mp4 = testEl.canPlayType('video/mp4; codecs="mp4v.20.8"') !== '';
140
+ // Check for h264 support
141
+ supportObj.h264 =
142
+ (testEl.canPlayType('video/mp4; codecs="avc1.42E01E"') ||
143
+ testEl.canPlayType('video/mp4; codecs="avc1.42E01E, mp4a.40.2"')) !== '';
144
+ // Check for Ogv support
145
+ supportObj.ogv = testEl.canPlayType('video/ogg; codecs="theora"') !== '';
146
+ // Check for Ogg support
147
+ supportObj.ogg = testEl.canPlayType('video/ogg; codecs="theora"') !== '';
148
+ // Check for Webm support
149
+ supportObj.webm = testEl.canPlayType('video/webm; codecs="vp8, vorbis"') !== -1;
150
+ }
151
+ }
152
+ catch (e) { }
153
+ return supportObj;
154
+ })()
155
+ },
156
+ noop: function noop() { },
157
+ each: function each(collection, callback) {
158
+ let x = void 0;
159
+ let len = void 0;
160
+ if (utils.isArray(collection)) {
161
+ x = -1;
162
+ len = collection.length;
163
+ while (++x < len) {
164
+ if (callback(x, collection[x]) === false) {
165
+ break;
166
+ }
167
+ }
168
+ }
169
+ else if (utils.isObject(collection)) {
170
+ for (x in collection) {
171
+ if (collection.hasOwnProperty(x)) {
172
+ if (callback(x, collection[x]) === false) {
173
+ break;
174
+ }
175
+ }
176
+ }
177
+ }
178
+ },
179
+ normalizeOptions: function normalizeOptions(defaultOptions, userOptions) {
180
+ if (!utils.isObject(defaultOptions) || !utils.isObject(userOptions) || !Object.keys) {
181
+ return;
182
+ }
183
+ const newObj = {};
184
+ utils.each(defaultOptions, function (key, val) {
185
+ newObj[key] = defaultOptions[key];
186
+ });
187
+ utils.each(userOptions, function (key, val) {
188
+ const currentUserOption = userOptions[key];
189
+ if (!utils.isObject(currentUserOption)) {
190
+ newObj[key] = currentUserOption;
191
+ }
192
+ else if (!defaultOptions[key]) {
193
+ newObj[key] = currentUserOption;
194
+ }
195
+ else {
196
+ newObj[key] = utils.normalizeOptions(defaultOptions[key], currentUserOption);
197
+ }
198
+ });
199
+ return newObj;
200
+ },
201
+ setCSSAttr: function setCSSAttr(elem, attr, val) {
202
+ if (!utils.isElement(elem)) {
203
+ return;
204
+ }
205
+ if (utils.isString(attr) && utils.isString(val)) {
206
+ elem.style[attr] = val;
207
+ }
208
+ else if (utils.isObject(attr)) {
209
+ utils.each(attr, function (key, val) {
210
+ elem.style[key] = val;
211
+ });
212
+ }
213
+ },
214
+ removeElement: function removeElement(node) {
215
+ if (!utils.isElement(node)) {
216
+ return;
217
+ }
218
+ if (node.parentNode) {
219
+ node.parentNode.removeChild(node);
220
+ }
221
+ },
222
+ createWebWorker: function createWebWorker(content) {
223
+ if (!utils.isString(content)) {
224
+ return {};
225
+ }
226
+ try {
227
+ const blob = new utils.Blob([content], {
228
+ type: 'text/javascript'
229
+ });
230
+ const objectUrl = utils.URL.createObjectURL(blob);
231
+ const worker = new Worker(objectUrl);
232
+ return {
233
+ objectUrl,
234
+ worker
235
+ };
236
+ }
237
+ catch (e) {
238
+ return `${e}`;
239
+ }
240
+ },
241
+ getExtension: function getExtension(src) {
242
+ return src.substr(src.lastIndexOf('.') + 1, src.length);
243
+ },
244
+ getFontSize: function getFontSize() {
245
+ const options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
246
+ if (!document.body || options.resizeFont === false) {
247
+ return options.fontSize;
248
+ }
249
+ const text = options.text;
250
+ const containerWidth = options.gifWidth;
251
+ let fontSize = parseInt(options.fontSize, 10);
252
+ const minFontSize = parseInt(options.minFontSize, 10);
253
+ const div = document.createElement('div');
254
+ const span = document.createElement('span');
255
+ div.setAttribute('width', containerWidth);
256
+ div.appendChild(span);
257
+ span.innerHTML = text;
258
+ span.style.fontSize = `${fontSize}px`;
259
+ span.style.textIndent = '-9999px';
260
+ span.style.visibility = 'hidden';
261
+ document.body.appendChild(span);
262
+ while (span.offsetWidth > containerWidth && fontSize >= minFontSize) {
263
+ span.style.fontSize = `${--fontSize}px`;
264
+ }
265
+ document.body.removeChild(span);
266
+ return `${fontSize}px`;
267
+ },
268
+ webWorkerError: false
244
269
  };
245
270
  const utils$2 = Object.freeze({
246
- default: utils
271
+ default: utils
247
272
  });
273
+ /*
274
+ error.js
275
+ ========
276
+ */
277
+ /* Copyright 2017 Yahoo Inc.
278
+ * Copyrights licensed under the MIT License. See the accompanying LICENSE file for terms.
279
+ */
280
+ // Dependencies
248
281
  var error = {
249
- validate: function validate(skipObj) {
250
- skipObj = utils.isObject(skipObj) ? skipObj : {};
251
- let errorObj = {};
252
- utils.each(error.validators, function (indece, currentValidator) {
253
- const errorCode = currentValidator.errorCode;
254
-
255
- if (!skipObj[errorCode] && !currentValidator.condition) {
256
- errorObj = currentValidator;
257
- errorObj.error = true;
258
- return false;
259
- }
260
- });
261
- delete errorObj.condition;
262
- return errorObj;
263
- },
264
- isValid: function isValid(skipObj) {
265
- const errorObj = error.validate(skipObj);
266
- const isValid = errorObj.error !== true;
267
- return isValid;
268
- },
269
- validators: [{
270
- condition: utils.isFunction(utils.getUserMedia),
271
- errorCode: 'getUserMedia',
272
- errorMsg: 'The getUserMedia API is not supported in your browser'
273
- }, {
274
- condition: utils.isSupported.canvas(),
275
- errorCode: 'canvas',
276
- errorMsg: 'Canvas elements are not supported in your browser'
277
- }, {
278
- condition: utils.isSupported.webworkers(),
279
- errorCode: 'webworkers',
280
- errorMsg: 'The Web Workers API is not supported in your browser'
281
- }, {
282
- condition: utils.isFunction(utils.URL),
283
- errorCode: 'window.URL',
284
- errorMsg: 'The window.URL API is not supported in your browser'
285
- }, {
286
- condition: utils.isSupported.blob(),
287
- errorCode: 'window.Blob',
288
- errorMsg: 'The window.Blob File API is not supported in your browser'
289
- }, {
290
- condition: utils.isSupported.Uint8Array(),
291
- errorCode: 'window.Uint8Array',
292
- errorMsg: 'The window.Uint8Array function constructor is not supported in your browser'
293
- }, {
294
- condition: utils.isSupported.Uint32Array(),
295
- errorCode: 'window.Uint32Array',
296
- errorMsg: 'The window.Uint32Array function constructor is not supported in your browser'
297
- }],
298
- messages: {
299
- videoCodecs: {
300
- errorCode: 'videocodec',
301
- errorMsg: 'The video codec you are trying to use is not supported in your browser'
282
+ validate: function validate(skipObj) {
283
+ skipObj = utils.isObject(skipObj) ? skipObj : {};
284
+ let errorObj = {};
285
+ utils.each(error.validators, function (indece, currentValidator) {
286
+ const errorCode = currentValidator.errorCode;
287
+ if (!skipObj[errorCode] && !currentValidator.condition) {
288
+ errorObj = currentValidator;
289
+ errorObj.error = true;
290
+ return false;
291
+ }
292
+ });
293
+ delete errorObj.condition;
294
+ return errorObj;
295
+ },
296
+ isValid: function isValid(skipObj) {
297
+ const errorObj = error.validate(skipObj);
298
+ const isValid = errorObj.error !== true;
299
+ return isValid;
300
+ },
301
+ validators: [
302
+ {
303
+ condition: utils.isFunction(utils.getUserMedia),
304
+ errorCode: 'getUserMedia',
305
+ errorMsg: 'The getUserMedia API is not supported in your browser'
306
+ },
307
+ {
308
+ condition: utils.isSupported.canvas(),
309
+ errorCode: 'canvas',
310
+ errorMsg: 'Canvas elements are not supported in your browser'
311
+ },
312
+ {
313
+ condition: utils.isSupported.webworkers(),
314
+ errorCode: 'webworkers',
315
+ errorMsg: 'The Web Workers API is not supported in your browser'
316
+ },
317
+ {
318
+ condition: utils.isFunction(utils.URL),
319
+ errorCode: 'globalThis.URL',
320
+ errorMsg: 'The globalThis.URL API is not supported in your browser'
321
+ },
322
+ {
323
+ condition: utils.isSupported.blob(),
324
+ errorCode: 'globalThis.Blob',
325
+ errorMsg: 'The globalThis.Blob File API is not supported in your browser'
326
+ },
327
+ {
328
+ condition: utils.isSupported.Uint8Array(),
329
+ errorCode: 'globalThis.Uint8Array',
330
+ errorMsg: 'The globalThis.Uint8Array function constructor is not supported in your browser'
331
+ },
332
+ {
333
+ condition: utils.isSupported.Uint32Array(),
334
+ errorCode: 'globalThis.Uint32Array',
335
+ errorMsg: 'The globalThis.Uint32Array function constructor is not supported in your browser'
336
+ }
337
+ ],
338
+ messages: {
339
+ videoCodecs: {
340
+ errorCode: 'videocodec',
341
+ errorMsg: 'The video codec you are trying to use is not supported in your browser'
342
+ }
302
343
  }
303
- }
304
344
  };
305
345
  const error$2 = Object.freeze({
306
- default: error
346
+ default: error
307
347
  });
308
-
309
- const noop = function noop() {};
310
-
348
+ /*
349
+ defaultOptions.js
350
+ =================
351
+ */
352
+ /* Copyright 2017 Yahoo Inc.
353
+ * Copyrights licensed under the MIT License. See the accompanying LICENSE file for terms.
354
+ */
355
+ // Helpers
356
+ const noop = function noop() { };
311
357
  const defaultOptions = {
312
- sampleInterval: 10,
313
- numWorkers: 2,
314
- filter: '',
315
- gifWidth: 200,
316
- gifHeight: 200,
317
- interval: 0.1,
318
- numFrames: 10,
319
- frameDuration: 1,
320
- keepCameraOn: false,
321
- images: [],
322
- video: null,
323
- webcamVideoElement: null,
324
- cameraStream: null,
325
- text: '',
326
- fontWeight: 'normal',
327
- fontSize: '16px',
328
- minFontSize: '10px',
329
- resizeFont: false,
330
- fontFamily: 'sans-serif',
331
- fontColor: '#ffffff',
332
- textAlign: 'center',
333
- textBaseline: 'bottom',
334
- textXCoordinate: null,
335
- textYCoordinate: null,
336
- progressCallback: noop,
337
- completeCallback: noop,
338
- saveRenderingContexts: false,
339
- savedRenderingContexts: [],
340
- crossOrigin: 'Anonymous'
358
+ sampleInterval: 10,
359
+ numWorkers: 2,
360
+ filter: '',
361
+ gifWidth: 200,
362
+ gifHeight: 200,
363
+ interval: 0.1,
364
+ numFrames: 10,
365
+ frameDuration: 1,
366
+ keepCameraOn: false,
367
+ images: [],
368
+ video: null,
369
+ webcamVideoElement: null,
370
+ cameraStream: null,
371
+ text: '',
372
+ fontWeight: 'normal',
373
+ fontSize: '16px',
374
+ minFontSize: '10px',
375
+ resizeFont: false,
376
+ fontFamily: 'sans-serif',
377
+ fontColor: '#ffffff',
378
+ textAlign: 'center',
379
+ textBaseline: 'bottom',
380
+ textXCoordinate: null,
381
+ textYCoordinate: null,
382
+ progressCallback: noop,
383
+ completeCallback: noop,
384
+ saveRenderingContexts: false,
385
+ savedRenderingContexts: [],
386
+ crossOrigin: 'Anonymous'
341
387
  };
342
388
  const defaultOptions$2 = Object.freeze({
343
- default: defaultOptions
389
+ default: defaultOptions
344
390
  });
345
-
391
+ /*
392
+ isSupported.js
393
+ ==============
394
+ */
395
+ /* Copyright 2017 Yahoo Inc.
396
+ * Copyrights licensed under the MIT License. See the accompanying LICENSE file for terms.
397
+ */
398
+ // Dependencies
346
399
  function isSupported() {
347
- return error.isValid();
400
+ return error.isValid();
348
401
  }
349
-
402
+ /*
403
+ isWebCamGIFSupported.js
404
+ =======================
405
+ */
406
+ /* Copyright 2017 Yahoo Inc.
407
+ * Copyrights licensed under the MIT License. See the accompanying LICENSE file for terms.
408
+ */
350
409
  function isWebCamGIFSupported() {
351
- return error.isValid();
410
+ return error.isValid();
352
411
  }
353
-
412
+ /*
413
+ isSupported.js
414
+ ==============
415
+ */
416
+ /* Copyright 2017 Yahoo Inc.
417
+ * Copyrights licensed under the MIT License. See the accompanying LICENSE file for terms.
418
+ */
419
+ // Dependencies
354
420
  function isSupported$1() {
355
- const options = {
356
- getUserMedia: true
357
- };
358
- return error.isValid(options);
421
+ const options = {
422
+ getUserMedia: true
423
+ };
424
+ return error.isValid(options);
359
425
  }
360
-
426
+ /*
427
+ isExistingVideoGIFSupported.js
428
+ ==============================
429
+ */
430
+ /* Copyright 2017 Yahoo Inc.
431
+ * Copyrights licensed under the MIT License. See the accompanying LICENSE file for terms.
432
+ */
433
+ // Dependencies
361
434
  function isExistingVideoGIFSupported(codecs) {
362
- let hasValidCodec = false;
363
-
364
- if (utils.isArray(codecs) && codecs.length) {
365
- utils.each(codecs, function (indece, currentCodec) {
366
- if (utils.isSupported.videoCodecs[currentCodec]) {
367
- hasValidCodec = true;
368
- }
369
- });
370
-
371
- if (!hasValidCodec) {
372
- return false;
435
+ let hasValidCodec = false;
436
+ if (utils.isArray(codecs) && codecs.length) {
437
+ utils.each(codecs, function (indece, currentCodec) {
438
+ if (utils.isSupported.videoCodecs[currentCodec]) {
439
+ hasValidCodec = true;
440
+ }
441
+ });
442
+ if (!hasValidCodec) {
443
+ return false;
444
+ }
373
445
  }
374
- } else if (utils.isString(codecs) && codecs.length) {
375
- if (!utils.isSupported.videoCodecs[codecs]) {
376
- return false;
446
+ else if (utils.isString(codecs) && codecs.length) {
447
+ if (!utils.isSupported.videoCodecs[codecs]) {
448
+ return false;
449
+ }
377
450
  }
378
- }
379
-
380
- return error.isValid({
381
- getUserMedia: true
382
- });
451
+ return error.isValid({
452
+ getUserMedia: true
453
+ });
383
454
  }
384
-
455
+ /*
456
+ NeuQuant.js
457
+ ===========
458
+ */
459
+ /*
460
+ * NeuQuant Neural-Net Quantization Algorithm
461
+ * ------------------------------------------
462
+ *
463
+ * Copyright (c) 1994 Anthony Dekker
464
+ *
465
+ * NEUQUANT Neural-Net quantization algorithm by Anthony Dekker, 1994. See
466
+ * "Kohonen neural networks for optimal colour quantization" in "Network:
467
+ * Computation in Neural Systems" Vol. 5 (1994) pp 351-367. for a discussion of
468
+ * the algorithm.
469
+ *
470
+ * Any party obtaining a copy of these files from the author, directly or
471
+ * indirectly, is granted, free of charge, a full and unrestricted irrevocable,
472
+ * world-wide, paid up, royalty-free, nonexclusive right and license to deal in
473
+ * this software and documentation files (the "Software"), including without
474
+ * limitation the rights to use, copy, modify, merge, publish, distribute,
475
+ * sublicense, and/or sell copies of the Software, and to permit persons who
476
+ * receive copies from any such party to do so, with the only requirement being
477
+ * that this copyright notice remain intact.
478
+ */
479
+ /*
480
+ * This class handles Neural-Net quantization algorithm
481
+ * @author Kevin Weiner (original Java version - kweiner@fmsware.com)
482
+ * @author Thibault Imbert (AS3 version - bytearray.org)
483
+ * @version 0.1 AS3 implementation
484
+ * @version 0.2 JS->AS3 "translation" by antimatter15
485
+ * @version 0.3 JS clean up + using modern JS idioms by sole - http://soledadpenades.com
486
+ * Also implement fix in color conversion described at http://stackoverflow.com/questions/16371712/neuquant-js-javascript-color-quantization-hidden-bug-in-js-conversion
487
+ */
385
488
  function NeuQuant() {
386
- const netsize = 256;
387
- const prime1 = 499;
388
- const prime2 = 491;
389
- const prime3 = 487;
390
- const prime4 = 503;
391
- const minpicturebytes = 3 * prime4;
392
- const maxnetpos = netsize - 1;
393
- const netbiasshift = 4;
394
- const ncycles = 100;
395
- const intbiasshift = 16;
396
- const intbias = 1 << intbiasshift;
397
- const gammashift = 10;
398
- const gamma = 1 << gammashift;
399
- const betashift = 10;
400
- const beta = intbias >> betashift;
401
- const betagamma = intbias << gammashift - betashift;
402
- const initrad = netsize >> 3;
403
- const radiusbiasshift = 6;
404
- const radiusbias = 1 << radiusbiasshift;
405
- const initradius = initrad * radiusbias;
406
- const radiusdec = 30;
407
- const alphabiasshift = 10;
408
- const initalpha = 1 << alphabiasshift;
409
- let alphadec;
410
- const radbiasshift = 8;
411
- const radbias = 1 << radbiasshift;
412
- const alpharadbshift = alphabiasshift + radbiasshift;
413
- const alpharadbias = 1 << alpharadbshift;
414
- let thepicture;
415
- let lengthcount;
416
- let samplefac;
417
- let network;
418
- const netindex = [];
419
- const bias = [];
420
- const freq = [];
421
- const radpower = [];
422
-
423
- function NeuQuantConstructor(thepic, len, sample) {
424
- let i;
425
- let p;
426
- thepicture = thepic;
427
- lengthcount = len;
428
- samplefac = sample;
429
- network = new Array(netsize);
430
-
431
- for (i = 0; i < netsize; i++) {
432
- network[i] = new Array(4);
433
- p = network[i];
434
- p[0] = p[1] = p[2] = (i << netbiasshift + 8) / netsize | 0;
435
- freq[i] = intbias / netsize | 0;
436
- bias[i] = 0;
437
- }
438
- }
439
-
440
- function colorMap() {
441
- const map = [];
442
- const index = new Array(netsize);
443
-
444
- for (let i = 0; i < netsize; i++) {
445
- index[network[i][3]] = i;
446
- }
447
-
448
- let k = 0;
449
-
450
- for (let l = 0; l < netsize; l++) {
451
- const j = index[l];
452
- map[k++] = network[j][0];
453
- map[k++] = network[j][1];
454
- map[k++] = network[j][2];
455
- }
456
-
457
- return map;
458
- }
459
-
460
- function inxbuild() {
461
- let i;
462
- let j;
463
- let smallpos;
464
- let smallval;
465
- let p;
466
- let q;
467
- let previouscol;
468
- let startpos;
469
- previouscol = 0;
470
- startpos = 0;
471
-
472
- for (i = 0; i < netsize; i++) {
473
- p = network[i];
474
- smallpos = i;
475
- smallval = p[1];
476
-
477
- for (j = i + 1; j < netsize; j++) {
478
- q = network[j];
479
-
480
- if (q[1] < smallval) {
481
- smallpos = j;
482
- smallval = q[1];
483
- }
484
- }
485
-
486
- q = network[smallpos];
487
-
488
- if (i != smallpos) {
489
- j = q[0];
490
- q[0] = p[0];
491
- p[0] = j;
492
- j = q[1];
493
- q[1] = p[1];
494
- p[1] = j;
495
- j = q[2];
496
- q[2] = p[2];
497
- p[2] = j;
498
- j = q[3];
499
- q[3] = p[3];
500
- p[3] = j;
501
- }
502
-
503
- if (smallval != previouscol) {
504
- netindex[previouscol] = startpos + i >> 1;
505
-
506
- for (j = previouscol + 1; j < smallval; j++) {
507
- netindex[j] = i;
508
- }
509
-
510
- previouscol = smallval;
511
- startpos = i;
512
- }
513
- }
514
-
515
- netindex[previouscol] = startpos + maxnetpos >> 1;
516
-
517
- for (j = previouscol + 1; j < 256; j++) {
518
- netindex[j] = maxnetpos;
519
- }
520
- }
521
-
522
- function learn() {
523
- let i;
524
- let j;
525
- let b;
526
- let g;
527
- let r;
528
- let radius;
529
- let rad;
530
- let alpha;
531
- let step;
532
- let delta;
533
- let samplepixels;
534
- let p;
535
- let pix;
536
- let lim;
537
-
538
- if (lengthcount < minpicturebytes) {
539
- samplefac = 1;
540
- }
541
-
542
- alphadec = 30 + (samplefac - 1) / 3;
543
- p = thepicture;
544
- pix = 0;
545
- lim = lengthcount;
546
- samplepixels = lengthcount / (3 * samplefac);
547
- delta = samplepixels / ncycles | 0;
548
- alpha = initalpha;
549
- radius = initradius;
550
- rad = radius >> radiusbiasshift;
551
-
552
- if (rad <= 1) {
553
- rad = 0;
554
- }
555
-
556
- for (i = 0; i < rad; i++) {
557
- radpower[i] = alpha * ((rad * rad - i * i) * radbias / (rad * rad));
489
+ const netsize = 256; // number of colours used
490
+ // four primes near 500 - assume no image has a length so large
491
+ // that it is divisible by all four primes
492
+ const prime1 = 499;
493
+ const prime2 = 491;
494
+ const prime3 = 487;
495
+ const prime4 = 503;
496
+ // minimum size for input image
497
+ const minpicturebytes = 3 * prime4;
498
+ // Network Definitions
499
+ const maxnetpos = netsize - 1;
500
+ const netbiasshift = 4; // bias for colour values
501
+ const ncycles = 100; // no. of learning cycles
502
+ // defs for freq and bias
503
+ const intbiasshift = 16; // bias for fractions
504
+ const intbias = 1 << intbiasshift;
505
+ const gammashift = 10; // gamma = 1024
506
+ const gamma = 1 << gammashift;
507
+ const betashift = 10;
508
+ const beta = intbias >> betashift; // beta = 1/1024
509
+ const betagamma = intbias << (gammashift - betashift);
510
+ // defs for decreasing radius factor
511
+ // For 256 colors, radius starts at 32.0 biased by 6 bits
512
+ // and decreases by a factor of 1/30 each cycle
513
+ const initrad = netsize >> 3;
514
+ const radiusbiasshift = 6;
515
+ const radiusbias = 1 << radiusbiasshift;
516
+ const initradius = initrad * radiusbias;
517
+ const radiusdec = 30;
518
+ // defs for decreasing alpha factor
519
+ // Alpha starts at 1.0 biased by 10 bits
520
+ const alphabiasshift = 10;
521
+ const initalpha = 1 << alphabiasshift;
522
+ let alphadec;
523
+ // radbias and alpharadbias used for radpower calculation
524
+ const radbiasshift = 8;
525
+ const radbias = 1 << radbiasshift;
526
+ const alpharadbshift = alphabiasshift + radbiasshift;
527
+ const alpharadbias = 1 << alpharadbshift;
528
+ // Input image
529
+ let thepicture;
530
+ // Height * Width * 3
531
+ let lengthcount;
532
+ // Sampling factor 1..30
533
+ let samplefac;
534
+ // The network itself
535
+ let network;
536
+ const netindex = [];
537
+ // for network lookup - really 256
538
+ const bias = [];
539
+ // bias and freq arrays for learning
540
+ const freq = [];
541
+ const radpower = [];
542
+ function NeuQuantConstructor(thepic, len, sample) {
543
+ let i;
544
+ let p;
545
+ thepicture = thepic;
546
+ lengthcount = len;
547
+ samplefac = sample;
548
+ network = new Array(netsize);
549
+ for (i = 0; i < netsize; i++) {
550
+ network[i] = new Array(4);
551
+ p = network[i];
552
+ p[0] = p[1] = p[2] = ((i << (netbiasshift + 8)) / netsize) | 0;
553
+ freq[i] = (intbias / netsize) | 0; // 1 / netsize
554
+ bias[i] = 0;
555
+ }
558
556
  }
559
-
560
- if (lengthcount < minpicturebytes) {
561
- step = 3;
562
- } else if (lengthcount % prime1 !== 0) {
563
- step = 3 * prime1;
564
- } else if (lengthcount % prime2 !== 0) {
565
- step = 3 * prime2;
566
- } else if (lengthcount % prime3 !== 0) {
567
- step = 3 * prime3;
568
- } else {
569
- step = 3 * prime4;
557
+ function colorMap() {
558
+ const map = [];
559
+ const index = new Array(netsize);
560
+ for (let i = 0; i < netsize; i++) {
561
+ index[network[i][3]] = i;
562
+ }
563
+ let k = 0;
564
+ for (let l = 0; l < netsize; l++) {
565
+ const j = index[l];
566
+ map[k++] = network[j][0];
567
+ map[k++] = network[j][1];
568
+ map[k++] = network[j][2];
569
+ }
570
+ return map;
571
+ }
572
+ // Insertion sort of network and building of netindex[0..255]
573
+ // (to do after unbias)
574
+ function inxbuild() {
575
+ let i;
576
+ let j;
577
+ let smallpos;
578
+ let smallval;
579
+ let p;
580
+ let q;
581
+ let previouscol;
582
+ let startpos;
583
+ previouscol = 0;
584
+ startpos = 0;
585
+ for (i = 0; i < netsize; i++) {
586
+ p = network[i];
587
+ smallpos = i;
588
+ smallval = p[1]; // index on g
589
+ // find smallest in i..netsize-1
590
+ for (j = i + 1; j < netsize; j++) {
591
+ q = network[j];
592
+ if (q[1] < smallval) {
593
+ // index on g
594
+ smallpos = j;
595
+ smallval = q[1]; // index on g
596
+ }
597
+ }
598
+ q = network[smallpos];
599
+ // swap p (i) and q (smallpos) entries
600
+ if (i != smallpos) {
601
+ j = q[0];
602
+ q[0] = p[0];
603
+ p[0] = j;
604
+ j = q[1];
605
+ q[1] = p[1];
606
+ p[1] = j;
607
+ j = q[2];
608
+ q[2] = p[2];
609
+ p[2] = j;
610
+ j = q[3];
611
+ q[3] = p[3];
612
+ p[3] = j;
613
+ }
614
+ // smallval entry is now in position i
615
+ if (smallval != previouscol) {
616
+ netindex[previouscol] = (startpos + i) >> 1;
617
+ for (j = previouscol + 1; j < smallval; j++) {
618
+ netindex[j] = i;
619
+ }
620
+ previouscol = smallval;
621
+ startpos = i;
622
+ }
623
+ }
624
+ netindex[previouscol] = (startpos + maxnetpos) >> 1;
625
+ for (j = previouscol + 1; j < 256; j++) {
626
+ netindex[j] = maxnetpos; // really 256
627
+ }
570
628
  }
571
-
572
- i = 0;
573
-
574
- while (i < samplepixels) {
575
- b = (p[pix + 0] & 0xff) << netbiasshift;
576
- g = (p[pix + 1] & 0xff) << netbiasshift;
577
- r = (p[pix + 2] & 0xff) << netbiasshift;
578
- j = contest(b, g, r);
579
- altersingle(alpha, j, b, g, r);
580
-
581
- if (rad !== 0) {
582
- alterneigh(rad, j, b, g, r);
583
- }
584
-
585
- pix += step;
586
-
587
- if (pix >= lim) {
588
- pix -= lengthcount;
589
- }
590
-
591
- i++;
592
-
593
- if (delta === 0) {
594
- delta = 1;
595
- }
596
-
597
- if (i % delta === 0) {
598
- alpha -= alpha / alphadec;
599
- radius -= radius / radiusdec;
629
+ // Main Learning Loop
630
+ function learn() {
631
+ let i;
632
+ let j;
633
+ let b;
634
+ let g;
635
+ let r;
636
+ let radius;
637
+ let rad;
638
+ let alpha;
639
+ let step;
640
+ let delta;
641
+ let samplepixels;
642
+ let p;
643
+ let pix;
644
+ let lim;
645
+ if (lengthcount < minpicturebytes) {
646
+ samplefac = 1;
647
+ }
648
+ alphadec = 30 + (samplefac - 1) / 3;
649
+ p = thepicture;
650
+ pix = 0;
651
+ lim = lengthcount;
652
+ samplepixels = lengthcount / (3 * samplefac);
653
+ delta = (samplepixels / ncycles) | 0;
654
+ alpha = initalpha;
655
+ radius = initradius;
600
656
  rad = radius >> radiusbiasshift;
601
-
602
657
  if (rad <= 1) {
603
- rad = 0;
658
+ rad = 0;
604
659
  }
605
-
606
- for (j = 0; j < rad; j++) {
607
- radpower[j] = alpha * ((rad * rad - j * j) * radbias / (rad * rad));
660
+ for (i = 0; i < rad; i++) {
661
+ radpower[i] = alpha * (((rad * rad - i * i) * radbias) / (rad * rad));
662
+ }
663
+ if (lengthcount < minpicturebytes) {
664
+ step = 3;
665
+ }
666
+ else if (lengthcount % prime1 !== 0) {
667
+ step = 3 * prime1;
668
+ }
669
+ else if (lengthcount % prime2 !== 0) {
670
+ step = 3 * prime2;
671
+ }
672
+ else if (lengthcount % prime3 !== 0) {
673
+ step = 3 * prime3;
674
+ }
675
+ else {
676
+ step = 3 * prime4;
677
+ }
678
+ i = 0;
679
+ while (i < samplepixels) {
680
+ b = (p[pix + 0] & 0xff) << netbiasshift;
681
+ g = (p[pix + 1] & 0xff) << netbiasshift;
682
+ r = (p[pix + 2] & 0xff) << netbiasshift;
683
+ j = contest(b, g, r);
684
+ altersingle(alpha, j, b, g, r);
685
+ if (rad !== 0) {
686
+ // Alter neighbours
687
+ alterneigh(rad, j, b, g, r);
688
+ }
689
+ pix += step;
690
+ if (pix >= lim) {
691
+ pix -= lengthcount;
692
+ }
693
+ i++;
694
+ if (delta === 0) {
695
+ delta = 1;
696
+ }
697
+ if (i % delta === 0) {
698
+ alpha -= alpha / alphadec;
699
+ radius -= radius / radiusdec;
700
+ rad = radius >> radiusbiasshift;
701
+ if (rad <= 1) {
702
+ rad = 0;
703
+ }
704
+ for (j = 0; j < rad; j++) {
705
+ radpower[j] = alpha * (((rad * rad - j * j) * radbias) / (rad * rad));
706
+ }
707
+ }
608
708
  }
609
- }
610
709
  }
611
- }
612
-
613
- function map(b, g, r) {
614
- let i;
615
- let j;
616
- let dist;
617
- let a;
618
- let bestd;
619
- let p;
620
- let best;
621
- bestd = 1000;
622
- best = -1;
623
- i = netindex[g];
624
- j = i - 1;
625
-
626
- while (i < netsize || j >= 0) {
627
- if (i < netsize) {
628
- p = network[i];
629
- dist = p[1] - g;
630
-
631
- if (dist >= bestd) {
632
- i = netsize;
633
- } else {
634
- i++;
635
-
636
- if (dist < 0) {
637
- dist = -dist;
638
- }
639
-
640
- a = p[0] - b;
641
-
642
- if (a < 0) {
643
- a = -a;
644
- }
645
-
646
- dist += a;
647
-
648
- if (dist < bestd) {
649
- a = p[2] - r;
650
-
710
+ // Search for BGR values 0..255 (after net is unbiased) and return colour index
711
+ function map(b, g, r) {
712
+ let i;
713
+ let j;
714
+ let dist;
715
+ let a;
716
+ let bestd;
717
+ let p;
718
+ let best;
719
+ // Biggest possible distance is 256 * 3
720
+ bestd = 1000;
721
+ best = -1;
722
+ i = netindex[g]; // index on g
723
+ j = i - 1; // start at netindex[g] and work outwards
724
+ while (i < netsize || j >= 0) {
725
+ if (i < netsize) {
726
+ p = network[i];
727
+ dist = p[1] - g; // inx key
728
+ if (dist >= bestd) {
729
+ i = netsize; // stop iter
730
+ }
731
+ else {
732
+ i++;
733
+ if (dist < 0) {
734
+ dist = -dist;
735
+ }
736
+ a = p[0] - b;
737
+ if (a < 0) {
738
+ a = -a;
739
+ }
740
+ dist += a;
741
+ if (dist < bestd) {
742
+ a = p[2] - r;
743
+ if (a < 0) {
744
+ a = -a;
745
+ }
746
+ dist += a;
747
+ if (dist < bestd) {
748
+ bestd = dist;
749
+ best = p[3];
750
+ }
751
+ }
752
+ }
753
+ }
754
+ if (j >= 0) {
755
+ p = network[j];
756
+ dist = g - p[1]; // inx key - reverse dif
757
+ if (dist >= bestd) {
758
+ j = -1; // stop iter
759
+ }
760
+ else {
761
+ j--;
762
+ if (dist < 0) {
763
+ dist = -dist;
764
+ }
765
+ a = p[0] - b;
766
+ if (a < 0) {
767
+ a = -a;
768
+ }
769
+ dist += a;
770
+ if (dist < bestd) {
771
+ a = p[2] - r;
772
+ if (a < 0) {
773
+ a = -a;
774
+ }
775
+ dist += a;
776
+ if (dist < bestd) {
777
+ bestd = dist;
778
+ best = p[3];
779
+ }
780
+ }
781
+ }
782
+ }
783
+ }
784
+ return best;
785
+ }
786
+ function process() {
787
+ learn();
788
+ unbiasnet();
789
+ inxbuild();
790
+ return colorMap();
791
+ }
792
+ // Unbias network to give byte values 0..255 and record position i
793
+ // to prepare for sort
794
+ function unbiasnet() {
795
+ let i;
796
+ let j;
797
+ for (i = 0; i < netsize; i++) {
798
+ network[i][0] >>= netbiasshift;
799
+ network[i][1] >>= netbiasshift;
800
+ network[i][2] >>= netbiasshift;
801
+ network[i][3] = i; // record colour no
802
+ }
803
+ }
804
+ // Move adjacent neurons by precomputed alpha*(1-((i-j)^2/[r]^2))
805
+ // in radpower[|i-j|]
806
+ function alterneigh(rad, i, b, g, r) {
807
+ let j;
808
+ let k;
809
+ let lo;
810
+ let hi;
811
+ let a;
812
+ let m;
813
+ let p;
814
+ lo = i - rad;
815
+ if (lo < -1) {
816
+ lo = -1;
817
+ }
818
+ hi = i + rad;
819
+ if (hi > netsize) {
820
+ hi = netsize;
821
+ }
822
+ j = i + 1;
823
+ k = i - 1;
824
+ m = 1;
825
+ while (j < hi || k > lo) {
826
+ a = radpower[m++];
827
+ if (j < hi) {
828
+ p = network[j++];
829
+ try {
830
+ p[0] -= ((a * (p[0] - b)) / alpharadbias) | 0;
831
+ p[1] -= ((a * (p[1] - g)) / alpharadbias) | 0;
832
+ p[2] -= ((a * (p[2] - r)) / alpharadbias) | 0;
833
+ }
834
+ catch (e) { }
835
+ }
836
+ if (k > lo) {
837
+ p = network[k--];
838
+ try {
839
+ p[0] -= ((a * (p[0] - b)) / alpharadbias) | 0;
840
+ p[1] -= ((a * (p[1] - g)) / alpharadbias) | 0;
841
+ p[2] -= ((a * (p[2] - r)) / alpharadbias) | 0;
842
+ }
843
+ catch (e) { }
844
+ }
845
+ }
846
+ }
847
+ // Move neuron i towards biased (b,g,r) by factor alpha
848
+ function altersingle(alpha, i, b, g, r) {
849
+ // alter hit neuron
850
+ const n = network[i];
851
+ const alphaMult = alpha / initalpha;
852
+ n[0] -= (alphaMult * (n[0] - b)) | 0;
853
+ n[1] -= (alphaMult * (n[1] - g)) | 0;
854
+ n[2] -= (alphaMult * (n[2] - r)) | 0;
855
+ }
856
+ // Search for biased BGR values
857
+ function contest(b, g, r) {
858
+ // finds closest neuron (min dist) and updates freq
859
+ // finds best neuron (min dist-bias) and returns position
860
+ // for frequently chosen neurons, freq[i] is high and bias[i] is negative
861
+ // bias[i] = gamma*((1/netsize)-freq[i])
862
+ let i;
863
+ let dist;
864
+ let a;
865
+ let biasdist;
866
+ let betafreq;
867
+ let bestpos;
868
+ let bestbiaspos;
869
+ let bestd;
870
+ let bestbiasd;
871
+ let n;
872
+ bestd = ~(1 << 31);
873
+ bestbiasd = bestd;
874
+ bestpos = -1;
875
+ bestbiaspos = bestpos;
876
+ for (i = 0; i < netsize; i++) {
877
+ n = network[i];
878
+ dist = n[0] - b;
879
+ if (dist < 0) {
880
+ dist = -dist;
881
+ }
882
+ a = n[1] - g;
651
883
  if (a < 0) {
652
- a = -a;
884
+ a = -a;
653
885
  }
654
-
655
886
  dist += a;
656
-
657
- if (dist < bestd) {
658
- bestd = dist;
659
- best = p[3];
660
- }
661
- }
662
- }
663
- }
664
-
665
- if (j >= 0) {
666
- p = network[j];
667
- dist = g - p[1];
668
-
669
- if (dist >= bestd) {
670
- j = -1;
671
- } else {
672
- j--;
673
-
674
- if (dist < 0) {
675
- dist = -dist;
676
- }
677
-
678
- a = p[0] - b;
679
-
680
- if (a < 0) {
681
- a = -a;
682
- }
683
-
684
- dist += a;
685
-
686
- if (dist < bestd) {
687
- a = p[2] - r;
688
-
887
+ a = n[2] - r;
689
888
  if (a < 0) {
690
- a = -a;
889
+ a = -a;
691
890
  }
692
-
693
891
  dist += a;
694
-
695
892
  if (dist < bestd) {
696
- bestd = dist;
697
- best = p[3];
893
+ bestd = dist;
894
+ bestpos = i;
895
+ }
896
+ biasdist = dist - (bias[i] >> (intbiasshift - netbiasshift));
897
+ if (biasdist < bestbiasd) {
898
+ bestbiasd = biasdist;
899
+ bestbiaspos = i;
698
900
  }
699
- }
901
+ betafreq = freq[i] >> betashift;
902
+ freq[i] -= betafreq;
903
+ bias[i] += betafreq << gammashift;
700
904
  }
701
- }
702
- }
703
-
704
- return best;
705
- }
706
-
707
- function process() {
708
- learn();
709
- unbiasnet();
710
- inxbuild();
711
- return colorMap();
712
- }
713
-
714
- function unbiasnet() {
715
- let i;
716
- let j;
717
-
718
- for (i = 0; i < netsize; i++) {
719
- network[i][0] >>= netbiasshift;
720
- network[i][1] >>= netbiasshift;
721
- network[i][2] >>= netbiasshift;
722
- network[i][3] = i;
723
- }
724
- }
725
-
726
- function alterneigh(rad, i, b, g, r) {
727
- let j;
728
- let k;
729
- let lo;
730
- let hi;
731
- let a;
732
- let m;
733
- let p;
734
- lo = i - rad;
735
-
736
- if (lo < -1) {
737
- lo = -1;
738
- }
739
-
740
- hi = i + rad;
741
-
742
- if (hi > netsize) {
743
- hi = netsize;
744
- }
745
-
746
- j = i + 1;
747
- k = i - 1;
748
- m = 1;
749
-
750
- while (j < hi || k > lo) {
751
- a = radpower[m++];
752
-
753
- if (j < hi) {
754
- p = network[j++];
755
-
756
- try {
757
- p[0] -= a * (p[0] - b) / alpharadbias | 0;
758
- p[1] -= a * (p[1] - g) / alpharadbias | 0;
759
- p[2] -= a * (p[2] - r) / alpharadbias | 0;
760
- } catch (e) {}
761
- }
762
-
763
- if (k > lo) {
764
- p = network[k--];
765
-
766
- try {
767
- p[0] -= a * (p[0] - b) / alpharadbias | 0;
768
- p[1] -= a * (p[1] - g) / alpharadbias | 0;
769
- p[2] -= a * (p[2] - r) / alpharadbias | 0;
770
- } catch (e) {}
771
- }
772
- }
773
- }
774
-
775
- function altersingle(alpha, i, b, g, r) {
776
- const n = network[i];
777
- const alphaMult = alpha / initalpha;
778
- n[0] -= alphaMult * (n[0] - b) | 0;
779
- n[1] -= alphaMult * (n[1] - g) | 0;
780
- n[2] -= alphaMult * (n[2] - r) | 0;
781
- }
782
-
783
- function contest(b, g, r) {
784
- let i;
785
- let dist;
786
- let a;
787
- let biasdist;
788
- let betafreq;
789
- let bestpos;
790
- let bestbiaspos;
791
- let bestd;
792
- let bestbiasd;
793
- let n;
794
- bestd = ~(1 << 31);
795
- bestbiasd = bestd;
796
- bestpos = -1;
797
- bestbiaspos = bestpos;
798
-
799
- for (i = 0; i < netsize; i++) {
800
- n = network[i];
801
- dist = n[0] - b;
802
-
803
- if (dist < 0) {
804
- dist = -dist;
805
- }
806
-
807
- a = n[1] - g;
808
-
809
- if (a < 0) {
810
- a = -a;
811
- }
812
-
813
- dist += a;
814
- a = n[2] - r;
815
-
816
- if (a < 0) {
817
- a = -a;
818
- }
819
-
820
- dist += a;
821
-
822
- if (dist < bestd) {
823
- bestd = dist;
824
- bestpos = i;
825
- }
826
-
827
- biasdist = dist - (bias[i] >> intbiasshift - netbiasshift);
828
-
829
- if (biasdist < bestbiasd) {
830
- bestbiasd = biasdist;
831
- bestbiaspos = i;
832
- }
833
-
834
- betafreq = freq[i] >> betashift;
835
- freq[i] -= betafreq;
836
- bias[i] += betafreq << gammashift;
837
- }
838
-
839
- freq[bestpos] += beta;
840
- bias[bestpos] -= betagamma;
841
- return bestbiaspos;
842
- }
843
-
844
- NeuQuantConstructor.apply(this, arguments);
845
- const exports = {};
846
- exports.map = map;
847
- exports.process = process;
848
- return exports;
905
+ freq[bestpos] += beta;
906
+ bias[bestpos] -= betagamma;
907
+ return bestbiaspos;
908
+ }
909
+ NeuQuantConstructor.apply(this, arguments);
910
+ const exports = {};
911
+ exports.map = map;
912
+ exports.process = process;
913
+ return exports;
849
914
  }
850
-
915
+ /*
916
+ processFrameWorker.js
917
+ =====================
918
+ */
919
+ /* Copyright 2017 Yahoo Inc.
920
+ * Copyrights licensed under the MIT License. See the accompanying LICENSE file for terms.
921
+ */
851
922
  function workerCode() {
852
- const self = this;
853
-
854
- try {
855
- self.onmessage = function (ev) {
856
- const data = ev.data || {};
857
- let response;
858
-
859
- if (data.gifshot) {
860
- response = workerMethods.run(data);
861
- postMessage(response);
862
- }
863
- };
864
- } catch (e) {}
865
-
866
- var workerMethods = {
867
- dataToRGB: function dataToRGB(data, width, height) {
868
- const length = width * height * 4;
869
- let i = 0;
870
- const rgb = [];
871
-
872
- while (i < length) {
873
- rgb.push(data[i++]);
874
- rgb.push(data[i++]);
875
- rgb.push(data[i++]);
876
- i++;
877
- }
878
-
879
- return rgb;
880
- },
881
- componentizedPaletteToArray: function componentizedPaletteToArray(paletteRGB) {
882
- paletteRGB = paletteRGB || [];
883
- const paletteArray = [];
884
-
885
- for (let i = 0; i < paletteRGB.length; i += 3) {
886
- const r = paletteRGB[i];
887
- const g = paletteRGB[i + 1];
888
- const b = paletteRGB[i + 2];
889
- paletteArray.push(r << 16 | g << 8 | b);
890
- }
891
-
892
- return paletteArray;
893
- },
894
- processFrameWithQuantizer: function processFrameWithQuantizer(imageData, width, height, sampleInterval) {
895
- const rgbComponents = this.dataToRGB(imageData, width, height);
896
- const nq = new NeuQuant(rgbComponents, rgbComponents.length, sampleInterval);
897
- const paletteRGB = nq.process();
898
- const paletteArray = new Uint32Array(this.componentizedPaletteToArray(paletteRGB));
899
- const numberPixels = width * height;
900
- const indexedPixels = new Uint8Array(numberPixels);
901
- let k = 0;
902
-
903
- for (let i = 0; i < numberPixels; i++) {
904
- const r = rgbComponents[k++];
905
- const g = rgbComponents[k++];
906
- const b = rgbComponents[k++];
907
- indexedPixels[i] = nq.map(r, g, b);
908
- }
909
-
910
- return {
911
- pixels: indexedPixels,
912
- palette: paletteArray
913
- };
914
- },
915
- run: function run(frame) {
916
- frame = frame || {};
917
- const _frame = frame;
918
- const height = _frame.height;
919
- const palette = _frame.palette;
920
- const sampleInterval = _frame.sampleInterval;
921
- const width = _frame.width;
922
- const imageData = frame.data;
923
- return this.processFrameWithQuantizer(imageData, width, height, sampleInterval);
923
+ const self = this;
924
+ try {
925
+ globalThis.onmessage = function (ev) {
926
+ const data = ev.data || {};
927
+ let response;
928
+ if (data.gifshot) {
929
+ response = workerMethods.run(data);
930
+ postMessage(response);
931
+ }
932
+ };
924
933
  }
925
- };
926
- return workerMethods;
934
+ catch (e) { }
935
+ var workerMethods = {
936
+ dataToRGB: function dataToRGB(data, width, height) {
937
+ const length = width * height * 4;
938
+ let i = 0;
939
+ const rgb = [];
940
+ while (i < length) {
941
+ rgb.push(data[i++]);
942
+ rgb.push(data[i++]);
943
+ rgb.push(data[i++]);
944
+ i++; // for the alpha channel which we don't care about
945
+ }
946
+ return rgb;
947
+ },
948
+ componentizedPaletteToArray: function componentizedPaletteToArray(paletteRGB) {
949
+ paletteRGB = paletteRGB || [];
950
+ const paletteArray = [];
951
+ for (let i = 0; i < paletteRGB.length; i += 3) {
952
+ const r = paletteRGB[i];
953
+ const g = paletteRGB[i + 1];
954
+ const b = paletteRGB[i + 2];
955
+ paletteArray.push((r << 16) | (g << 8) | b);
956
+ }
957
+ return paletteArray;
958
+ },
959
+ // This is the "traditional" Animated_GIF style of going from RGBA to indexed color frames
960
+ processFrameWithQuantizer: function processFrameWithQuantizer(imageData, width, height, sampleInterval) {
961
+ const rgbComponents = this.dataToRGB(imageData, width, height);
962
+ const nq = new NeuQuant(rgbComponents, rgbComponents.length, sampleInterval);
963
+ const paletteRGB = nq.process();
964
+ const paletteArray = new Uint32Array(this.componentizedPaletteToArray(paletteRGB));
965
+ const numberPixels = width * height;
966
+ const indexedPixels = new Uint8Array(numberPixels);
967
+ let k = 0;
968
+ for (let i = 0; i < numberPixels; i++) {
969
+ const r = rgbComponents[k++];
970
+ const g = rgbComponents[k++];
971
+ const b = rgbComponents[k++];
972
+ indexedPixels[i] = nq.map(r, g, b);
973
+ }
974
+ return {
975
+ pixels: indexedPixels,
976
+ palette: paletteArray
977
+ };
978
+ },
979
+ run: function run(frame) {
980
+ frame = frame || {};
981
+ const _frame = frame;
982
+ const height = _frame.height;
983
+ const palette = _frame.palette;
984
+ const sampleInterval = _frame.sampleInterval;
985
+ const width = _frame.width;
986
+ const imageData = frame.data;
987
+ return this.processFrameWithQuantizer(imageData, width, height, sampleInterval);
988
+ }
989
+ };
990
+ return workerMethods;
927
991
  }
928
-
992
+ /*
993
+ gifWriter.js
994
+ ============
995
+ */
996
+ // (c) Dean McNamee <dean@gmail.com>, 2013.
997
+ //
998
+ // https://github.com/deanm/omggif
999
+ //
1000
+ // Permission is hereby granted, free of charge, to any person obtaining a copy
1001
+ // of this software and associated documentation files (the "Software"), to
1002
+ // deal in the Software without restriction, including without limitation the
1003
+ // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
1004
+ // sell copies of the Software, and to permit persons to whom the Software is
1005
+ // furnished to do so, subject to the following conditions:
1006
+ //
1007
+ // The above copyright notice and this permission notice shall be included in
1008
+ // all copies or substantial portions of the Software.
1009
+ //
1010
+ // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
1011
+ // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
1012
+ // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
1013
+ // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
1014
+ // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
1015
+ // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
1016
+ // IN THE SOFTWARE.
1017
+ //
1018
+ // omggif is a JavaScript implementation of a GIF 89a encoder and decoder,
1019
+ // including animation and compression. It does not rely on any specific
1020
+ // underlying system, so should run in the browser, Node, or Plask.
929
1021
  function gifWriter(buf, width, height, gopts) {
930
- let p = 0;
931
- gopts = gopts === undefined ? {} : gopts;
932
- const loop_count = gopts.loop === undefined ? null : gopts.loop;
933
- const global_palette = gopts.palette === undefined ? null : gopts.palette;
934
- if (width <= 0 || height <= 0 || width > 65535 || height > 65535) throw 'Width/Height invalid.';
935
-
936
- function check_palette_and_num_colors(palette) {
937
- const num_colors = palette.length;
938
- if (num_colors < 2 || num_colors > 256 || num_colors & num_colors - 1) throw 'Invalid code/color length, must be power of 2 and 2 .. 256.';
939
- return num_colors;
940
- }
941
-
942
- buf[p++] = 0x47;
943
- buf[p++] = 0x49;
944
- buf[p++] = 0x46;
945
- buf[p++] = 0x38;
946
- buf[p++] = 0x39;
947
- buf[p++] = 0x61;
948
- const gp_num_colors_pow2 = 0;
949
- const background = 0;
950
- buf[p++] = width & 0xff;
951
- buf[p++] = width >> 8 & 0xff;
952
- buf[p++] = height & 0xff;
953
- buf[p++] = height >> 8 & 0xff;
954
- buf[p++] = (global_palette !== null ? 0x80 : 0) | gp_num_colors_pow2;
955
- buf[p++] = background;
956
- buf[p++] = 0;
957
-
958
- if (loop_count !== null) {
959
- if (loop_count < 0 || loop_count > 65535) throw 'Loop count invalid.';
960
- buf[p++] = 0x21;
961
- buf[p++] = 0xff;
962
- buf[p++] = 0x0b;
963
- buf[p++] = 0x4e;
964
- buf[p++] = 0x45;
965
- buf[p++] = 0x54;
966
- buf[p++] = 0x53;
967
- buf[p++] = 0x43;
968
- buf[p++] = 0x41;
969
- buf[p++] = 0x50;
970
- buf[p++] = 0x45;
971
- buf[p++] = 0x32;
972
- buf[p++] = 0x2e;
973
- buf[p++] = 0x30;
974
- buf[p++] = 0x03;
975
- buf[p++] = 0x01;
976
- buf[p++] = loop_count & 0xff;
977
- buf[p++] = loop_count >> 8 & 0xff;
978
- buf[p++] = 0x00;
979
- }
980
-
981
- let ended = false;
982
-
983
- this.addFrame = function (x, y, w, h, indexed_pixels, opts) {
984
- if (ended === true) {
985
- --p;
986
- ended = false;
987
- }
988
-
989
- opts = opts === undefined ? {} : opts;
990
- if (x < 0 || y < 0 || x > 65535 || y > 65535) throw 'x/y invalid.';
991
- if (w <= 0 || h <= 0 || w > 65535 || h > 65535) throw 'Width/Height invalid.';
992
- if (indexed_pixels.length < w * h) throw 'Not enough pixels for the frame size.';
993
- let using_local_palette = true;
994
- let palette = opts.palette;
995
-
996
- if (palette === undefined || palette === null) {
997
- using_local_palette = false;
998
- palette = global_palette;
999
- }
1000
-
1001
- if (palette === undefined || palette === null) throw 'Must supply either a local or global palette.';
1002
- let num_colors = check_palette_and_num_colors(palette);
1003
- let min_code_size = 0;
1004
-
1005
- while (num_colors >>= 1) {
1006
- ++min_code_size;
1007
- }
1008
-
1009
- num_colors = 1 << min_code_size;
1010
- const delay = opts.delay === undefined ? 0 : opts.delay;
1011
- const disposal = opts.disposal === undefined ? 0 : opts.disposal;
1012
- if (disposal < 0 || disposal > 3) throw 'Disposal out of range.';
1013
- let use_transparency = false;
1014
- let transparent_index = 0;
1015
-
1016
- if (opts.transparent !== undefined && opts.transparent !== null) {
1017
- use_transparency = true;
1018
- transparent_index = opts.transparent;
1019
- if (transparent_index < 0 || transparent_index >= num_colors) throw 'Transparent color index.';
1020
- }
1021
-
1022
- if (disposal !== 0 || use_transparency || delay !== 0) {
1023
- buf[p++] = 0x21;
1024
- buf[p++] = 0xf9;
1025
- buf[p++] = 4;
1026
- buf[p++] = disposal << 2 | (use_transparency === true ? 1 : 0);
1027
- buf[p++] = delay & 0xff;
1028
- buf[p++] = delay >> 8 & 0xff;
1029
- buf[p++] = transparent_index;
1030
- buf[p++] = 0;
1031
- }
1032
-
1033
- buf[p++] = 0x2c;
1034
- buf[p++] = x & 0xff;
1035
- buf[p++] = x >> 8 & 0xff;
1036
- buf[p++] = y & 0xff;
1037
- buf[p++] = y >> 8 & 0xff;
1038
- buf[p++] = w & 0xff;
1039
- buf[p++] = w >> 8 & 0xff;
1040
- buf[p++] = h & 0xff;
1041
- buf[p++] = h >> 8 & 0xff;
1042
- buf[p++] = using_local_palette === true ? 0x80 | min_code_size - 1 : 0;
1043
-
1044
- if (using_local_palette === true) {
1045
- for (let i = 0, il = palette.length; i < il; ++i) {
1046
- const rgb = palette[i];
1047
- buf[p++] = rgb >> 16 & 0xff;
1048
- buf[p++] = rgb >> 8 & 0xff;
1049
- buf[p++] = rgb & 0xff;
1050
- }
1051
- }
1052
-
1053
- p = GifWriterOutputLZWCodeStream(buf, p, min_code_size < 2 ? 2 : min_code_size, indexed_pixels);
1054
- };
1055
-
1056
- this.end = function () {
1057
- if (ended === false) {
1058
- buf[p++] = 0x3b;
1059
- ended = true;
1060
- }
1061
-
1062
- return p;
1063
- };
1064
-
1065
- function GifWriterOutputLZWCodeStream(buf, p, min_code_size, index_stream) {
1066
- buf[p++] = min_code_size;
1067
- let cur_subblock = p++;
1068
- const clear_code = 1 << min_code_size;
1069
- const code_mask = clear_code - 1;
1070
- const eoi_code = clear_code + 1;
1071
- let next_code = eoi_code + 1;
1072
- let cur_code_size = min_code_size + 1;
1073
- let cur_shift = 0;
1074
- let cur = 0;
1075
-
1076
- function emit_bytes_to_buffer(bit_block_size) {
1077
- while (cur_shift >= bit_block_size) {
1078
- buf[p++] = cur & 0xff;
1079
- cur >>= 8;
1080
- cur_shift -= 8;
1081
-
1082
- if (p === cur_subblock + 256) {
1083
- buf[cur_subblock] = 255;
1084
- cur_subblock = p++;
1085
- }
1086
- }
1087
- }
1088
-
1089
- function emit_code(c) {
1090
- cur |= c << cur_shift;
1091
- cur_shift += cur_code_size;
1092
- emit_bytes_to_buffer(8);
1093
- }
1094
-
1095
- let ib_code = index_stream[0] & code_mask;
1096
- let code_table = {};
1097
- emit_code(clear_code);
1098
-
1099
- for (let i = 1, il = index_stream.length; i < il; ++i) {
1100
- const k = index_stream[i] & code_mask;
1101
- const cur_key = ib_code << 8 | k;
1102
- const cur_code = code_table[cur_key];
1103
-
1104
- if (cur_code === undefined) {
1105
- cur |= ib_code << cur_shift;
1106
- cur_shift += cur_code_size;
1107
-
1108
- while (cur_shift >= 8) {
1109
- buf[p++] = cur & 0xff;
1110
- cur >>= 8;
1111
- cur_shift -= 8;
1112
-
1113
- if (p === cur_subblock + 256) {
1114
- buf[cur_subblock] = 255;
1115
- cur_subblock = p++;
1116
- }
1117
- }
1118
-
1119
- if (next_code === 4096) {
1120
- emit_code(clear_code);
1121
- next_code = eoi_code + 1;
1122
- cur_code_size = min_code_size + 1;
1123
- code_table = {};
1124
- } else {
1125
- if (next_code >= 1 << cur_code_size) ++cur_code_size;
1126
- code_table[cur_key] = next_code++;
1127
- }
1128
-
1129
- ib_code = k;
1130
- } else {
1131
- ib_code = cur_code;
1132
- }
1133
- }
1134
-
1135
- emit_code(ib_code);
1136
- emit_code(eoi_code);
1137
- emit_bytes_to_buffer(1);
1138
-
1139
- if (cur_subblock + 1 === p) {
1140
- buf[cur_subblock] = 0;
1141
- } else {
1142
- buf[cur_subblock] = p - cur_subblock - 1;
1143
- buf[p++] = 0;
1022
+ let p = 0;
1023
+ gopts = gopts === undefined ? {} : gopts;
1024
+ const loop_count = gopts.loop === undefined ? null : gopts.loop;
1025
+ const global_palette = gopts.palette === undefined ? null : gopts.palette;
1026
+ if (width <= 0 || height <= 0 || width > 65535 || height > 65535)
1027
+ throw 'Width/Height invalid.';
1028
+ function check_palette_and_num_colors(palette) {
1029
+ const num_colors = palette.length;
1030
+ if (num_colors < 2 || num_colors > 256 || num_colors & (num_colors - 1))
1031
+ throw 'Invalid code/color length, must be power of 2 and 2 .. 256.';
1032
+ return num_colors;
1033
+ }
1034
+ // - Header.
1035
+ buf[p++] = 0x47;
1036
+ buf[p++] = 0x49;
1037
+ buf[p++] = 0x46; // GIF
1038
+ buf[p++] = 0x38;
1039
+ buf[p++] = 0x39;
1040
+ buf[p++] = 0x61; // 89a
1041
+ // Handling of Global Color Table (palette) and background index.
1042
+ const gp_num_colors_pow2 = 0;
1043
+ const background = 0;
1044
+ // - Logical Screen Descriptor.
1045
+ // NOTE(deanm): w/h apparently ignored by implementations, but set anyway.
1046
+ buf[p++] = width & 0xff;
1047
+ buf[p++] = (width >> 8) & 0xff;
1048
+ buf[p++] = height & 0xff;
1049
+ buf[p++] = (height >> 8) & 0xff;
1050
+ // NOTE: Indicates 0-bpp original color resolution (unused?).
1051
+ buf[p++] =
1052
+ (global_palette !== null ? 0x80 : 0) | // Global Color Table Flag.
1053
+ gp_num_colors_pow2; // NOTE: No sort flag (unused?).
1054
+ buf[p++] = background; // Background Color Index.
1055
+ buf[p++] = 0; // Pixel aspect ratio (unused?).
1056
+ if (loop_count !== null) {
1057
+ // Netscape block for looping.
1058
+ if (loop_count < 0 || loop_count > 65535)
1059
+ throw 'Loop count invalid.';
1060
+ // Extension code, label, and length.
1061
+ buf[p++] = 0x21;
1062
+ buf[p++] = 0xff;
1063
+ buf[p++] = 0x0b;
1064
+ // NETSCAPE2.0
1065
+ buf[p++] = 0x4e;
1066
+ buf[p++] = 0x45;
1067
+ buf[p++] = 0x54;
1068
+ buf[p++] = 0x53;
1069
+ buf[p++] = 0x43;
1070
+ buf[p++] = 0x41;
1071
+ buf[p++] = 0x50;
1072
+ buf[p++] = 0x45;
1073
+ buf[p++] = 0x32;
1074
+ buf[p++] = 0x2e;
1075
+ buf[p++] = 0x30;
1076
+ // Sub-block
1077
+ buf[p++] = 0x03;
1078
+ buf[p++] = 0x01;
1079
+ buf[p++] = loop_count & 0xff;
1080
+ buf[p++] = (loop_count >> 8) & 0xff;
1081
+ buf[p++] = 0x00; // Terminator.
1082
+ }
1083
+ let ended = false;
1084
+ this.addFrame = function (x, y, w, h, indexed_pixels, opts) {
1085
+ if (ended === true) {
1086
+ --p;
1087
+ ended = false;
1088
+ } // Un-end.
1089
+ opts = opts === undefined ? {} : opts;
1090
+ // TODO(deanm): Bounds check x, y. Do they need to be within the virtual
1091
+ // canvas width/height, I imagine?
1092
+ if (x < 0 || y < 0 || x > 65535 || y > 65535)
1093
+ throw 'x/y invalid.';
1094
+ if (w <= 0 || h <= 0 || w > 65535 || h > 65535)
1095
+ throw 'Width/Height invalid.';
1096
+ if (indexed_pixels.length < w * h)
1097
+ throw 'Not enough pixels for the frame size.';
1098
+ let using_local_palette = true;
1099
+ let palette = opts.palette;
1100
+ if (palette === undefined || palette === null) {
1101
+ using_local_palette = false;
1102
+ palette = global_palette;
1103
+ }
1104
+ if (palette === undefined || palette === null)
1105
+ throw 'Must supply either a local or global palette.';
1106
+ let num_colors = check_palette_and_num_colors(palette);
1107
+ // Compute the min_code_size (power of 2), destroying num_colors.
1108
+ let min_code_size = 0;
1109
+ while ((num_colors >>= 1)) {
1110
+ ++min_code_size;
1111
+ }
1112
+ num_colors = 1 << min_code_size; // Now we can easily get it back.
1113
+ const delay = opts.delay === undefined ? 0 : opts.delay;
1114
+ // From the spec:
1115
+ // 0 - No disposal specified. The decoder is
1116
+ // not required to take any action.
1117
+ // 1 - Do not dispose. The graphic is to be left
1118
+ // in place.
1119
+ // 2 - Restore to background color. The area used by the
1120
+ // graphic must be restored to the background color.
1121
+ // 3 - Restore to previous. The decoder is required to
1122
+ // restore the area overwritten by the graphic with
1123
+ // what was there prior to rendering the graphic.
1124
+ // 4-7 - To be defined.
1125
+ // NOTE(deanm): Dispose background doesn't really work, apparently most
1126
+ // browsers ignore the background palette index and clear to transparency.
1127
+ const disposal = opts.disposal === undefined ? 0 : opts.disposal;
1128
+ if (disposal < 0 || disposal > 3)
1129
+ // 4-7 is reserved.
1130
+ throw 'Disposal out of range.';
1131
+ let use_transparency = false;
1132
+ let transparent_index = 0;
1133
+ if (opts.transparent !== undefined && opts.transparent !== null) {
1134
+ use_transparency = true;
1135
+ transparent_index = opts.transparent;
1136
+ if (transparent_index < 0 || transparent_index >= num_colors)
1137
+ throw 'Transparent color index.';
1138
+ }
1139
+ if (disposal !== 0 || use_transparency || delay !== 0) {
1140
+ // - Graphics Control Extension
1141
+ buf[p++] = 0x21;
1142
+ buf[p++] = 0xf9; // Extension / Label.
1143
+ buf[p++] = 4; // Byte size.
1144
+ buf[p++] = (disposal << 2) | (use_transparency === true ? 1 : 0);
1145
+ buf[p++] = delay & 0xff;
1146
+ buf[p++] = (delay >> 8) & 0xff;
1147
+ buf[p++] = transparent_index; // Transparent color index.
1148
+ buf[p++] = 0; // Block Terminator.
1149
+ }
1150
+ // - Image Descriptor
1151
+ buf[p++] = 0x2c; // Image Separator.
1152
+ buf[p++] = x & 0xff;
1153
+ buf[p++] = (x >> 8) & 0xff; // Left.
1154
+ buf[p++] = y & 0xff;
1155
+ buf[p++] = (y >> 8) & 0xff; // Top.
1156
+ buf[p++] = w & 0xff;
1157
+ buf[p++] = (w >> 8) & 0xff;
1158
+ buf[p++] = h & 0xff;
1159
+ buf[p++] = (h >> 8) & 0xff;
1160
+ // NOTE: No sort flag (unused?).
1161
+ // TODO(deanm): Support interlace.
1162
+ buf[p++] = using_local_palette === true ? 0x80 | (min_code_size - 1) : 0;
1163
+ // - Local Color Table
1164
+ if (using_local_palette === true) {
1165
+ for (let i = 0, il = palette.length; i < il; ++i) {
1166
+ const rgb = palette[i];
1167
+ buf[p++] = (rgb >> 16) & 0xff;
1168
+ buf[p++] = (rgb >> 8) & 0xff;
1169
+ buf[p++] = rgb & 0xff;
1170
+ }
1171
+ }
1172
+ p = GifWriterOutputLZWCodeStream(buf, p, min_code_size < 2 ? 2 : min_code_size, indexed_pixels);
1173
+ };
1174
+ this.end = function () {
1175
+ if (ended === false) {
1176
+ buf[p++] = 0x3b; // Trailer.
1177
+ ended = true;
1178
+ }
1179
+ return p;
1180
+ };
1181
+ // Main compression routine, palette indexes -> LZW code stream.
1182
+ // |index_stream| must have at least one entry.
1183
+ function GifWriterOutputLZWCodeStream(buf, p, min_code_size, index_stream) {
1184
+ buf[p++] = min_code_size;
1185
+ let cur_subblock = p++; // Pointing at the length field.
1186
+ const clear_code = 1 << min_code_size;
1187
+ const code_mask = clear_code - 1;
1188
+ const eoi_code = clear_code + 1;
1189
+ let next_code = eoi_code + 1;
1190
+ let cur_code_size = min_code_size + 1; // Number of bits per code.
1191
+ let cur_shift = 0;
1192
+ // We have at most 12-bit codes, so we should have to hold a max of 19
1193
+ // bits here (and then we would write out).
1194
+ let cur = 0;
1195
+ function emit_bytes_to_buffer(bit_block_size) {
1196
+ while (cur_shift >= bit_block_size) {
1197
+ buf[p++] = cur & 0xff;
1198
+ cur >>= 8;
1199
+ cur_shift -= 8;
1200
+ if (p === cur_subblock + 256) {
1201
+ // Finished a subblock.
1202
+ buf[cur_subblock] = 255;
1203
+ cur_subblock = p++;
1204
+ }
1205
+ }
1206
+ }
1207
+ function emit_code(c) {
1208
+ cur |= c << cur_shift;
1209
+ cur_shift += cur_code_size;
1210
+ emit_bytes_to_buffer(8);
1211
+ }
1212
+ // I am not an expert on the topic, and I don't want to write a thesis.
1213
+ // However, it is good to outline here the basic algorithm and the few data
1214
+ // structures and optimizations here that make this implementation fast.
1215
+ // The basic idea behind LZW is to build a table of previously seen runs
1216
+ // addressed by a short id (herein called output code). All data is
1217
+ // referenced by a code, which represents one or more values from the
1218
+ // original input stream. All input bytes can be referenced as the same
1219
+ // value as an output code. So if you didn't want any compression, you
1220
+ // could more or less just output the original bytes as codes (there are
1221
+ // some details to this, but it is the idea). In order to achieve
1222
+ // compression, values greater than the input range (codes can be up to
1223
+ // 12-bit while input only 8-bit) represent a sequence of previously seen
1224
+ // inputs. The decompressor is able to build the same mapping while
1225
+ // decoding, so there is always a shared common knowledge between the
1226
+ // encoder and decoder, which is also important for "timing" aspects like
1227
+ // how to handle variable bit width code encoding.
1228
+ //
1229
+ // One obvious but very important consequence of the table system is there
1230
+ // is always a unique id (at most 12-bits) to map the runs. 'A' might be
1231
+ // 4, then 'AA' might be 10, 'AAA' 11, 'AAAA' 12, etc. This relationship
1232
+ // can be used for an efficient lookup strategy for the code mapping. We
1233
+ // need to know if a run has been seen before, and be able to map that run
1234
+ // to the output code. Since we start with known unique ids (input bytes),
1235
+ // and then from those build more unique ids (table entries), we can
1236
+ // continue this chain (almost like a linked list) to always have small
1237
+ // integer values that represent the current byte chains in the encoder.
1238
+ // This means instead of tracking the input bytes (AAAABCD) to know our
1239
+ // current state, we can track the table entry for AAAABC (it is guaranteed
1240
+ // to exist by the nature of the algorithm) and the next character D.
1241
+ // Therefore the tuple of (table_entry, byte) is guaranteed to also be
1242
+ // unique. This allows us to create a simple lookup key for mapping input
1243
+ // sequences to codes (table indices) without having to store or search
1244
+ // any of the code sequences. So if 'AAAA' has a table entry of 12, the
1245
+ // tuple of ('AAAA', K) for any input byte K will be unique, and can be our
1246
+ // key. This leads to an integer value of at most 20 bits, which can always
1247
+ // fit in an SMI value and be used as a fast sparse array / object key.
1248
+ // Output code for the current contents of the index buffer.
1249
+ let ib_code = index_stream[0] & code_mask; // Load first input index.
1250
+ let code_table = {}; // Key'd on our 20-bit "tuple".
1251
+ emit_code(clear_code); // Spec says first code should be a clear code.
1252
+ // First index already loaded, process the rest of the stream.
1253
+ for (let i = 1, il = index_stream.length; i < il; ++i) {
1254
+ const k = index_stream[i] & code_mask;
1255
+ const cur_key = (ib_code << 8) | k; // (prev, k) unique tuple.
1256
+ const cur_code = code_table[cur_key]; // buffer + k.
1257
+ // Check if we have to create a new code table entry.
1258
+ if (cur_code === undefined) {
1259
+ // We don't have buffer + k.
1260
+ // Emit index buffer (without k).
1261
+ // This is an inline version of emit_code, because this is the core
1262
+ // writing routine of the compressor (and V8 cannot inline emit_code
1263
+ // because it is a closure here in a different context). Additionally
1264
+ // we can call emit_byte_to_buffer less often, because we can have
1265
+ // 30-bits (from our 31-bit signed SMI), and we know our codes will only
1266
+ // be 12-bits, so can safely have 18-bits there without overflow.
1267
+ // emit_code(ib_code);
1268
+ cur |= ib_code << cur_shift;
1269
+ cur_shift += cur_code_size;
1270
+ while (cur_shift >= 8) {
1271
+ buf[p++] = cur & 0xff;
1272
+ cur >>= 8;
1273
+ cur_shift -= 8;
1274
+ if (p === cur_subblock + 256) {
1275
+ // Finished a subblock.
1276
+ buf[cur_subblock] = 255;
1277
+ cur_subblock = p++;
1278
+ }
1279
+ }
1280
+ if (next_code === 4096) {
1281
+ // Table full, need a clear.
1282
+ emit_code(clear_code);
1283
+ next_code = eoi_code + 1;
1284
+ cur_code_size = min_code_size + 1;
1285
+ code_table = {};
1286
+ }
1287
+ else {
1288
+ // Table not full, insert a new entry.
1289
+ // Increase our variable bit code sizes if necessary. This is a bit
1290
+ // tricky as it is based on "timing" between the encoding and
1291
+ // decoder. From the encoder's perspective this should happen after
1292
+ // we've already emitted the index buffer and are about to create the
1293
+ // first table entry that would overflow our current code bit size.
1294
+ if (next_code >= 1 << cur_code_size)
1295
+ ++cur_code_size;
1296
+ code_table[cur_key] = next_code++; // Insert into code table.
1297
+ }
1298
+ ib_code = k; // Index buffer to single input k.
1299
+ }
1300
+ else {
1301
+ ib_code = cur_code; // Index buffer to sequence in code table.
1302
+ }
1303
+ }
1304
+ emit_code(ib_code); // There will still be something in the index buffer.
1305
+ emit_code(eoi_code); // End Of Information.
1306
+ // Flush / finalize the sub-blocks stream to the buffer.
1307
+ emit_bytes_to_buffer(1);
1308
+ // Finish the sub-blocks, writing out any unfinished lengths and
1309
+ // terminating with a sub-block of length 0. If we have already started
1310
+ // but not yet used a sub-block it can just become the terminator.
1311
+ if (cur_subblock + 1 === p) {
1312
+ // Started but unused.
1313
+ buf[cur_subblock] = 0;
1314
+ }
1315
+ else {
1316
+ // Started and used, write length and additional terminator block.
1317
+ buf[cur_subblock] = p - cur_subblock - 1;
1318
+ buf[p++] = 0;
1319
+ }
1320
+ return p;
1144
1321
  }
1145
-
1146
- return p;
1147
- }
1148
1322
  }
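gifWriter above is the omggif encoder: it writes the GIF89a header and logical screen descriptor, an optional NETSCAPE2.0 looping block, then a per-frame graphics control extension, image descriptor, local color table and LZW-packed pixel data, and end() appends the trailer. A short sketch of driving it by hand, mirroring how generateGIF() below feeds it; the declare is an illustrative assumption, and the palette length must be a power of two between 2 and 256:

// Sketch only: assumes the gifWriter constructor above is in scope.
declare const gifWriter: new (
  buf: number[],
  width: number,
  height: number,
  gopts?: { loop?: number | null; palette?: number[] | null }
) => {
  addFrame(
    x: number, y: number, w: number, h: number,
    pixels: Uint8Array,
    opts?: { palette?: number[]; delay?: number; disposal?: number; transparent?: number }
  ): void;
  end(): number;
};

const buf: number[] = [];
const writer = new gifWriter(buf, 4, 4, { loop: 0 }); // loop: 0 = repeat forever

const palette = [0x000000, 0xffffff];         // 0xRRGGBB entries, length is a power of two
const pixels = new Uint8Array(4 * 4).fill(0); // every pixel = palette index 0 (black)
pixels[5] = 1;                                // one white pixel

writer.addFrame(0, 0, 4, 4, pixels, { palette, delay: 10 }); // delay is in 1/100 s
const byteLength = writer.end(); // writes the 0x3b trailer, returns bytes used
// buf[0..byteLength) now holds a complete GIF; generateGIF() base64-encodes exactly this.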
1149
-
1150
- const noop$2 = function noop() {};
1151
-
1323
+ /*
1324
+ animatedGIF.js
1325
+ ==============
1326
+ */
1327
+ /* Copyright 2017 Yahoo Inc.
1328
+ * Copyrights licensed under the MIT License. See the accompanying LICENSE file for terms.
1329
+ */
1330
+ // Dependencies
1331
+ // Helpers
1332
+ const noop$2 = function noop() { };
1152
1333
  const AnimatedGIF = function AnimatedGIF(options) {
1153
- this.canvas = null;
1154
- this.ctx = null;
1155
- this.repeat = 0;
1156
- this.frames = [];
1157
- this.numRenderedFrames = 0;
1158
- this.onRenderCompleteCallback = noop$2;
1159
- this.onRenderProgressCallback = noop$2;
1160
- this.workers = [];
1161
- this.availableWorkers = [];
1162
- this.generatingGIF = false;
1163
- this.options = options;
1164
- this.initializeWebWorkers(options);
1334
+ this.canvas = null;
1335
+ this.ctx = null;
1336
+ this.repeat = 0;
1337
+ this.frames = [];
1338
+ this.numRenderedFrames = 0;
1339
+ this.onRenderCompleteCallback = noop$2;
1340
+ this.onRenderProgressCallback = noop$2;
1341
+ this.workers = [];
1342
+ this.availableWorkers = [];
1343
+ this.generatingGIF = false;
1344
+ this.options = options;
1345
+ // Constructs and initializes the web workers appropriately
1346
+ this.initializeWebWorkers(options);
1165
1347
  };
1166
-
1167
1348
  AnimatedGIF.prototype = {
1168
- workerMethods: workerCode(),
1169
- initializeWebWorkers: function initializeWebWorkers(options) {
1170
- const self = this;
1171
- const processFrameWorkerCode = "".concat(NeuQuant.toString(), "(").concat(workerCode.toString(), "());");
1172
- let webWorkerObj = void 0;
1173
- let objectUrl = void 0;
1174
- let webWorker = void 0;
1175
- let numWorkers = void 0;
1176
- let x = -1;
1177
- let workerError = '';
1178
- numWorkers = options.numWorkers;
1179
-
1180
- while (++x < numWorkers) {
1181
- webWorkerObj = utils.createWebWorker(processFrameWorkerCode);
1182
-
1183
- if (utils.isObject(webWorkerObj)) {
1184
- objectUrl = webWorkerObj.objectUrl;
1185
- webWorker = webWorkerObj.worker;
1186
- self.workers.push({
1187
- worker: webWorker,
1188
- objectUrl
1349
+ workerMethods: workerCode(),
1350
+ initializeWebWorkers: function initializeWebWorkers(options) {
1351
+ const self = this;
1352
+ const processFrameWorkerCode = `${NeuQuant.toString()}(${workerCode.toString()}());`;
1353
+ let webWorkerObj = void 0;
1354
+ let objectUrl = void 0;
1355
+ let webWorker = void 0;
1356
+ let numWorkers = void 0;
1357
+ let x = -1;
1358
+ let workerError = '';
1359
+ numWorkers = options.numWorkers;
1360
+ while (++x < numWorkers) {
1361
+ webWorkerObj = utils.createWebWorker(processFrameWorkerCode);
1362
+ if (utils.isObject(webWorkerObj)) {
1363
+ objectUrl = webWorkerObj.objectUrl;
1364
+ webWorker = webWorkerObj.worker;
1365
+ self.workers.push({
1366
+ worker: webWorker,
1367
+ objectUrl
1368
+ });
1369
+ self.availableWorkers.push(webWorker);
1370
+ }
1371
+ else {
1372
+ workerError = webWorkerObj;
1373
+ utils.webWorkerError = Boolean(webWorkerObj);
1374
+ }
1375
+ }
1376
+ this.workerError = workerError;
1377
+ this.canvas = document.createElement('canvas');
1378
+ this.canvas.width = options.gifWidth;
1379
+ this.canvas.height = options.gifHeight;
1380
+ this.ctx = this.canvas.getContext('2d');
1381
+ this.frames = [];
1382
+ },
1383
+ // Return a worker for processing a frame
1384
+ getWorker: function getWorker() {
1385
+ return this.availableWorkers.pop();
1386
+ },
1387
+ // Restores a worker to the pool
1388
+ freeWorker: function freeWorker(worker) {
1389
+ this.availableWorkers.push(worker);
1390
+ },
1391
+ byteMap: (function () {
1392
+ const byteMap = [];
1393
+ for (let i = 0; i < 256; i++) {
1394
+ byteMap[i] = String.fromCharCode(i);
1395
+ }
1396
+ return byteMap;
1397
+ })(),
1398
+ bufferToString: function bufferToString(buffer) {
1399
+ const numberValues = buffer.length;
1400
+ let str = '';
1401
+ let x = -1;
1402
+ while (++x < numberValues) {
1403
+ str += this.byteMap[buffer[x]];
1404
+ }
1405
+ return str;
1406
+ },
1407
+ onFrameFinished: function onFrameFinished(progressCallback) {
1408
+ // The GIF is not written until we're done with all the frames
1409
+ // because they might not be processed in the same order
1410
+ const self = this;
1411
+ const frames = self.frames;
1412
+ const options = self.options;
1413
+ const hasExistingImages = Boolean((options.images || []).length);
1414
+ const allDone = frames.every(function (frame) {
1415
+ return !frame.beingProcessed && frame.done;
1189
1416
  });
1190
- self.availableWorkers.push(webWorker);
1191
- } else {
1192
- workerError = webWorkerObj;
1193
- utils.webWorkerError = Boolean(webWorkerObj);
1194
- }
1195
- }
1196
-
1197
- this.workerError = workerError;
1198
- this.canvas = document.createElement('canvas');
1199
- this.canvas.width = options.gifWidth;
1200
- this.canvas.height = options.gifHeight;
1201
- this.ctx = this.canvas.getContext('2d');
1202
- this.frames = [];
1203
- },
1204
- getWorker: function getWorker() {
1205
- return this.availableWorkers.pop();
1206
- },
1207
- freeWorker: function freeWorker(worker) {
1208
- this.availableWorkers.push(worker);
1209
- },
1210
- byteMap: function () {
1211
- const byteMap = [];
1212
-
1213
- for (let i = 0; i < 256; i++) {
1214
- byteMap[i] = String.fromCharCode(i);
1215
- }
1216
-
1217
- return byteMap;
1218
- }(),
1219
- bufferToString: function bufferToString(buffer) {
1220
- const numberValues = buffer.length;
1221
- let str = '';
1222
- let x = -1;
1223
-
1224
- while (++x < numberValues) {
1225
- str += this.byteMap[buffer[x]];
1226
- }
1227
-
1228
- return str;
1229
- },
1230
- onFrameFinished: function onFrameFinished(progressCallback) {
1231
- const self = this;
1232
- const frames = self.frames;
1233
- const options = self.options;
1234
- const hasExistingImages = Boolean((options.images || []).length);
1235
- const allDone = frames.every(function (frame) {
1236
- return !frame.beingProcessed && frame.done;
1237
- });
1238
- self.numRenderedFrames++;
1239
-
1240
- if (hasExistingImages) {
1241
- progressCallback(self.numRenderedFrames / frames.length);
1242
- }
1243
-
1244
- self.onRenderProgressCallback(self.numRenderedFrames * 0.75 / frames.length);
1245
-
1246
- if (allDone) {
1247
- if (!self.generatingGIF) {
1248
- self.generateGIF(frames, self.onRenderCompleteCallback);
1249
- }
1250
- } else {
1251
- utils.requestTimeout(function () {
1252
- self.processNextFrame();
1253
- }, 1);
1254
- }
1255
- },
1256
- processFrame: function processFrame(position) {
1257
- const AnimatedGifContext = this;
1258
- const options = this.options;
1259
- const _options = this.options;
1260
- const progressCallback = _options.progressCallback;
1261
- const sampleInterval = _options.sampleInterval;
1262
- const frames = this.frames;
1263
- let frame = void 0;
1264
- let worker = void 0;
1265
-
1266
- const done = function done() {
1267
- const ev = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
1268
- const data = ev.data;
1269
- delete frame.data;
1270
- frame.pixels = Array.prototype.slice.call(data.pixels);
1271
- frame.palette = Array.prototype.slice.call(data.palette);
1272
- frame.done = true;
1273
- frame.beingProcessed = false;
1274
- AnimatedGifContext.freeWorker(worker);
1275
- AnimatedGifContext.onFrameFinished(progressCallback);
1276
- };
1277
-
1278
- frame = frames[position];
1279
-
1280
- if (frame.beingProcessed || frame.done) {
1281
- this.onFrameFinished();
1282
- return;
1283
- }
1284
-
1285
- frame.sampleInterval = sampleInterval;
1286
- frame.beingProcessed = true;
1287
- frame.gifshot = true;
1288
- worker = this.getWorker();
1289
-
1290
- if (worker) {
1291
- worker.onmessage = done;
1292
- worker.postMessage(frame);
1293
- } else {
1294
- done({
1295
- data: AnimatedGifContext.workerMethods.run(frame)
1296
- });
1297
- }
1298
- },
1299
- startRendering: function startRendering(completeCallback) {
1300
- this.onRenderCompleteCallback = completeCallback;
1301
-
1302
- for (let i = 0; i < this.options.numWorkers && i < this.frames.length; i++) {
1303
- this.processFrame(i);
1304
- }
1305
- },
1306
- processNextFrame: function processNextFrame() {
1307
- let position = -1;
1308
-
1309
- for (let i = 0; i < this.frames.length; i++) {
1310
- const frame = this.frames[i];
1311
-
1312
- if (!frame.done && !frame.beingProcessed) {
1313
- position = i;
1314
- break;
1315
- }
1316
- }
1317
-
1318
- if (position >= 0) {
1319
- this.processFrame(position);
1320
- }
1321
- },
1322
- generateGIF: function generateGIF(frames, callback) {
1323
- const buffer = [];
1324
- const gifOptions = {
1325
- loop: this.repeat
1326
- };
1327
- const options = this.options;
1328
- const interval = options.interval;
1329
- const frameDuration = options.frameDuration;
1330
- const existingImages = options.images;
1331
- const hasExistingImages = Boolean(existingImages.length);
1332
- const height = options.gifHeight;
1333
- const width = options.gifWidth;
1334
- const gifWriter$$1 = new gifWriter(buffer, width, height, gifOptions);
1335
- const onRenderProgressCallback = this.onRenderProgressCallback;
1336
- const delay = hasExistingImages ? interval * 100 : 0;
1337
- let bufferToString = void 0;
1338
- let gif = void 0;
1339
- this.generatingGIF = true;
1340
- utils.each(frames, function (iterator, frame) {
1341
- const framePalette = frame.palette;
1342
- onRenderProgressCallback(0.75 + 0.25 * frame.position * 1.0 / frames.length);
1343
-
1344
- for (let i = 0; i < frameDuration; i++) {
1345
- gifWriter$$1.addFrame(0, 0, width, height, frame.pixels, {
1346
- palette: framePalette,
1347
- delay
1417
+ self.numRenderedFrames++;
1418
+ if (hasExistingImages) {
1419
+ progressCallback(self.numRenderedFrames / frames.length);
1420
+ }
1421
+ self.onRenderProgressCallback((self.numRenderedFrames * 0.75) / frames.length);
1422
+ if (allDone) {
1423
+ if (!self.generatingGIF) {
1424
+ self.generateGIF(frames, self.onRenderCompleteCallback);
1425
+ }
1426
+ }
1427
+ else {
1428
+ utils.requestTimeout(function () {
1429
+ self.processNextFrame();
1430
+ }, 1);
1431
+ }
1432
+ },
1433
+ processFrame: function processFrame(position) {
1434
+ const AnimatedGifContext = this;
1435
+ const options = this.options;
1436
+ const _options = this.options;
1437
+ const progressCallback = _options.progressCallback;
1438
+ const sampleInterval = _options.sampleInterval;
1439
+ const frames = this.frames;
1440
+ let frame = void 0;
1441
+ let worker = void 0;
1442
+ const done = function done() {
1443
+ const ev = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
1444
+ const data = ev.data;
1445
+ // Delete original data, and free memory
1446
+ delete frame.data;
1447
+ frame.pixels = Array.prototype.slice.call(data.pixels);
1448
+ frame.palette = Array.prototype.slice.call(data.palette);
1449
+ frame.done = true;
1450
+ frame.beingProcessed = false;
1451
+ AnimatedGifContext.freeWorker(worker);
1452
+ AnimatedGifContext.onFrameFinished(progressCallback);
1453
+ };
1454
+ frame = frames[position];
1455
+ if (frame.beingProcessed || frame.done) {
1456
+ this.onFrameFinished();
1457
+ return;
1458
+ }
1459
+ frame.sampleInterval = sampleInterval;
1460
+ frame.beingProcessed = true;
1461
+ frame.gifshot = true;
1462
+ worker = this.getWorker();
1463
+ if (worker) {
1464
+ // Process the frame in a web worker
1465
+ worker.onmessage = done;
1466
+ worker.postMessage(frame);
1467
+ }
1468
+ else {
1469
+ // Process the frame in the current thread
1470
+ done({
1471
+ data: AnimatedGifContext.workerMethods.run(frame)
1472
+ });
1473
+ }
1474
+ },
1475
+ startRendering: function startRendering(completeCallback) {
1476
+ this.onRenderCompleteCallback = completeCallback;
1477
+ for (let i = 0; i < this.options.numWorkers && i < this.frames.length; i++) {
1478
+ this.processFrame(i);
1479
+ }
1480
+ },
1481
+ processNextFrame: function processNextFrame() {
1482
+ let position = -1;
1483
+ for (let i = 0; i < this.frames.length; i++) {
1484
+ const frame = this.frames[i];
1485
+ if (!frame.done && !frame.beingProcessed) {
1486
+ position = i;
1487
+ break;
1488
+ }
1489
+ }
1490
+ if (position >= 0) {
1491
+ this.processFrame(position);
1492
+ }
1493
+ },
1494
+ // Takes the already processed data in frames and feeds it to a new
1495
+ // GifWriter instance in order to get the binary GIF file
1496
+ generateGIF: function generateGIF(frames, callback) {
1497
+ // TODO: Weird: using a simple JS array instead of a typed array,
1498
+ // the files are WAY smaller o_o. Patches/explanations welcome!
1499
+ const buffer = []; // new Uint8Array(width * height * frames.length * 5);
1500
+ const gifOptions = {
1501
+ loop: this.repeat
1502
+ };
1503
+ const options = this.options;
1504
+ const interval = options.interval;
1505
+ const frameDuration = options.frameDuration;
1506
+ const existingImages = options.images;
1507
+ const hasExistingImages = Boolean(existingImages.length);
1508
+ const height = options.gifHeight;
1509
+ const width = options.gifWidth;
1510
+ const gifWriter$$1 = new gifWriter(buffer, width, height, gifOptions);
1511
+ const onRenderProgressCallback = this.onRenderProgressCallback;
1512
+ const delay = hasExistingImages ? interval * 100 : 0;
1513
+ let bufferToString = void 0;
1514
+ let gif = void 0;
1515
+ this.generatingGIF = true;
1516
+ utils.each(frames, function (iterator, frame) {
1517
+ const framePalette = frame.palette;
1518
+ onRenderProgressCallback(0.75 + (0.25 * frame.position * 1.0) / frames.length);
1519
+ for (let i = 0; i < frameDuration; i++) {
1520
+ gifWriter$$1.addFrame(0, 0, width, height, frame.pixels, {
1521
+ palette: framePalette,
1522
+ delay
1523
+ });
1524
+ }
1525
+ });
1526
+ gifWriter$$1.end();
1527
+ onRenderProgressCallback(1.0);
1528
+ this.frames = [];
1529
+ this.generatingGIF = false;
1530
+ if (utils.isFunction(callback)) {
1531
+ bufferToString = this.bufferToString(buffer);
1532
+ gif = `data:image/gif;base64,${utils.btoa(bufferToString)}`;
1533
+ callback(gif);
1534
+ }
1535
+ },
1536
+ // From GIF: 0 = loop forever, null = not looping, n > 0 = loop n times and stop
1537
+ setRepeat: function setRepeat(r) {
1538
+ this.repeat = r;
1539
+ },
1540
+ addFrame: function addFrame(element, gifshotOptions) {
1541
+ gifshotOptions = utils.isObject(gifshotOptions) ? gifshotOptions : {};
1542
+ const self = this;
1543
+ const ctx = self.ctx;
1544
+ const options = self.options;
1545
+ const width = options.gifWidth;
1546
+ const height = options.gifHeight;
1547
+ const fontSize = utils.getFontSize(gifshotOptions);
1548
+ const _gifshotOptions = gifshotOptions;
1549
+ const filter = _gifshotOptions.filter;
1550
+ const fontColor = _gifshotOptions.fontColor;
1551
+ const fontFamily = _gifshotOptions.fontFamily;
1552
+ const fontWeight = _gifshotOptions.fontWeight;
1553
+ const gifHeight = _gifshotOptions.gifHeight;
1554
+ const gifWidth = _gifshotOptions.gifWidth;
1555
+ const text = _gifshotOptions.text;
1556
+ const textAlign = _gifshotOptions.textAlign;
1557
+ const textBaseline = _gifshotOptions.textBaseline;
1558
+ const textXCoordinate = gifshotOptions.textXCoordinate
1559
+ ? gifshotOptions.textXCoordinate
1560
+ : textAlign === 'left'
1561
+ ? 1
1562
+ : textAlign === 'right'
1563
+ ? width
1564
+ : width / 2;
1565
+ const textYCoordinate = gifshotOptions.textYCoordinate
1566
+ ? gifshotOptions.textYCoordinate
1567
+ : textBaseline === 'top'
1568
+ ? 1
1569
+ : textBaseline === 'center'
1570
+ ? height / 2
1571
+ : height;
1572
+ const font = `${fontWeight} ${fontSize} ${fontFamily}`;
1573
+ let imageData = void 0;
1574
+ try {
1575
+ ctx.filter = filter;
1576
+ ctx.drawImage(element, 0, 0, width, height);
1577
+ if (text) {
1578
+ ctx.font = font;
1579
+ ctx.fillStyle = fontColor;
1580
+ ctx.textAlign = textAlign;
1581
+ ctx.textBaseline = textBaseline;
1582
+ ctx.fillText(text, textXCoordinate, textYCoordinate);
1583
+ }
1584
+ imageData = ctx.getImageData(0, 0, width, height);
1585
+ self.addFrameImageData(imageData);
1586
+ }
1587
+ catch (e) {
1588
+ return `${e}`;
1589
+ }
1590
+ },
1591
+ addFrameImageData: function addFrameImageData() {
1592
+ const imageData = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
1593
+ const frames = this.frames;
1594
+ const imageDataArray = imageData.data;
1595
+ this.frames.push({
1596
+ data: imageDataArray,
1597
+ width: imageData.width,
1598
+ height: imageData.height,
1599
+ palette: null,
1600
+ dithering: null,
1601
+ done: false,
1602
+ beingProcessed: false,
1603
+ position: frames.length
1604
+ });
1605
+ },
1606
+ onRenderProgress: function onRenderProgress(callback) {
1607
+ this.onRenderProgressCallback = callback;
1608
+ },
1609
+ isRendering: function isRendering() {
1610
+ return this.generatingGIF;
1611
+ },
1612
+ getBase64GIF: function getBase64GIF(completeCallback) {
1613
+ const self = this;
1614
+ const onRenderComplete = function onRenderComplete(gif) {
1615
+ self.destroyWorkers();
1616
+ utils.requestTimeout(function () {
1617
+ completeCallback(gif);
1618
+ }, 0);
1619
+ };
1620
+ self.startRendering(onRenderComplete);
1621
+ },
1622
+ destroyWorkers: function destroyWorkers() {
1623
+ if (this.workerError) {
1624
+ return;
1625
+ }
1626
+ const workers = this.workers;
1627
+ // Explicitly ask web workers to die so they are explicitly GC'ed
1628
+ utils.each(workers, function (iterator, workerObj) {
1629
+ const worker = workerObj.worker;
1630
+ const objectUrl = workerObj.objectUrl;
1631
+ worker.terminate();
1632
+ utils.URL.revokeObjectURL(objectUrl);
1348
1633
  });
1349
- }
1350
- });
1351
- gifWriter$$1.end();
1352
- onRenderProgressCallback(1.0);
1353
- this.frames = [];
1354
- this.generatingGIF = false;
1355
-
1356
- if (utils.isFunction(callback)) {
1357
- bufferToString = this.bufferToString(buffer);
1358
- gif = "data:image/gif;base64,".concat(utils.btoa(bufferToString));
1359
- callback(gif);
1360
- }
1361
- },
1362
- setRepeat: function setRepeat(r) {
1363
- this.repeat = r;
1364
- },
1365
- addFrame: function addFrame(element, gifshotOptions) {
1366
- gifshotOptions = utils.isObject(gifshotOptions) ? gifshotOptions : {};
1367
- const self = this;
1368
- const ctx = self.ctx;
1369
- const options = self.options;
1370
- const width = options.gifWidth;
1371
- const height = options.gifHeight;
1372
- const fontSize = utils.getFontSize(gifshotOptions);
1373
- const _gifshotOptions = gifshotOptions;
1374
- const filter = _gifshotOptions.filter;
1375
- const fontColor = _gifshotOptions.fontColor;
1376
- const fontFamily = _gifshotOptions.fontFamily;
1377
- const fontWeight = _gifshotOptions.fontWeight;
1378
- const gifHeight = _gifshotOptions.gifHeight;
1379
- const gifWidth = _gifshotOptions.gifWidth;
1380
- const text = _gifshotOptions.text;
1381
- const textAlign = _gifshotOptions.textAlign;
1382
- const textBaseline = _gifshotOptions.textBaseline;
1383
- const textXCoordinate = gifshotOptions.textXCoordinate ? gifshotOptions.textXCoordinate : textAlign === 'left' ? 1 : textAlign === 'right' ? width : width / 2;
1384
- const textYCoordinate = gifshotOptions.textYCoordinate ? gifshotOptions.textYCoordinate : textBaseline === 'top' ? 1 : textBaseline === 'center' ? height / 2 : height;
1385
- const font = "".concat(fontWeight, " ").concat(fontSize, " ").concat(fontFamily);
1386
- let imageData = void 0;
1387
-
1388
- try {
1389
- ctx.filter = filter;
1390
- ctx.drawImage(element, 0, 0, width, height);
1391
-
1392
- if (text) {
1393
- ctx.font = font;
1394
- ctx.fillStyle = fontColor;
1395
- ctx.textAlign = textAlign;
1396
- ctx.textBaseline = textBaseline;
1397
- ctx.fillText(text, textXCoordinate, textYCoordinate);
1398
- }
1399
-
1400
- imageData = ctx.getImageData(0, 0, width, height);
1401
- self.addFrameImageData(imageData);
1402
- } catch (e) {
1403
- return "".concat(e);
1404
- }
1405
- },
1406
- addFrameImageData: function addFrameImageData() {
1407
- const imageData = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
1408
- const frames = this.frames;
1409
- const imageDataArray = imageData.data;
1410
- this.frames.push({
1411
- data: imageDataArray,
1412
- width: imageData.width,
1413
- height: imageData.height,
1414
- palette: null,
1415
- dithering: null,
1416
- done: false,
1417
- beingProcessed: false,
1418
- position: frames.length
1419
- });
1420
- },
1421
- onRenderProgress: function onRenderProgress(callback) {
1422
- this.onRenderProgressCallback = callback;
1423
- },
1424
- isRendering: function isRendering() {
1425
- return this.generatingGIF;
1426
- },
1427
- getBase64GIF: function getBase64GIF(completeCallback) {
1428
- const self = this;
1429
-
1430
- const onRenderComplete = function onRenderComplete(gif) {
1431
- self.destroyWorkers();
1432
- utils.requestTimeout(function () {
1433
- completeCallback(gif);
1434
- }, 0);
1435
- };
1436
-
1437
- self.startRendering(onRenderComplete);
1438
- },
1439
- destroyWorkers: function destroyWorkers() {
1440
- if (this.workerError) {
1441
- return;
1442
1634
  }
1443
-
1444
- const workers = this.workers;
1445
- utils.each(workers, function (iterator, workerObj) {
1446
- const worker = workerObj.worker;
1447
- const objectUrl = workerObj.objectUrl;
1448
- worker.terminate();
1449
- utils.URL.revokeObjectURL(objectUrl);
1450
- });
1451
- }
1452
1635
  };
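Taken together, the constructor and prototype above implement the frame pipeline: addFrame() draws a source element onto an internal canvas and queues its ImageData, processFrame() quantizes each frame in a pooled Web Worker (or inline when worker creation failed), and once every frame is done generateGIF() streams them through gifWriter and base64-encodes the result. A minimal usage sketch; the option values are illustrative and the declare/cast exist only because the bundle defines AnimatedGIF as a plain constructor function:

// Sketch only: assumes the AnimatedGIF constructor above is in scope.
declare const AnimatedGIF: any;

const ag = new AnimatedGIF({
  gifWidth: 200,
  gifHeight: 200,
  numWorkers: 2,
  interval: 0.1,      // seconds between frames; with pre-existing images this becomes delay = interval * 100 (1/100 s)
  frameDuration: 1,   // how many times each frame is written into the output
  sampleInterval: 10, // NeuQuant sampling factor
  images: [],         // no pre-existing images; frames are added manually below
  progressCallback: (p: number) => console.log('progress', p)
});

ag.setRepeat(0); // 0 = loop forever (see setRepeat comment above)
for (const img of Array.from(document.images)) {
  ag.addFrame(img, {}); // draws the element onto the internal canvas and queues the frame
}
ag.getBase64GIF((gif: string) => {
  console.log(gif.slice(0, 22)); // "data:image/gif;base64,"
});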
1453
-
1636
+ /*
1637
+ getBase64GIF.js
1638
+ ===============
1639
+ */
1640
+ /* Copyright 2017 Yahoo Inc.
1641
+ * Copyrights licensed under the MIT License. See the accompanying LICENSE file for terms.
1642
+ */
1454
1643
  function getBase64GIF(animatedGifInstance, callback) {
1455
- animatedGifInstance.getBase64GIF(function (image) {
1456
- callback({
1457
- error: false,
1458
- errorCode: '',
1459
- errorMsg: '',
1460
- image
1644
+ // This is asynchronous, rendered with WebWorkers
1645
+ animatedGifInstance.getBase64GIF(function (image) {
1646
+ callback({
1647
+ error: false,
1648
+ errorCode: '',
1649
+ errorMsg: '',
1650
+ image
1651
+ });
1461
1652
  });
1462
- });
1463
1653
  }
1464
-
1654
+ /*
1655
+ existingImages.js
1656
+ =================
1657
+ */
1658
+ /* Copyright 2017 Yahoo Inc.
1659
+ * Copyrights licensed under the MIT License. See the accompanying LICENSE file for terms.
1660
+ */
1465
1661
  function existingImages() {
1466
- const obj = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
1467
- const self = this;
1468
- const callback = obj.callback;
1469
- const images = obj.images;
1470
- const options = obj.options;
1471
- let imagesLength = obj.imagesLength;
1472
- const skipObj = {
1473
- getUserMedia: true,
1474
- 'window.URL': true
1475
- };
1476
- const errorObj = error.validate(skipObj);
1477
- const loadedImages = [];
1478
- let loadedImagesLength = 0;
1479
- let tempImage = void 0;
1480
- let ag = void 0;
1481
-
1482
- if (errorObj.error) {
1483
- return callback(errorObj);
1484
- }
1485
-
1486
- ag = new AnimatedGIF(options);
1487
- utils.each(images, function (index, image) {
1488
- const currentImage = image;
1489
-
1490
- if (utils.isElement(currentImage)) {
1491
- if (options.crossOrigin) {
1492
- currentImage.crossOrigin = options.crossOrigin;
1493
- }
1494
-
1495
- loadedImages[index] = currentImage;
1496
- loadedImagesLength += 1;
1497
-
1498
- if (loadedImagesLength === imagesLength) {
1499
- addLoadedImagesToGif();
1500
- }
1501
- } else if (utils.isString(currentImage)) {
1502
- tempImage = new Image();
1503
-
1504
- if (options.crossOrigin) {
1505
- tempImage.crossOrigin = options.crossOrigin;
1506
- }
1507
-
1508
- (function (tempImage) {
1509
- if (image.text) {
1510
- tempImage.text = image.text;
1511
- }
1512
-
1513
- tempImage.onerror = function (e) {
1514
- let obj = void 0;
1515
- --imagesLength;
1516
-
1517
- if (imagesLength === 0) {
1518
- obj = {};
1519
- obj.error = 'None of the requested images was capable of being retrieved';
1520
- return callback(obj);
1521
- }
1522
- };
1523
-
1524
- tempImage.onload = function (e) {
1525
- if (image.text) {
1526
- loadedImages[index] = {
1527
- img: tempImage,
1528
- text: tempImage.text
1529
- };
1530
- } else {
1531
- loadedImages[index] = tempImage;
1532
- }
1533
-
1534
- loadedImagesLength += 1;
1535
-
1536
- if (loadedImagesLength === imagesLength) {
1537
- addLoadedImagesToGif();
1538
- }
1539
-
1540
- utils.removeElement(tempImage);
1541
- };
1542
-
1543
- tempImage.src = currentImage;
1544
- })(tempImage);
1545
-
1546
- utils.setCSSAttr(tempImage, {
1547
- position: 'fixed',
1548
- opacity: '0'
1549
- });
1550
- document.body.appendChild(tempImage);
1551
- }
1552
- });
1553
-
1554
- function addLoadedImagesToGif() {
1555
- utils.each(loadedImages, function (index, loadedImage) {
1556
- if (loadedImage) {
1557
- if (loadedImage.text) {
1558
- ag.addFrame(loadedImage.img, options, loadedImage.text);
1559
- } else {
1560
- ag.addFrame(loadedImage, options);
1561
- }
1562
- }
1662
+ const obj = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
1663
+ const self = this;
1664
+ const callback = obj.callback;
1665
+ const images = obj.images;
1666
+ const options = obj.options;
1667
+ let imagesLength = obj.imagesLength;
1668
+ const skipObj = {
1669
+ getUserMedia: true,
1670
+ 'globalThis.URL': true
1671
+ };
1672
+ const errorObj = error.validate(skipObj);
1673
+ const loadedImages = [];
1674
+ let loadedImagesLength = 0;
1675
+ let tempImage = void 0;
1676
+ let ag = void 0;
1677
+ if (errorObj.error) {
1678
+ return callback(errorObj);
1679
+ }
1680
+ // change workerPath to point to where Animated_GIF.worker.js is
1681
+ ag = new AnimatedGIF(options);
1682
+ utils.each(images, function (index, image) {
1683
+ const currentImage = image;
1684
+ // if (image.src) {
1685
+ // currentImage = currentImage.src;
1686
+ // }
1687
+ if (utils.isElement(currentImage)) {
1688
+ if (options.crossOrigin) {
1689
+ currentImage.crossOrigin = options.crossOrigin;
1690
+ }
1691
+ loadedImages[index] = currentImage;
1692
+ loadedImagesLength += 1;
1693
+ if (loadedImagesLength === imagesLength) {
1694
+ addLoadedImagesToGif();
1695
+ }
1696
+ }
1697
+ else if (utils.isString(currentImage)) {
1698
+ tempImage = new Image();
1699
+ if (options.crossOrigin) {
1700
+ tempImage.crossOrigin = options.crossOrigin;
1701
+ }
1702
+ (function (tempImage) {
1703
+ if (image.text) {
1704
+ tempImage.text = image.text;
1705
+ }
1706
+ tempImage.onerror = function (e) {
1707
+ let obj = void 0;
1708
+ --imagesLength; // skips over images that error out
1709
+ if (imagesLength === 0) {
1710
+ obj = {};
1711
+ obj.error = 'None of the requested images was capable of being retrieved';
1712
+ return callback(obj);
1713
+ }
1714
+ };
1715
+ tempImage.onload = function (e) {
1716
+ if (image.text) {
1717
+ loadedImages[index] = {
1718
+ img: tempImage,
1719
+ text: tempImage.text
1720
+ };
1721
+ }
1722
+ else {
1723
+ loadedImages[index] = tempImage;
1724
+ }
1725
+ loadedImagesLength += 1;
1726
+ if (loadedImagesLength === imagesLength) {
1727
+ addLoadedImagesToGif();
1728
+ }
1729
+ utils.removeElement(tempImage);
1730
+ };
1731
+ tempImage.src = currentImage;
1732
+ })(tempImage);
1733
+ utils.setCSSAttr(tempImage, {
1734
+ position: 'fixed',
1735
+ opacity: '0'
1736
+ });
1737
+ document.body.appendChild(tempImage);
1738
+ }
1563
1739
  });
1564
- getBase64GIF(ag, callback);
1565
- }
1740
+ function addLoadedImagesToGif() {
1741
+ utils.each(loadedImages, function (index, loadedImage) {
1742
+ if (loadedImage) {
1743
+ if (loadedImage.text) {
1744
+ ag.addFrame(loadedImage.img, options, loadedImage.text);
1745
+ }
1746
+ else {
1747
+ ag.addFrame(loadedImage, options);
1748
+ }
1749
+ }
1750
+ });
1751
+ getBase64GIF(ag, callback);
1752
+ }
1566
1753
  }
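existingImages() above is the entry point used when the caller supplies ready-made images rather than a camera or video stream: DOM elements are used directly, string sources are loaded through hidden img elements (load failures decrement imagesLength), and once everything has loaded each image becomes one AnimatedGIF frame. A sketch of the argument shape it consumes; the declare and the option values are illustrative assumptions:

// Sketch only: assumes the existingImages() function above is in scope.
declare function existingImages(obj: {
  images: Array<string | HTMLImageElement>;
  imagesLength: number;
  options: Record<string, any>;
  callback: (result: any) => void;
}): void;

const options = {
  gifWidth: 200,
  gifHeight: 200,
  numWorkers: 2,
  interval: 0.1,
  frameDuration: 1,
  sampleInterval: 10,
  images: ['frame-1.png', 'frame-2.png'], // generateGIF() also reads options.images
  crossOrigin: 'anonymous',
  progressCallback: () => {}
};

existingImages({
  images: options.images,
  imagesLength: options.images.length,
  options,
  callback: (result) => {
    if (result.error) {
      // e.g. 'None of the requested images was capable of being retrieved'
      console.error(result.error);
      return;
    }
    console.log(result.image); // data:image/gif;base64,...
  }
});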
1567
-
1568
- const noop$3 = function noop() {};
1569
-
1754
+ /*
1755
+ screenShot.js
1756
+ =============
1757
+ */
1758
+ /* Copyright 2017 Yahoo Inc.
1759
+ * Copyrights licensed under the MIT License. See the accompanying LICENSE file for terms.
1760
+ */
1761
+ // Dependencies
1762
+ // Helpers
1763
+ const noop$3 = function noop() { };
1570
1764
  const screenShot = {
1571
- getGIF: function getGIF() {
1572
- const options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
1573
- let callback = arguments[1];
1574
- callback = utils.isFunction(callback) ? callback : noop$3;
1575
- const canvas = document.createElement('canvas');
1576
- let context = void 0;
1577
- const existingImages = options.images;
1578
- const hasExistingImages = Boolean(existingImages.length);
1579
- const cameraStream = options.cameraStream;
1580
- const crop = options.crop;
1581
- const filter = options.filter;
1582
- const fontColor = options.fontColor;
1583
- const fontFamily = options.fontFamily;
1584
- const fontWeight = options.fontWeight;
1585
- const keepCameraOn = options.keepCameraOn;
1586
- const numWorkers = options.numWorkers;
1587
- const progressCallback = options.progressCallback;
1588
- const saveRenderingContexts = options.saveRenderingContexts;
1589
- const savedRenderingContexts = options.savedRenderingContexts;
1590
- const text = options.text;
1591
- const textAlign = options.textAlign;
1592
- const textBaseline = options.textBaseline;
1593
- const videoElement = options.videoElement;
1594
- const videoHeight = options.videoHeight;
1595
- const videoWidth = options.videoWidth;
1596
- const webcamVideoElement = options.webcamVideoElement;
1597
- const gifWidth = Number(options.gifWidth);
1598
- const gifHeight = Number(options.gifHeight);
1599
- let interval = Number(options.interval);
1600
- const sampleInterval = Number(options.sampleInterval);
1601
- const waitBetweenFrames = hasExistingImages ? 0 : interval * 1000;
1602
- const renderingContextsToSave = [];
1603
- let numFrames = savedRenderingContexts.length ? savedRenderingContexts.length : options.numFrames;
1604
- let pendingFrames = numFrames;
1605
- const ag = new AnimatedGIF(options);
1606
- const fontSize = utils.getFontSize(options);
1607
- const textXCoordinate = options.textXCoordinate ? options.textXCoordinate : textAlign === 'left' ? 1 : textAlign === 'right' ? gifWidth : gifWidth / 2;
1608
- const textYCoordinate = options.textYCoordinate ? options.textYCoordinate : textBaseline === 'top' ? 1 : textBaseline === 'center' ? gifHeight / 2 : gifHeight;
1609
- const font = "".concat(fontWeight, " ").concat(fontSize, " ").concat(fontFamily);
1610
- let sourceX = crop ? Math.floor(crop.scaledWidth / 2) : 0;
1611
- let sourceWidth = crop ? videoWidth - crop.scaledWidth : 0;
1612
- let sourceY = crop ? Math.floor(crop.scaledHeight / 2) : 0;
1613
- let sourceHeight = crop ? videoHeight - crop.scaledHeight : 0;
1614
-
1615
- const captureFrames = function captureSingleFrame() {
1616
- const framesLeft = pendingFrames - 1;
1617
-
1618
- if (savedRenderingContexts.length) {
1619
- context.putImageData(savedRenderingContexts[numFrames - pendingFrames], 0, 0);
1620
- finishCapture();
1621
- } else {
1622
- drawVideo();
1623
- }
1624
-
1625
- function drawVideo() {
1626
- try {
1627
- if (sourceWidth > videoWidth) {
1628
- sourceWidth = videoWidth;
1629
- }
1630
-
1631
- if (sourceHeight > videoHeight) {
1632
- sourceHeight = videoHeight;
1633
- }
1634
-
1635
- if (sourceX < 0) {
1636
- sourceX = 0;
1637
- }
1638
-
1639
- if (sourceY < 0) {
1640
- sourceY = 0;
1641
- }
1642
-
1643
- context.filter = filter;
1644
- context.drawImage(videoElement, sourceX, sourceY, sourceWidth, sourceHeight, 0, 0, gifWidth, gifHeight);
1645
- finishCapture();
1646
- } catch (e) {
1647
- if (e.name === 'NS_ERROR_NOT_AVAILABLE') {
1648
- utils.requestTimeout(drawVideo, 100);
1649
- } else {
1650
- throw e;
1651
- }
1652
- }
1653
- }
1654
-
1655
- function finishCapture() {
1656
- let imageData = void 0;
1657
-
1658
- if (saveRenderingContexts) {
1659
- renderingContextsToSave.push(context.getImageData(0, 0, gifWidth, gifHeight));
1660
- }
1661
-
1662
- if (text) {
1663
- context.font = font;
1664
- context.fillStyle = fontColor;
1665
- context.textAlign = textAlign;
1666
- context.textBaseline = textBaseline;
1667
- context.fillText(text, textXCoordinate, textYCoordinate);
1668
- }
1669
-
1670
- imageData = context.getImageData(0, 0, gifWidth, gifHeight);
1671
- ag.addFrameImageData(imageData);
1672
- pendingFrames = framesLeft;
1673
- progressCallback((numFrames - pendingFrames) / numFrames);
1674
-
1675
- if (framesLeft > 0) {
1676
- utils.requestTimeout(captureSingleFrame, waitBetweenFrames);
1677
- }
1678
-
1679
- if (!pendingFrames) {
1680
- ag.getBase64GIF(function (image) {
1681
- callback({
1682
- error: false,
1683
- errorCode: '',
1684
- errorMsg: '',
1685
- image,
1686
- cameraStream,
1687
- videoElement,
1688
- webcamVideoElement,
1689
- savedRenderingContexts: renderingContextsToSave,
1690
- keepCameraOn
1691
- });
1692
- });
1765
+ getGIF: function getGIF() {
1766
+ const options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
1767
+ let callback = arguments[1];
1768
+ callback = utils.isFunction(callback) ? callback : noop$3;
1769
+ const canvas = document.createElement('canvas');
1770
+ let context = void 0;
1771
+ const existingImages = options.images;
1772
+ const hasExistingImages = Boolean(existingImages.length);
1773
+ const cameraStream = options.cameraStream;
1774
+ const crop = options.crop;
1775
+ const filter = options.filter;
1776
+ const fontColor = options.fontColor;
1777
+ const fontFamily = options.fontFamily;
1778
+ const fontWeight = options.fontWeight;
1779
+ const keepCameraOn = options.keepCameraOn;
1780
+ const numWorkers = options.numWorkers;
1781
+ const progressCallback = options.progressCallback;
1782
+ const saveRenderingContexts = options.saveRenderingContexts;
1783
+ const savedRenderingContexts = options.savedRenderingContexts;
1784
+ const text = options.text;
1785
+ const textAlign = options.textAlign;
1786
+ const textBaseline = options.textBaseline;
1787
+ const videoElement = options.videoElement;
1788
+ const videoHeight = options.videoHeight;
1789
+ const videoWidth = options.videoWidth;
1790
+ const webcamVideoElement = options.webcamVideoElement;
1791
+ const gifWidth = Number(options.gifWidth);
1792
+ const gifHeight = Number(options.gifHeight);
1793
+ let interval = Number(options.interval);
1794
+ const sampleInterval = Number(options.sampleInterval);
1795
+ const waitBetweenFrames = hasExistingImages ? 0 : interval * 1000;
1796
+ const renderingContextsToSave = [];
1797
+ let numFrames = savedRenderingContexts.length
1798
+ ? savedRenderingContexts.length
1799
+ : options.numFrames;
1800
+ let pendingFrames = numFrames;
1801
+ const ag = new AnimatedGIF(options);
1802
+ const fontSize = utils.getFontSize(options);
1803
+ const textXCoordinate = options.textXCoordinate
1804
+ ? options.textXCoordinate
1805
+ : textAlign === 'left'
1806
+ ? 1
1807
+ : textAlign === 'right'
1808
+ ? gifWidth
1809
+ : gifWidth / 2;
1810
+ const textYCoordinate = options.textYCoordinate
1811
+ ? options.textYCoordinate
1812
+ : textBaseline === 'top'
1813
+ ? 1
1814
+ : textBaseline === 'center'
1815
+ ? gifHeight / 2
1816
+ : gifHeight;
1817
+ const font = `${fontWeight} ${fontSize} ${fontFamily}`;
1818
+ let sourceX = crop ? Math.floor(crop.scaledWidth / 2) : 0;
1819
+ let sourceWidth = crop ? videoWidth - crop.scaledWidth : 0;
1820
+ let sourceY = crop ? Math.floor(crop.scaledHeight / 2) : 0;
1821
+ let sourceHeight = crop ? videoHeight - crop.scaledHeight : 0;
1822
+ const captureFrames = function captureSingleFrame() {
1823
+ const framesLeft = pendingFrames - 1;
1824
+ if (savedRenderingContexts.length) {
1825
+ context.putImageData(savedRenderingContexts[numFrames - pendingFrames], 0, 0);
1826
+ finishCapture();
1827
+ }
1828
+ else {
1829
+ drawVideo();
1830
+ }
1831
+ function drawVideo() {
1832
+ try {
1833
+ // Makes sure the canvas video heights/widths are in bounds
1834
+ if (sourceWidth > videoWidth) {
1835
+ sourceWidth = videoWidth;
1836
+ }
1837
+ if (sourceHeight > videoHeight) {
1838
+ sourceHeight = videoHeight;
1839
+ }
1840
+ if (sourceX < 0) {
1841
+ sourceX = 0;
1842
+ }
1843
+ if (sourceY < 0) {
1844
+ sourceY = 0;
1845
+ }
1846
+ context.filter = filter;
1847
+ context.drawImage(videoElement, sourceX, sourceY, sourceWidth, sourceHeight, 0, 0, gifWidth, gifHeight);
1848
+ finishCapture();
1849
+ }
1850
+ catch (e) {
1851
+ // There is a Firefox bug that sometimes throws NS_ERROR_NOT_AVAILABLE
1852
+ // and IndexSizeError errors when drawing a video element to the canvas
1853
+ if (e.name === 'NS_ERROR_NOT_AVAILABLE') {
1854
+ // Wait 100ms before trying again
1855
+ utils.requestTimeout(drawVideo, 100);
1856
+ }
1857
+ else {
1858
+ throw e;
1859
+ }
1860
+ }
1861
+ }
1862
+ function finishCapture() {
1863
+ let imageData = void 0;
1864
+ if (saveRenderingContexts) {
1865
+ renderingContextsToSave.push(context.getImageData(0, 0, gifWidth, gifHeight));
1866
+ }
1867
+ // If there is text to display, make sure to display it on the canvas after the image is drawn
1868
+ if (text) {
1869
+ context.font = font;
1870
+ context.fillStyle = fontColor;
1871
+ context.textAlign = textAlign;
1872
+ context.textBaseline = textBaseline;
1873
+ context.fillText(text, textXCoordinate, textYCoordinate);
1874
+ }
1875
+ imageData = context.getImageData(0, 0, gifWidth, gifHeight);
1876
+ ag.addFrameImageData(imageData);
1877
+ pendingFrames = framesLeft;
1878
+ // Call back with an r value indicating how far along we are in capture
1879
+ progressCallback((numFrames - pendingFrames) / numFrames);
1880
+ if (framesLeft > 0) {
1881
+ // test
1882
+ utils.requestTimeout(captureSingleFrame, waitBetweenFrames);
1883
+ }
1884
+ if (!pendingFrames) {
1885
+ ag.getBase64GIF(function (image) {
1886
+ callback({
1887
+ error: false,
1888
+ errorCode: '',
1889
+ errorMsg: '',
1890
+ image,
1891
+ cameraStream,
1892
+ videoElement,
1893
+ webcamVideoElement,
1894
+ savedRenderingContexts: renderingContextsToSave,
1895
+ keepCameraOn
1896
+ });
1897
+ });
1898
+ }
1899
+ }
1900
+ };
1901
+ numFrames = numFrames !== undefined ? numFrames : 10;
1902
+ interval = interval !== undefined ? interval : 0.1; // In seconds
1903
+ canvas.width = gifWidth;
1904
+ canvas.height = gifHeight;
1905
+ context = canvas.getContext('2d');
1906
+ (function capture() {
1907
+ if (!savedRenderingContexts.length && videoElement.currentTime === 0) {
1908
+ utils.requestTimeout(capture, 100);
1909
+ return;
1910
+ }
1911
+ captureFrames();
1912
+ })();
1913
+ },
1914
+ getCropDimensions: function getCropDimensions() {
1915
+ const obj = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
1916
+ const width = obj.videoWidth;
1917
+ const height = obj.videoHeight;
1918
+ const gifWidth = obj.gifWidth;
1919
+ const gifHeight = obj.gifHeight;
1920
+ const result = {
1921
+ width: 0,
1922
+ height: 0,
1923
+ scaledWidth: 0,
1924
+ scaledHeight: 0
1925
+ };
1926
+ if (width > height) {
1927
+ result.width = Math.round(width * (gifHeight / height)) - gifWidth;
1928
+ result.scaledWidth = Math.round(result.width * (height / gifHeight));
1693
1929
  }
1694
- }
1695
- };
1696
-
1697
- numFrames = numFrames !== undefined ? numFrames : 10;
1698
- interval = interval !== undefined ? interval : 0.1;
1699
- canvas.width = gifWidth;
1700
- canvas.height = gifHeight;
1701
- context = canvas.getContext('2d');
1702
-
1703
- (function capture() {
1704
- if (!savedRenderingContexts.length && videoElement.currentTime === 0) {
1705
- utils.requestTimeout(capture, 100);
1706
- return;
1707
- }
1708
-
1709
- captureFrames();
1710
- })();
1711
- },
1712
- getCropDimensions: function getCropDimensions() {
1713
- const obj = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
1714
- const width = obj.videoWidth;
1715
- const height = obj.videoHeight;
1716
- const gifWidth = obj.gifWidth;
1717
- const gifHeight = obj.gifHeight;
1718
- const result = {
1719
- width: 0,
1720
- height: 0,
1721
- scaledWidth: 0,
1722
- scaledHeight: 0
1723
- };
1724
-
1725
- if (width > height) {
1726
- result.width = Math.round(width * (gifHeight / height)) - gifWidth;
1727
- result.scaledWidth = Math.round(result.width * (height / gifHeight));
1728
- } else {
1729
- result.height = Math.round(height * (gifWidth / width)) - gifHeight;
1730
- result.scaledHeight = Math.round(result.height * (width / gifWidth));
1930
+ else {
1931
+ result.height = Math.round(height * (gifWidth / width)) - gifHeight;
1932
+ result.scaledHeight = Math.round(result.height * (width / gifWidth));
1933
+ }
1934
+ return result;
1731
1935
  }
1732
-
1733
- return result;
1734
- }
1735
1936
  };
1937
+ /*
1938
+ videoStream.js
1939
+ ==============
1940
+ */
1941
+ /* Copyright 2017 Yahoo Inc.
1942
+ * Copyrights licensed under the MIT License. See the accompanying LICENSE file for terms.
1943
+ */
1944
+ // Dependencies
1736
1945
  var videoStream = {
1737
- loadedData: false,
1738
- defaultVideoDimensions: {
1739
- width: 640,
1740
- height: 480
1741
- },
1742
- findVideoSize: function findVideoSizeMethod(obj) {
1743
- findVideoSizeMethod.attempts = findVideoSizeMethod.attempts || 0;
1744
- const cameraStream = obj.cameraStream;
1745
- const completedCallback = obj.completedCallback;
1746
- const videoElement = obj.videoElement;
1747
-
1748
- if (!videoElement) {
1749
- return;
1750
- }
1751
-
1752
- if (videoElement.videoWidth > 0 && videoElement.videoHeight > 0) {
1753
- videoElement.removeEventListener('loadeddata', videoStream.findVideoSize);
1754
- completedCallback({
1755
- videoElement,
1756
- cameraStream,
1757
- videoWidth: videoElement.videoWidth,
1758
- videoHeight: videoElement.videoHeight
1759
- });
1760
- } else if (findVideoSizeMethod.attempts < 10) {
1761
- findVideoSizeMethod.attempts += 1;
1762
- utils.requestTimeout(function () {
1763
- videoStream.findVideoSize(obj);
1764
- }, 400);
1765
- } else {
1766
- completedCallback({
1767
- videoElement,
1768
- cameraStream,
1769
- videoWidth: videoStream.defaultVideoDimensions.width,
1770
- videoHeight: videoStream.defaultVideoDimensions.height
1771
- });
1772
- }
1773
- },
1774
- onStreamingTimeout: function onStreamingTimeout(callback) {
1775
- if (utils.isFunction(callback)) {
1776
- callback({
1777
- error: true,
1778
- errorCode: 'getUserMedia',
1779
- errorMsg: 'There was an issue with the getUserMedia API - Timed out while trying to start streaming',
1780
- image: null,
1781
- cameraStream: {}
1782
- });
1783
- }
1784
- },
1785
- stream: function stream(obj) {
1786
- const existingVideo = utils.isArray(obj.existingVideo) ? obj.existingVideo[0] : obj.existingVideo;
1787
- const cameraStream = obj.cameraStream;
1788
- const completedCallback = obj.completedCallback;
1789
- const streamedCallback = obj.streamedCallback;
1790
- const videoElement = obj.videoElement;
1791
-
1792
- if (utils.isFunction(streamedCallback)) {
1793
- streamedCallback();
1794
- }
1795
-
1796
- if (existingVideo) {
1797
- if (utils.isString(existingVideo)) {
1798
- videoElement.src = existingVideo;
1799
- videoElement.innerHTML = "<source src=\"".concat(existingVideo, "\" type=\"video/").concat(utils.getExtension(existingVideo), "\" />");
1800
- } else if (existingVideo instanceof Blob) {
1801
- try {
1802
- videoElement.src = utils.URL.createObjectURL(existingVideo);
1803
- } catch (e) {}
1804
-
1805
- videoElement.innerHTML = "<source src=\"".concat(existingVideo, "\" type=\"").concat(existingVideo.type, "\" />");
1806
- }
1807
- } else if (videoElement.mozSrcObject) {
1808
- videoElement.mozSrcObject = cameraStream;
1809
- } else if (utils.URL) {
1810
- try {
1811
- videoElement.srcObject = cameraStream;
1812
- videoElement.src = utils.URL.createObjectURL(cameraStream);
1813
- } catch (e) {
1814
- videoElement.srcObject = cameraStream;
1815
- }
1816
- }
1817
-
1818
- videoElement.play();
1819
- utils.requestTimeout(function checkLoadedData() {
1820
- checkLoadedData.count = checkLoadedData.count || 0;
1821
-
1822
- if (videoStream.loadedData === true) {
1823
- videoStream.findVideoSize({
1824
- videoElement,
1825
- cameraStream,
1826
- completedCallback
1827
- });
1828
- videoStream.loadedData = false;
1829
- } else {
1830
- checkLoadedData.count += 1;
1831
-
1832
- if (checkLoadedData.count > 10) {
1833
- videoStream.findVideoSize({
1834
- videoElement,
1835
- cameraStream,
1836
- completedCallback
1837
- });
1838
- } else {
1839
- checkLoadedData();
1840
- }
1841
- }
1842
- }, 0);
1843
- },
1844
- startStreaming: function startStreaming(obj) {
1845
- const errorCallback = utils.isFunction(obj.error) ? obj.error : utils.noop;
1846
- const streamedCallback = utils.isFunction(obj.streamed) ? obj.streamed : utils.noop;
1847
- const completedCallback = utils.isFunction(obj.completed) ? obj.completed : utils.noop;
1848
- const crossOrigin = obj.crossOrigin;
1849
- const existingVideo = obj.existingVideo;
1850
- const lastCameraStream = obj.lastCameraStream;
1851
- const options = obj.options;
1852
- const webcamVideoElement = obj.webcamVideoElement;
1853
- const videoElement = utils.isElement(existingVideo) ? existingVideo : webcamVideoElement ? webcamVideoElement : document.createElement('video');
1854
- const cameraStream = void 0;
1855
-
1856
- if (crossOrigin) {
1857
- videoElement.crossOrigin = options.crossOrigin;
1858
- }
1859
-
1860
- videoElement.autoplay = true;
1861
- videoElement.loop = true;
1862
- videoElement.muted = true;
1863
- videoElement.addEventListener('loadeddata', function (event) {
1864
- videoStream.loadedData = true;
1865
-
1866
- if (options.offset) {
1867
- videoElement.currentTime = options.offset;
1868
- }
1869
- });
1870
-
1871
- if (existingVideo) {
1872
- videoStream.stream({
1873
- videoElement,
1874
- existingVideo,
1875
- completedCallback
1876
- });
1877
- } else if (lastCameraStream) {
1878
- videoStream.stream({
1879
- videoElement,
1880
- cameraStream: lastCameraStream,
1881
- streamedCallback,
1882
- completedCallback
1883
- });
1884
- } else {
1885
- utils.getUserMedia({
1886
- video: true
1887
- }, function (stream) {
1888
- videoStream.stream({
1889
- videoElement,
1890
- cameraStream: stream,
1891
- streamedCallback,
1892
- completedCallback
1893
- });
1894
- }, errorCallback);
1895
- }
1896
- },
1897
- startVideoStreaming: function startVideoStreaming(callback) {
1898
- const options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
1899
- const timeoutLength = options.timeout !== undefined ? options.timeout : 0;
1900
- const originalCallback = options.callback;
1901
- const webcamVideoElement = options.webcamVideoElement;
1902
- let noGetUserMediaSupportTimeout = void 0;
1903
-
1904
- if (timeoutLength > 0) {
1905
- noGetUserMediaSupportTimeout = utils.requestTimeout(function () {
1906
- videoStream.onStreamingTimeout(originalCallback);
1907
- }, 10000);
1908
- }
1909
-
1910
- videoStream.startStreaming({
1911
- error: function error() {
1912
- originalCallback({
1913
- error: true,
1914
- errorCode: 'getUserMedia',
1915
- errorMsg: 'There was an issue with the getUserMedia API - the user probably denied permission',
1916
- image: null,
1917
- cameraStream: {}
1918
- });
1919
- },
1920
- streamed: function streamed() {
1921
- clearTimeout(noGetUserMediaSupportTimeout);
1922
- },
1923
- completed: function completed() {
1924
- const obj = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
1946
+ loadedData: false,
1947
+ defaultVideoDimensions: {
1948
+ width: 640,
1949
+ height: 480
1950
+ },
1951
+ findVideoSize: function findVideoSizeMethod(obj) {
1952
+ findVideoSizeMethod.attempts = findVideoSizeMethod.attempts || 0;
1925
1953
  const cameraStream = obj.cameraStream;
1954
+ const completedCallback = obj.completedCallback;
1926
1955
  const videoElement = obj.videoElement;
1927
- const videoHeight = obj.videoHeight;
1928
- const videoWidth = obj.videoWidth;
1929
- callback({
1930
- cameraStream,
1931
- videoElement,
1932
- videoHeight,
1933
- videoWidth
1956
+ if (!videoElement) {
1957
+ return;
1958
+ }
1959
+ if (videoElement.videoWidth > 0 && videoElement.videoHeight > 0) {
1960
+ videoElement.removeEventListener('loadeddata', videoStream.findVideoSize);
1961
+ completedCallback({
1962
+ videoElement,
1963
+ cameraStream,
1964
+ videoWidth: videoElement.videoWidth,
1965
+ videoHeight: videoElement.videoHeight
1966
+ });
1967
+ }
1968
+ else if (findVideoSizeMethod.attempts < 10) {
1969
+ findVideoSizeMethod.attempts += 1;
1970
+ utils.requestTimeout(function () {
1971
+ videoStream.findVideoSize(obj);
1972
+ }, 400);
1973
+ }
1974
+ else {
1975
+ completedCallback({
1976
+ videoElement,
1977
+ cameraStream,
1978
+ videoWidth: videoStream.defaultVideoDimensions.width,
1979
+ videoHeight: videoStream.defaultVideoDimensions.height
1980
+ });
1981
+ }
1982
+ },
1983
+ onStreamingTimeout: function onStreamingTimeout(callback) {
1984
+ if (utils.isFunction(callback)) {
1985
+ callback({
1986
+ error: true,
1987
+ errorCode: 'getUserMedia',
1988
+ errorMsg: 'There was an issue with the getUserMedia API - Timed out while trying to start streaming',
1989
+ image: null,
1990
+ cameraStream: {}
1991
+ });
1992
+ }
1993
+ },
1994
+ stream: function stream(obj) {
1995
+ const existingVideo = utils.isArray(obj.existingVideo)
1996
+ ? obj.existingVideo[0]
1997
+ : obj.existingVideo;
1998
+ const cameraStream = obj.cameraStream;
1999
+ const completedCallback = obj.completedCallback;
2000
+ const streamedCallback = obj.streamedCallback;
2001
+ const videoElement = obj.videoElement;
2002
+ if (utils.isFunction(streamedCallback)) {
2003
+ streamedCallback();
2004
+ }
2005
+ if (existingVideo) {
2006
+ if (utils.isString(existingVideo)) {
2007
+ videoElement.src = existingVideo;
2008
+ videoElement.innerHTML = `<source src="${existingVideo}" type="video/${utils.getExtension(existingVideo)}" />`;
2009
+ }
2010
+ else if (existingVideo instanceof Blob) {
2011
+ try {
2012
+ videoElement.src = utils.URL.createObjectURL(existingVideo);
2013
+ }
2014
+ catch (e) { }
2015
+ videoElement.innerHTML = `<source src="${existingVideo}" type="${existingVideo.type}" />`;
2016
+ }
2017
+ }
2018
+ else if (videoElement.mozSrcObject) {
2019
+ videoElement.mozSrcObject = cameraStream;
2020
+ }
2021
+ else if (utils.URL) {
2022
+ try {
2023
+ videoElement.srcObject = cameraStream;
2024
+ videoElement.src = utils.URL.createObjectURL(cameraStream);
2025
+ }
2026
+ catch (e) {
2027
+ videoElement.srcObject = cameraStream;
2028
+ }
2029
+ }
2030
+ videoElement.play();
2031
+ utils.requestTimeout(function checkLoadedData() {
2032
+ checkLoadedData.count = checkLoadedData.count || 0;
2033
+ if (videoStream.loadedData === true) {
2034
+ videoStream.findVideoSize({
2035
+ videoElement,
2036
+ cameraStream,
2037
+ completedCallback
2038
+ });
2039
+ videoStream.loadedData = false;
2040
+ }
2041
+ else {
2042
+ checkLoadedData.count += 1;
2043
+ if (checkLoadedData.count > 10) {
2044
+ videoStream.findVideoSize({
2045
+ videoElement,
2046
+ cameraStream,
2047
+ completedCallback
2048
+ });
2049
+ }
2050
+ else {
2051
+ checkLoadedData();
2052
+ }
2053
+ }
2054
+ }, 0);
2055
+ },
2056
+ startStreaming: function startStreaming(obj) {
2057
+ const errorCallback = utils.isFunction(obj.error) ? obj.error : utils.noop;
2058
+ const streamedCallback = utils.isFunction(obj.streamed) ? obj.streamed : utils.noop;
2059
+ const completedCallback = utils.isFunction(obj.completed) ? obj.completed : utils.noop;
2060
+ const crossOrigin = obj.crossOrigin;
2061
+ const existingVideo = obj.existingVideo;
2062
+ const lastCameraStream = obj.lastCameraStream;
2063
+ const options = obj.options;
2064
+ const webcamVideoElement = obj.webcamVideoElement;
2065
+ const videoElement = utils.isElement(existingVideo)
2066
+ ? existingVideo
2067
+ : webcamVideoElement
2068
+ ? webcamVideoElement
2069
+ : document.createElement('video');
2070
+ const cameraStream = void 0;
2071
+ if (crossOrigin) {
2072
+ videoElement.crossOrigin = options.crossOrigin;
2073
+ }
2074
+ videoElement.autoplay = true;
2075
+ videoElement.loop = true;
2076
+ videoElement.muted = true;
2077
+ videoElement.addEventListener('loadeddata', function (event) {
2078
+ videoStream.loadedData = true;
2079
+ if (options.offset) {
2080
+ videoElement.currentTime = options.offset;
2081
+ }
1934
2082
  });
1935
- },
1936
- lastCameraStream: options.lastCameraStream,
1937
- webcamVideoElement,
1938
- crossOrigin: options.crossOrigin,
1939
- options
1940
- });
1941
- },
1942
- stopVideoStreaming: function stopVideoStreaming(obj) {
1943
- obj = utils.isObject(obj) ? obj : {};
1944
- const _obj = obj;
1945
- const keepCameraOn = _obj.keepCameraOn;
1946
- const videoElement = _obj.videoElement;
1947
- const webcamVideoElement = _obj.webcamVideoElement;
1948
- const cameraStream = obj.cameraStream || {};
1949
- const cameraStreamTracks = cameraStream.getTracks ? cameraStream.getTracks() || [] : [];
1950
- const hasCameraStreamTracks = Boolean(cameraStreamTracks.length);
1951
- const firstCameraStreamTrack = cameraStreamTracks[0];
1952
-
1953
- if (!keepCameraOn && hasCameraStreamTracks) {
1954
- if (utils.isFunction(firstCameraStreamTrack.stop)) {
1955
- firstCameraStreamTrack.stop();
1956
- }
1957
- }
1958
-
1959
- if (utils.isElement(videoElement) && !webcamVideoElement) {
1960
- videoElement.pause();
1961
-
1962
- if (utils.isFunction(utils.URL.revokeObjectURL) && !utils.webWorkerError) {
1963
- if (videoElement.src) {
1964
- utils.URL.revokeObjectURL(videoElement.src);
1965
- }
1966
- }
1967
-
1968
- utils.removeElement(videoElement);
2083
+ if (existingVideo) {
2084
+ videoStream.stream({
2085
+ videoElement,
2086
+ existingVideo,
2087
+ completedCallback
2088
+ });
2089
+ }
2090
+ else if (lastCameraStream) {
2091
+ videoStream.stream({
2092
+ videoElement,
2093
+ cameraStream: lastCameraStream,
2094
+ streamedCallback,
2095
+ completedCallback
2096
+ });
2097
+ }
2098
+ else {
2099
+ utils.getUserMedia({
2100
+ video: true
2101
+ }, function (stream) {
2102
+ videoStream.stream({
2103
+ videoElement,
2104
+ cameraStream: stream,
2105
+ streamedCallback,
2106
+ completedCallback
2107
+ });
2108
+ }, errorCallback);
2109
+ }
2110
+ },
2111
+ startVideoStreaming: function startVideoStreaming(callback) {
2112
+ const options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
2113
+ const timeoutLength = options.timeout !== undefined ? options.timeout : 0;
2114
+ const originalCallback = options.callback;
2115
+ const webcamVideoElement = options.webcamVideoElement;
2116
+ let noGetUserMediaSupportTimeout = void 0;
2117
+ // Some browsers apparently have support for video streaming because of the
2118
+ // presence of the getUserMedia function, but then do not answer our
2119
+ // calls for streaming.
2120
+ // So we'll set up this timeout and if nothing happens after a while, we'll
2121
+ // conclude that there's no actual getUserMedia support.
2122
+ if (timeoutLength > 0) {
2123
+ noGetUserMediaSupportTimeout = utils.requestTimeout(function () {
2124
+ videoStream.onStreamingTimeout(originalCallback);
2125
+ }, 10000);
2126
+ }
2127
+ videoStream.startStreaming({
2128
+ error: function error() {
2129
+ originalCallback({
2130
+ error: true,
2131
+ errorCode: 'getUserMedia',
2132
+ errorMsg: 'There was an issue with the getUserMedia API - the user probably denied permission',
2133
+ image: null,
2134
+ cameraStream: {}
2135
+ });
2136
+ },
2137
+ streamed: function streamed() {
2138
+ // The streaming started somehow, so we can assume there is getUserMedia support
2139
+ clearTimeout(noGetUserMediaSupportTimeout);
2140
+ },
2141
+ completed: function completed() {
2142
+ const obj = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
2143
+ const cameraStream = obj.cameraStream;
2144
+ const videoElement = obj.videoElement;
2145
+ const videoHeight = obj.videoHeight;
2146
+ const videoWidth = obj.videoWidth;
2147
+ callback({
2148
+ cameraStream,
2149
+ videoElement,
2150
+ videoHeight,
2151
+ videoWidth
2152
+ });
2153
+ },
2154
+ lastCameraStream: options.lastCameraStream,
2155
+ webcamVideoElement,
2156
+ crossOrigin: options.crossOrigin,
2157
+ options
2158
+ });
2159
+ },
2160
+ stopVideoStreaming: function stopVideoStreaming(obj) {
2161
+ obj = utils.isObject(obj) ? obj : {};
2162
+ const _obj = obj;
2163
+ const keepCameraOn = _obj.keepCameraOn;
2164
+ const videoElement = _obj.videoElement;
2165
+ const webcamVideoElement = _obj.webcamVideoElement;
2166
+ const cameraStream = obj.cameraStream || {};
2167
+ const cameraStreamTracks = cameraStream.getTracks ? cameraStream.getTracks() || [] : [];
2168
+ const hasCameraStreamTracks = Boolean(cameraStreamTracks.length);
2169
+ const firstCameraStreamTrack = cameraStreamTracks[0];
2170
+ if (!keepCameraOn && hasCameraStreamTracks) {
2171
+ if (utils.isFunction(firstCameraStreamTrack.stop)) {
2172
+ // Stops the camera stream
2173
+ firstCameraStreamTrack.stop();
2174
+ }
2175
+ }
2176
+ if (utils.isElement(videoElement) && !webcamVideoElement) {
2177
+ // Pauses the video, revokes the object URL (freeing up memory), and remove the video element
2178
+ videoElement.pause();
2179
+ // Destroys the object url
2180
+ if (utils.isFunction(utils.URL.revokeObjectURL) && !utils.webWorkerError) {
2181
+ if (videoElement.src) {
2182
+ utils.URL.revokeObjectURL(videoElement.src);
2183
+ }
2184
+ }
2185
+ // Removes the video element from the DOM
2186
+ utils.removeElement(videoElement);
2187
+ }
1969
2188
  }
1970
- }
1971
2189
  };
1972
-
2190
+ /*
2191
+ stopVideoStreaming.js
2192
+ =====================
2193
+ */
2194
+ /* Copyright 2017 Yahoo Inc.
2195
+ * Copyrights licensed under the MIT License. See the accompanying LICENSE file for terms.
2196
+ */
1973
2197
  function stopVideoStreaming(options) {
1974
- options = utils.isObject(options) ? options : {};
1975
- videoStream.stopVideoStreaming(options);
2198
+ options = utils.isObject(options) ? options : {};
2199
+ videoStream.stopVideoStreaming(options);
1976
2200
  }
1977
-
2201
+ /*
2202
+ createAndGetGIF.js
2203
+ ==================
2204
+ */
2205
+ /* Copyright 2017 Yahoo Inc.
2206
+ * Copyrights licensed under the MIT License. See the accompanying LICENSE file for terms.
2207
+ */
2208
+ // Dependencies
1978
2209
  function createAndGetGIF(obj, callback) {
1979
- const options = obj.options || {};
1980
- const images = options.images;
1981
- const video = options.video;
1982
- const gifWidth = Number(options.gifWidth);
1983
- const gifHeight = Number(options.gifHeight);
1984
- const numFrames = Number(options.numFrames);
1985
- const cameraStream = obj.cameraStream;
1986
- const videoElement = obj.videoElement;
1987
- const videoWidth = obj.videoWidth;
1988
- const videoHeight = obj.videoHeight;
1989
- const cropDimensions = screenShot.getCropDimensions({
1990
- videoWidth,
1991
- videoHeight,
1992
- gifHeight,
1993
- gifWidth
1994
- });
1995
- const completeCallback = callback;
1996
- options.crop = cropDimensions;
1997
- options.videoElement = videoElement;
1998
- options.videoWidth = videoWidth;
1999
- options.videoHeight = videoHeight;
2000
- options.cameraStream = cameraStream;
2001
-
2002
- if (!utils.isElement(videoElement)) {
2003
- return;
2004
- }
2005
-
2006
- videoElement.width = gifWidth + cropDimensions.width;
2007
- videoElement.height = gifHeight + cropDimensions.height;
2008
-
2009
- if (!options.webcamVideoElement) {
2010
- utils.setCSSAttr(videoElement, {
2011
- position: 'fixed',
2012
- opacity: '0'
2210
+ const options = obj.options || {};
2211
+ const images = options.images;
2212
+ const video = options.video;
2213
+ const gifWidth = Number(options.gifWidth);
2214
+ const gifHeight = Number(options.gifHeight);
2215
+ const numFrames = Number(options.numFrames);
2216
+ const cameraStream = obj.cameraStream;
2217
+ const videoElement = obj.videoElement;
2218
+ const videoWidth = obj.videoWidth;
2219
+ const videoHeight = obj.videoHeight;
2220
+ const cropDimensions = screenShot.getCropDimensions({
2221
+ videoWidth,
2222
+ videoHeight,
2223
+ gifHeight,
2224
+ gifWidth
2013
2225
  });
2014
- document.body.appendChild(videoElement);
2015
- }
2016
-
2017
- videoElement.play();
2018
- screenShot.getGIF(options, function (obj) {
2019
- if ((!images || !images.length) && (!video || !video.length)) {
2020
- stopVideoStreaming(obj);
2226
+ const completeCallback = callback;
2227
+ options.crop = cropDimensions;
2228
+ options.videoElement = videoElement;
2229
+ options.videoWidth = videoWidth;
2230
+ options.videoHeight = videoHeight;
2231
+ options.cameraStream = cameraStream;
2232
+ if (!utils.isElement(videoElement)) {
2233
+ return;
2021
2234
  }
2022
-
2023
- completeCallback(obj);
2024
- });
2235
+ videoElement.width = gifWidth + cropDimensions.width;
2236
+ videoElement.height = gifHeight + cropDimensions.height;
2237
+ if (!options.webcamVideoElement) {
2238
+ utils.setCSSAttr(videoElement, {
2239
+ position: 'fixed',
2240
+ opacity: '0'
2241
+ });
2242
+ document.body.appendChild(videoElement);
2243
+ }
2244
+ // Firefox doesn't seem to obey autoplay if the element is not in the DOM when the content
2245
+ // is loaded, so we must manually trigger play after adding it, or the video will be frozen
2246
+ videoElement.play();
2247
+ screenShot.getGIF(options, function (obj) {
2248
+ if ((!images || !images.length) && (!video || !video.length)) {
2249
+ stopVideoStreaming(obj);
2250
+ }
2251
+ completeCallback(obj);
2252
+ });
2025
2253
  }
2026
-
2254
+ /*
2255
+ existingVideo.js
2256
+ ================
2257
+ */
2258
+ /* Copyright 2017 Yahoo Inc.
2259
+ * Copyrights licensed under the MIT License. See the accompanying LICENSE file for terms.
2260
+ */
2261
+ // Dependencies
2027
2262
  function existingVideo() {
2028
- const obj = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
2029
- const callback = obj.callback;
2030
- let existingVideo = obj.existingVideo;
2031
- const options = obj.options;
2032
- const skipObj = {
2033
- getUserMedia: true,
2034
- 'window.URL': true
2035
- };
2036
- const errorObj = error.validate(skipObj);
2037
- const loadedImages = 0;
2038
- let videoType = void 0;
2039
- let videoSrc = void 0;
2040
- const tempImage = void 0;
2041
- const ag = void 0;
2042
-
2043
- if (errorObj.error) {
2044
- return callback(errorObj);
2045
- }
2046
-
2047
- if (utils.isElement(existingVideo) && existingVideo.src) {
2048
- videoSrc = existingVideo.src;
2049
- videoType = utils.getExtension(videoSrc);
2050
-
2051
- if (!utils.isSupported.videoCodecs[videoType]) {
2052
- return callback(error.messages.videoCodecs);
2263
+ const obj = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
2264
+ const callback = obj.callback;
2265
+ let existingVideo = obj.existingVideo;
2266
+ const options = obj.options;
2267
+ const skipObj = {
2268
+ getUserMedia: true,
2269
+ 'globalThis.URL': true
2270
+ };
2271
+ const errorObj = error.validate(skipObj);
2272
+ const loadedImages = 0;
2273
+ let videoType = void 0;
2274
+ let videoSrc = void 0;
2275
+ const tempImage = void 0;
2276
+ const ag = void 0;
2277
+ if (errorObj.error) {
2278
+ return callback(errorObj);
2279
+ }
2280
+ if (utils.isElement(existingVideo) && existingVideo.src) {
2281
+ videoSrc = existingVideo.src;
2282
+ videoType = utils.getExtension(videoSrc);
2283
+ if (!utils.isSupported.videoCodecs[videoType]) {
2284
+ return callback(error.messages.videoCodecs);
2285
+ }
2286
+ }
2287
+ else if (utils.isArray(existingVideo)) {
2288
+ utils.each(existingVideo, function (iterator, videoSrc) {
2289
+ if (videoSrc instanceof Blob) {
2290
+ videoType = videoSrc.type.substr(videoSrc.type.lastIndexOf('/') + 1, videoSrc.length);
2291
+ }
2292
+ else {
2293
+ videoType = videoSrc.substr(videoSrc.lastIndexOf('.') + 1, videoSrc.length);
2294
+ }
2295
+ if (utils.isSupported.videoCodecs[videoType]) {
2296
+ existingVideo = videoSrc;
2297
+ return false;
2298
+ }
2299
+ });
2053
2300
  }
2054
- } else if (utils.isArray(existingVideo)) {
2055
- utils.each(existingVideo, function (iterator, videoSrc) {
2056
- if (videoSrc instanceof Blob) {
2057
- videoType = videoSrc.type.substr(videoSrc.type.lastIndexOf('/') + 1, videoSrc.length);
2058
- } else {
2059
- videoType = videoSrc.substr(videoSrc.lastIndexOf('.') + 1, videoSrc.length);
2060
- }
2061
-
2062
- if (utils.isSupported.videoCodecs[videoType]) {
2063
- existingVideo = videoSrc;
2064
- return false;
2065
- }
2301
+ videoStream.startStreaming({
2302
+ completed: function completed(obj) {
2303
+ obj.options = options || {};
2304
+ createAndGetGIF(obj, callback);
2305
+ },
2306
+ existingVideo,
2307
+ crossOrigin: options.crossOrigin,
2308
+ options
2066
2309
  });
2067
- }
2068
-
2069
- videoStream.startStreaming({
2070
- completed: function completed(obj) {
2071
- obj.options = options || {};
2072
- createAndGetGIF(obj, callback);
2073
- },
2074
- existingVideo,
2075
- crossOrigin: options.crossOrigin,
2076
- options
2077
- });
2078
2310
  }
2079
-
2311
+ /*
2312
+ existingWebcam.js
2313
+ =================
2314
+ */
2315
+ /* Copyright 2017 Yahoo Inc.
2316
+ * Copyrights licensed under the MIT License. See the accompanying LICENSE file for terms.
2317
+ */
2318
+ // Dependencies
2080
2319
  function existingWebcam() {
2081
- const obj = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
2082
- const callback = obj.callback;
2083
- const lastCameraStream = obj.lastCameraStream;
2084
- const options = obj.options;
2085
- const webcamVideoElement = obj.webcamVideoElement;
2086
-
2087
- if (!isWebCamGIFSupported()) {
2088
- return callback(error.validate());
2089
- }
2090
-
2091
- if (options.savedRenderingContexts.length) {
2092
- screenShot.getGIF(options, function (obj) {
2093
- callback(obj);
2094
- });
2095
- return;
2096
- }
2097
-
2098
- videoStream.startVideoStreaming(function () {
2099
2320
  const obj = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
2100
- obj.options = options || {};
2101
- createAndGetGIF(obj, callback);
2102
- }, {
2103
- lastCameraStream,
2104
- callback,
2105
- webcamVideoElement,
2106
- crossOrigin: options.crossOrigin
2107
- });
2321
+ const callback = obj.callback;
2322
+ const lastCameraStream = obj.lastCameraStream;
2323
+ const options = obj.options;
2324
+ const webcamVideoElement = obj.webcamVideoElement;
2325
+ if (!isWebCamGIFSupported()) {
2326
+ return callback(error.validate());
2327
+ }
2328
+ if (options.savedRenderingContexts.length) {
2329
+ screenShot.getGIF(options, function (obj) {
2330
+ callback(obj);
2331
+ });
2332
+ return;
2333
+ }
2334
+ videoStream.startVideoStreaming(function () {
2335
+ const obj = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
2336
+ obj.options = options || {};
2337
+ createAndGetGIF(obj, callback);
2338
+ }, {
2339
+ lastCameraStream,
2340
+ callback,
2341
+ webcamVideoElement,
2342
+ crossOrigin: options.crossOrigin
2343
+ });
2108
2344
  }
2109
-
2345
+ /*
2346
+ createGIF.js
2347
+ ============
2348
+ */
2349
+ /* Copyright 2017 Yahoo Inc.
2350
+ * Copyrights licensed under the MIT License. See the accompanying LICENSE file for terms.
2351
+ */
2352
+ // Dependencies
2110
2353
  function createGIF(userOptions, callback) {
2111
- callback = utils.isFunction(userOptions) ? userOptions : callback;
2112
- userOptions = utils.isObject(userOptions) ? userOptions : {};
2113
-
2114
- if (!utils.isFunction(callback)) {
2115
- return;
2116
- }
2117
-
2118
- let options = utils.normalizeOptions(defaultOptions, userOptions) || {};
2119
- const lastCameraStream = userOptions.cameraStream;
2120
- const images = options.images;
2121
- const imagesLength = images ? images.length : 0;
2122
- const video = options.video;
2123
- const webcamVideoElement = options.webcamVideoElement;
2124
- options = utils.normalizeOptions(options, {
2125
- gifWidth: Math.floor(options.gifWidth),
2126
- gifHeight: Math.floor(options.gifHeight)
2127
- });
2128
-
2129
- if (imagesLength) {
2130
- existingImages({
2131
- images,
2132
- imagesLength,
2133
- callback,
2134
- options
2135
- });
2136
- } else if (video) {
2137
- existingVideo({
2138
- existingVideo: video,
2139
- callback,
2140
- options
2141
- });
2142
- } else {
2143
- existingWebcam({
2144
- lastCameraStream,
2145
- callback,
2146
- webcamVideoElement,
2147
- options
2354
+ callback = utils.isFunction(userOptions) ? userOptions : callback;
2355
+ userOptions = utils.isObject(userOptions) ? userOptions : {};
2356
+ if (!utils.isFunction(callback)) {
2357
+ return;
2358
+ }
2359
+ let options = utils.normalizeOptions(defaultOptions, userOptions) || {};
2360
+ const lastCameraStream = userOptions.cameraStream;
2361
+ const images = options.images;
2362
+ const imagesLength = images ? images.length : 0;
2363
+ const video = options.video;
2364
+ const webcamVideoElement = options.webcamVideoElement;
2365
+ options = utils.normalizeOptions(options, {
2366
+ gifWidth: Math.floor(options.gifWidth),
2367
+ gifHeight: Math.floor(options.gifHeight)
2148
2368
  });
2149
- }
2369
+ // If the user would like to create a GIF from an existing image(s)
2370
+ if (imagesLength) {
2371
+ existingImages({
2372
+ images,
2373
+ imagesLength,
2374
+ callback,
2375
+ options
2376
+ });
2377
+ }
2378
+ else if (video) {
2379
+ // If the user would like to create a GIF from an existing HTML5 video
2380
+ existingVideo({
2381
+ existingVideo: video,
2382
+ callback,
2383
+ options
2384
+ });
2385
+ }
2386
+ else {
2387
+ // If the user would like to create a GIF from a webcam stream
2388
+ existingWebcam({
2389
+ lastCameraStream,
2390
+ callback,
2391
+ webcamVideoElement,
2392
+ options
2393
+ });
2394
+ }
2150
2395
  }
2151
-
2396
+ /*
2397
+ takeSnapShot.js
2398
+ ===============
2399
+ */
2400
+ /* Copyright 2017 Yahoo Inc.
2401
+ * Copyrights licensed under the MIT License. See the accompanying LICENSE file for terms.
2402
+ */
2152
2403
  function takeSnapShot(userOptions, callback) {
2153
- callback = utils.isFunction(userOptions) ? userOptions : callback;
2154
- userOptions = utils.isObject(userOptions) ? userOptions : {};
2155
-
2156
- if (!utils.isFunction(callback)) {
2157
- return;
2158
- }
2159
-
2160
- const mergedOptions = utils.normalizeOptions(defaultOptions, userOptions);
2161
- const options = utils.normalizeOptions(mergedOptions, {
2162
- interval: 0.1,
2163
- numFrames: 1,
2164
- gifWidth: Math.floor(mergedOptions.gifWidth),
2165
- gifHeight: Math.floor(mergedOptions.gifHeight)
2166
- });
2167
- createGIF(options, callback);
2404
+ callback = utils.isFunction(userOptions) ? userOptions : callback;
2405
+ userOptions = utils.isObject(userOptions) ? userOptions : {};
2406
+ if (!utils.isFunction(callback)) {
2407
+ return;
2408
+ }
2409
+ const mergedOptions = utils.normalizeOptions(defaultOptions, userOptions);
2410
+ const options = utils.normalizeOptions(mergedOptions, {
2411
+ interval: 0.1,
2412
+ numFrames: 1,
2413
+ gifWidth: Math.floor(mergedOptions.gifWidth),
2414
+ gifHeight: Math.floor(mergedOptions.gifHeight)
2415
+ });
2416
+ createGIF(options, callback);
2168
2417
  }
2169
-
2418
+ /*
2419
+ API.js
2420
+ ======
2421
+ */
2422
+ /* Copyright 2017 Yahoo Inc.
2423
+ * Copyrights licensed under the MIT License. See the accompanying LICENSE file for terms.
2424
+ */
2425
+ // Dependencies
2170
2426
  const API = {
2171
- utils: utils$2,
2172
- error: error$2,
2173
- defaultOptions: defaultOptions$2,
2174
- createGIF,
2175
- takeSnapShot,
2176
- stopVideoStreaming,
2177
- isSupported,
2178
- isWebCamGIFSupported,
2179
- isExistingVideoGIFSupported,
2180
- isExistingImagesGIFSupported: isSupported$1,
2181
- VERSION: '0.4.5'
2427
+ utils: utils$2,
2428
+ error: error$2,
2429
+ defaultOptions: defaultOptions$2,
2430
+ createGIF,
2431
+ takeSnapShot,
2432
+ stopVideoStreaming,
2433
+ isSupported,
2434
+ isWebCamGIFSupported,
2435
+ isExistingVideoGIFSupported,
2436
+ isExistingImagesGIFSupported: isSupported$1,
2437
+ VERSION: '0.4.5'
2182
2438
  };
2183
- export default API;
2184
- //# sourceMappingURL=gifshot.js.map
2439
+ exports.default = API;
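Note (editorial, not part of the published diff): the API object exported at the end of this file is the vendored gifshot entry point that this package wraps. A minimal usage sketch, assuming only what is visible in the diffed source above — createGIF(options, callback), the video/gifWidth/gifHeight/numFrames/interval options, and a callback result of the form {error, errorCode, errorMsg, image}, where image is the base64-encoded GIF produced via getBase64GIF; the import path is illustrative:

    // Illustrative sketch; option names and callback shape are taken from the diffed source above.
    import gifshot from './gifshot'; // hypothetical local path to this vendored module

    gifshot.createGIF(
      {video: ['capture.webm'], gifWidth: 200, gifHeight: 200, numFrames: 10, interval: 0.1},
      (result) => {
        if (!result.error) {
          // result.image is a base64-encoded GIF, usable as an <img> src
          document.querySelector('img').src = result.image;
        }
      }
    );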