@micromag/element-audio 0.2.371 → 0.2.372

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/es/index.js +60 -25
  2. package/lib/index.js +62 -27
  3. package/package.json +2 -2
package/es/index.js CHANGED
@@ -2,16 +2,16 @@ import _defineProperty from '@babel/runtime/helpers/defineProperty';
2
2
  import _toConsumableArray from '@babel/runtime/helpers/toConsumableArray';
3
3
  import _slicedToArray from '@babel/runtime/helpers/slicedToArray';
4
4
  import _objectWithoutProperties from '@babel/runtime/helpers/objectWithoutProperties';
5
- import React, { useRef, useEffect, useCallback, useState } from 'react';
6
- import PropTypes from 'prop-types';
7
- import classNames from 'classnames';
8
5
  import { PropTypes as PropTypes$1 } from '@micromag/core';
9
6
  import { useUserInteracted } from '@micromag/core/contexts';
10
7
  import { useResizeObserver, useMediaApi } from '@micromag/core/hooks';
11
- import 'whatwg-fetch';
12
- import { useGesture } from 'react-use-gesture';
8
+ import classNames from 'classnames';
9
+ import PropTypes from 'prop-types';
10
+ import React, { useRef, useEffect, useCallback, useState } from 'react';
13
11
  import { useSpring } from '@react-spring/core';
14
12
  import { animated } from '@react-spring/web';
13
+ import { useGesture } from 'react-use-gesture';
14
+ import 'whatwg-fetch';
15
15
 
16
16
  var styles$1 = {"container":"micromag-element-audio-audio-wave-container","button":"micromag-element-audio-audio-wave-button","canvasBackground":"micromag-element-audio-audio-wave-canvasBackground","canvasProgress":"micromag-element-audio-audio-wave-canvasProgress"};
17
17
 
@@ -112,24 +112,48 @@ var AudioWave = function AudioWave(_ref) {
112
112
  }
113
113
 
114
114
  var sampleOuterWidth = sampleWidth + sampleMargin * 2;
115
- var samplesCount = Math.floor(elWidth / sampleOuterWidth);
116
- var amplitudes = []; // get samples
115
+ var samplesCount = Math.floor(elWidth / sampleOuterWidth); // const amplitudes = [];
116
+ // get samples
117
117
 
118
- var sampleSize = Math.floor(audioLevels.length / samplesCount);
118
+ var levelsBySamples = audioLevels.length / samplesCount;
119
119
 
120
- for (var sampleI = 0; sampleI < samplesCount; sampleI += 1) {
121
- var sampleStart = sampleSize * sampleI;
122
- var sum = 0;
120
+ var amplitudes = _toConsumableArray(Array(samplesCount).keys()).reduce(function (newAmplitudes, index) {
121
+ var levelStartIndex = index * levelsBySamples;
122
+ var levelEndIndex = levelStartIndex + levelsBySamples;
123
+ var newValues = [];
123
124
 
124
- for (var sampleSizeI = 0; sampleSizeI < sampleSize; sampleSizeI += 1) {
125
- sum += Math.abs(audioLevels[sampleStart + sampleSizeI]);
125
+ for (var i = Math.floor(levelStartIndex); i < Math.round(levelEndIndex); i += 1) {
126
+ newValues.push(audioLevels[i]);
126
127
  }
127
128
 
128
- amplitudes.push(sum / sampleSize);
129
- }
130
-
129
+ return levelsBySamples >= 1 ? [].concat(_toConsumableArray(newAmplitudes), [newValues.reduce(function (total, value) {
130
+ return total + value;
131
+ }, 0) / newValues.length]) : [].concat(_toConsumableArray(newAmplitudes), newValues);
132
+ }, []); // for (let sampleI = 0; sampleI < samplesCount; sampleI += levelsBySamples) {
133
+ // // if (levelsBySamples >= 1) {
134
+ // // const sampleSize = Math.floor(levelsBySamples);
135
+ // // const sampleStart = sampleSize * sampleI;
136
+ // // let sum = 0;
137
+ // // for (let sampleSizeI = 0; sampleSizeI < sampleSize; sampleSizeI += 1) {
138
+ // // sum += Math.abs(audioLevels[sampleStart + sampleSizeI]);
139
+ // // }
140
+ // // amplitudes.push(sum / sampleSize);
141
+ // // } else {
142
+ // console.log(sampleI);
143
+ // amplitudes.push(Math.abs(audioLevels[Math.floor(sampleI)]));
144
+ // // for (let sampleSizeI = 0; sampleSizeI < sampleSize; sampleSizeI += 1) {
145
+ // // console.log(sampleI, sampleSize);
146
+ // // amplitudes.push(Math.abs(audioLevels[sampleI % sampleSize]));
147
+ // // }
148
+ // // }
149
+ // }
150
+
151
+
152
+ var minAmplitude = Math.min.apply(Math, _toConsumableArray(amplitudes));
153
+ var maxAmplitude = Math.max.apply(Math, _toConsumableArray(amplitudes));
154
+ var delta = maxAmplitude - minAmplitude;
131
155
  var normalizedAmplitudes = amplitudes.map(function (n) {
132
- return n * Math.pow(Math.max.apply(Math, amplitudes), -1);
156
+ return (n - minAmplitude) / delta;
133
157
  }); // draw samples
134
158
 
135
159
  var canvasBg = canvasBackgroundRef.current;
@@ -282,7 +306,13 @@ var Audio = function Audio(_ref) {
282
306
 
283
307
  var _ref2 = media || {},
284
308
  _ref2$url = _ref2.url,
285
- url = _ref2$url === void 0 ? null : _ref2$url;
309
+ url = _ref2$url === void 0 ? null : _ref2$url,
310
+ _ref2$metadata = _ref2.metadata,
311
+ metadata = _ref2$metadata === void 0 ? null : _ref2$metadata;
312
+
313
+ var _ref3 = metadata || {},
314
+ _ref3$waveform = _ref3.waveform,
315
+ waveform = _ref3$waveform === void 0 ? null : _ref3$waveform;
286
316
 
287
317
  var userInteracted = useUserInteracted();
288
318
  var finalInitialMuted = initialMuted === true || initialMuted === 'auto' && autoPlay && !userInteracted;
@@ -329,13 +359,18 @@ var Audio = function Audio(_ref) {
329
359
 
330
360
  useEffect(function () {
331
361
  var canceled = false;
362
+ var AudioContext = typeof window !== 'undefined' ? window.AudioContext || window.webkitAudioContext : null;
332
363
 
333
- if (url !== null && waveFake) {
364
+ if (waveform !== null) {
365
+ setAudioLevels(waveform.map(function (it) {
366
+ return (it + 256 / 2) / 256;
367
+ }));
368
+ } else if (url !== null && waveFake) {
334
369
  var fakeLength = 1000;
335
370
  setAudioLevels(_toConsumableArray(new Array(fakeLength)).map(function () {
336
371
  return Math.random();
337
372
  }));
338
- } else if (url !== null && typeof window !== 'undefined') {
373
+ } else if (url !== null && AudioContext !== null) {
339
374
  fetch(url, {
340
375
  mode: 'cors'
341
376
  }).then(function (response) {
@@ -350,7 +385,7 @@ var Audio = function Audio(_ref) {
350
385
  }
351
386
 
352
387
  setBlobUrl(URL.createObjectURL(new Blob([arrayBuffer])));
353
- var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
388
+ var audioCtx = new AudioContext();
354
389
  return audioCtx.decodeAudioData(arrayBuffer);
355
390
  }).then(function (buffer) {
356
391
  var channelsCount = buffer.numberOfChannels;
@@ -358,9 +393,9 @@ var Audio = function Audio(_ref) {
358
393
  if (channelsCount > 0) {
359
394
  var leftChannelData = buffer.getChannelData(0);
360
395
  setAudioLevels(leftChannelData.reduce(function (newArray, level, levelIndex) {
361
- if (levelIndex % reduceBufferFactor === 0) {
362
- newArray[newArray.length] = level;
363
- }
396
+ // if (levelIndex % reduceBufferFactor === 0) {
397
+ // console.log(level, (level + 1) / 2);
398
+ newArray[newArray.length] = Math.abs(level); // }
364
399
 
365
400
  return newArray;
366
401
  }, []));
@@ -375,7 +410,7 @@ var Audio = function Audio(_ref) {
375
410
  canceled = true;
376
411
  }
377
412
  };
378
- }, [url, setAudioLevels, setBlobUrl, reduceBufferFactor, waveFake]);
413
+ }, [url, waveform, setAudioLevels, setBlobUrl, reduceBufferFactor, waveFake]);
379
414
  var ready = waveFake || audioReady && blobUrl !== null;
380
415
  useEffect(function () {
381
416
  if (ready && onReady !== null) {
package/lib/index.js CHANGED
@@ -6,16 +6,16 @@ var _defineProperty = require('@babel/runtime/helpers/defineProperty');
6
6
  var _toConsumableArray = require('@babel/runtime/helpers/toConsumableArray');
7
7
  var _slicedToArray = require('@babel/runtime/helpers/slicedToArray');
8
8
  var _objectWithoutProperties = require('@babel/runtime/helpers/objectWithoutProperties');
9
- var React = require('react');
10
- var PropTypes = require('prop-types');
11
- var classNames = require('classnames');
12
9
  var core$1 = require('@micromag/core');
13
10
  var contexts = require('@micromag/core/contexts');
14
11
  var hooks = require('@micromag/core/hooks');
15
- require('whatwg-fetch');
16
- var reactUseGesture = require('react-use-gesture');
12
+ var classNames = require('classnames');
13
+ var PropTypes = require('prop-types');
14
+ var React = require('react');
17
15
  var core = require('@react-spring/core');
18
16
  var web = require('@react-spring/web');
17
+ var reactUseGesture = require('react-use-gesture');
18
+ require('whatwg-fetch');
19
19
 
20
20
  function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
21
21
 
@@ -23,9 +23,9 @@ var _defineProperty__default = /*#__PURE__*/_interopDefaultLegacy(_definePropert
23
23
  var _toConsumableArray__default = /*#__PURE__*/_interopDefaultLegacy(_toConsumableArray);
24
24
  var _slicedToArray__default = /*#__PURE__*/_interopDefaultLegacy(_slicedToArray);
25
25
  var _objectWithoutProperties__default = /*#__PURE__*/_interopDefaultLegacy(_objectWithoutProperties);
26
- var React__default = /*#__PURE__*/_interopDefaultLegacy(React);
27
- var PropTypes__default = /*#__PURE__*/_interopDefaultLegacy(PropTypes);
28
26
  var classNames__default = /*#__PURE__*/_interopDefaultLegacy(classNames);
27
+ var PropTypes__default = /*#__PURE__*/_interopDefaultLegacy(PropTypes);
28
+ var React__default = /*#__PURE__*/_interopDefaultLegacy(React);
29
29
 
30
30
  var styles$1 = {"container":"micromag-element-audio-audio-wave-container","button":"micromag-element-audio-audio-wave-button","canvasBackground":"micromag-element-audio-audio-wave-canvasBackground","canvasProgress":"micromag-element-audio-audio-wave-canvasProgress"};
31
31
 
@@ -126,24 +126,48 @@ var AudioWave = function AudioWave(_ref) {
126
126
  }
127
127
 
128
128
  var sampleOuterWidth = sampleWidth + sampleMargin * 2;
129
- var samplesCount = Math.floor(elWidth / sampleOuterWidth);
130
- var amplitudes = []; // get samples
129
+ var samplesCount = Math.floor(elWidth / sampleOuterWidth); // const amplitudes = [];
130
+ // get samples
131
131
 
132
- var sampleSize = Math.floor(audioLevels.length / samplesCount);
132
+ var levelsBySamples = audioLevels.length / samplesCount;
133
133
 
134
- for (var sampleI = 0; sampleI < samplesCount; sampleI += 1) {
135
- var sampleStart = sampleSize * sampleI;
136
- var sum = 0;
134
+ var amplitudes = _toConsumableArray__default["default"](Array(samplesCount).keys()).reduce(function (newAmplitudes, index) {
135
+ var levelStartIndex = index * levelsBySamples;
136
+ var levelEndIndex = levelStartIndex + levelsBySamples;
137
+ var newValues = [];
137
138
 
138
- for (var sampleSizeI = 0; sampleSizeI < sampleSize; sampleSizeI += 1) {
139
- sum += Math.abs(audioLevels[sampleStart + sampleSizeI]);
139
+ for (var i = Math.floor(levelStartIndex); i < Math.round(levelEndIndex); i += 1) {
140
+ newValues.push(audioLevels[i]);
140
141
  }
141
142
 
142
- amplitudes.push(sum / sampleSize);
143
- }
144
-
143
+ return levelsBySamples >= 1 ? [].concat(_toConsumableArray__default["default"](newAmplitudes), [newValues.reduce(function (total, value) {
144
+ return total + value;
145
+ }, 0) / newValues.length]) : [].concat(_toConsumableArray__default["default"](newAmplitudes), newValues);
146
+ }, []); // for (let sampleI = 0; sampleI < samplesCount; sampleI += levelsBySamples) {
147
+ // // if (levelsBySamples >= 1) {
148
+ // // const sampleSize = Math.floor(levelsBySamples);
149
+ // // const sampleStart = sampleSize * sampleI;
150
+ // // let sum = 0;
151
+ // // for (let sampleSizeI = 0; sampleSizeI < sampleSize; sampleSizeI += 1) {
152
+ // // sum += Math.abs(audioLevels[sampleStart + sampleSizeI]);
153
+ // // }
154
+ // // amplitudes.push(sum / sampleSize);
155
+ // // } else {
156
+ // console.log(sampleI);
157
+ // amplitudes.push(Math.abs(audioLevels[Math.floor(sampleI)]));
158
+ // // for (let sampleSizeI = 0; sampleSizeI < sampleSize; sampleSizeI += 1) {
159
+ // // console.log(sampleI, sampleSize);
160
+ // // amplitudes.push(Math.abs(audioLevels[sampleI % sampleSize]));
161
+ // // }
162
+ // // }
163
+ // }
164
+
165
+
166
+ var minAmplitude = Math.min.apply(Math, _toConsumableArray__default["default"](amplitudes));
167
+ var maxAmplitude = Math.max.apply(Math, _toConsumableArray__default["default"](amplitudes));
168
+ var delta = maxAmplitude - minAmplitude;
145
169
  var normalizedAmplitudes = amplitudes.map(function (n) {
146
- return n * Math.pow(Math.max.apply(Math, amplitudes), -1);
170
+ return (n - minAmplitude) / delta;
147
171
  }); // draw samples
148
172
 
149
173
  var canvasBg = canvasBackgroundRef.current;
@@ -296,7 +320,13 @@ var Audio = function Audio(_ref) {
296
320
 
297
321
  var _ref2 = media || {},
298
322
  _ref2$url = _ref2.url,
299
- url = _ref2$url === void 0 ? null : _ref2$url;
323
+ url = _ref2$url === void 0 ? null : _ref2$url,
324
+ _ref2$metadata = _ref2.metadata,
325
+ metadata = _ref2$metadata === void 0 ? null : _ref2$metadata;
326
+
327
+ var _ref3 = metadata || {},
328
+ _ref3$waveform = _ref3.waveform,
329
+ waveform = _ref3$waveform === void 0 ? null : _ref3$waveform;
300
330
 
301
331
  var userInteracted = contexts.useUserInteracted();
302
332
  var finalInitialMuted = initialMuted === true || initialMuted === 'auto' && autoPlay && !userInteracted;
@@ -343,13 +373,18 @@ var Audio = function Audio(_ref) {
343
373
 
344
374
  React.useEffect(function () {
345
375
  var canceled = false;
376
+ var AudioContext = typeof window !== 'undefined' ? window.AudioContext || window.webkitAudioContext : null;
346
377
 
347
- if (url !== null && waveFake) {
378
+ if (waveform !== null) {
379
+ setAudioLevels(waveform.map(function (it) {
380
+ return (it + 256 / 2) / 256;
381
+ }));
382
+ } else if (url !== null && waveFake) {
348
383
  var fakeLength = 1000;
349
384
  setAudioLevels(_toConsumableArray__default["default"](new Array(fakeLength)).map(function () {
350
385
  return Math.random();
351
386
  }));
352
- } else if (url !== null && typeof window !== 'undefined') {
387
+ } else if (url !== null && AudioContext !== null) {
353
388
  fetch(url, {
354
389
  mode: 'cors'
355
390
  }).then(function (response) {
@@ -364,7 +399,7 @@ var Audio = function Audio(_ref) {
364
399
  }
365
400
 
366
401
  setBlobUrl(URL.createObjectURL(new Blob([arrayBuffer])));
367
- var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
402
+ var audioCtx = new AudioContext();
368
403
  return audioCtx.decodeAudioData(arrayBuffer);
369
404
  }).then(function (buffer) {
370
405
  var channelsCount = buffer.numberOfChannels;
@@ -372,9 +407,9 @@ var Audio = function Audio(_ref) {
372
407
  if (channelsCount > 0) {
373
408
  var leftChannelData = buffer.getChannelData(0);
374
409
  setAudioLevels(leftChannelData.reduce(function (newArray, level, levelIndex) {
375
- if (levelIndex % reduceBufferFactor === 0) {
376
- newArray[newArray.length] = level;
377
- }
410
+ // if (levelIndex % reduceBufferFactor === 0) {
411
+ // console.log(level, (level + 1) / 2);
412
+ newArray[newArray.length] = Math.abs(level); // }
378
413
 
379
414
  return newArray;
380
415
  }, []));
@@ -389,7 +424,7 @@ var Audio = function Audio(_ref) {
389
424
  canceled = true;
390
425
  }
391
426
  };
392
- }, [url, setAudioLevels, setBlobUrl, reduceBufferFactor, waveFake]);
427
+ }, [url, waveform, setAudioLevels, setBlobUrl, reduceBufferFactor, waveFake]);
393
428
  var ready = waveFake || audioReady && blobUrl !== null;
394
429
  React.useEffect(function () {
395
430
  if (ready && onReady !== null) {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@micromag/element-audio",
3
- "version": "0.2.371",
3
+ "version": "0.2.372",
4
4
  "description": "",
5
5
  "keywords": [
6
6
  "javascript"
@@ -67,5 +67,5 @@
67
67
  "publishConfig": {
68
68
  "access": "public"
69
69
  },
70
- "gitHead": "c0b4ada82950489dde098c81af7eb04cce86b56d"
70
+ "gitHead": "684d9c742633e0274f74f464722b4707cc80d7fa"
71
71
  }