@onesy/ui-react 1.0.24 → 1.0.25
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/AudioRecorder/AudioRecorder.d.ts +3 -1
- package/AudioRecorder/AudioRecorder.js +40 -25
- package/esm/AudioRecorder/AudioRecorder.js +52 -31
- package/esm/index.js +1 -1
- package/esm/utils.js +45 -1
- package/package.json +4 -2
- package/types.d.ts +5 -0
- package/utils.d.ts +2 -1
- package/utils.js +64 -1
package/AudioRecorder/AudioRecorder.d.ts CHANGED
@@ -16,7 +16,9 @@ export interface IAudioRecorder extends ILine {
   IconStart?: IElementReference;
   IconPause?: IElementReference;
   IconStop?: IElementReference;
-  onConfirm?: (value: Blob) => any;
+  onConfirm?: (value: Blob, meta: {
+    duration: number;
+  }) => any;
   onData?: (value: Blob) => any;
   onStart?: (event: React.MouseEvent<any>) => any;
   onPause?: (event: React.MouseEvent<any>) => any;
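In practice, the typing change above means an `onConfirm` handler now receives a second `meta` argument carrying the recording duration in seconds, alongside the audio `Blob`. A minimal consumer sketch (the import path and component wiring are assumptions for illustration, not taken from this diff):

```tsx
import React from 'react';
import { AudioRecorder } from '@onesy/ui-react';

const RecordNote = () => {
  // New in 1.0.25: onConfirm receives (blob, meta) instead of just (blob)
  const onConfirm = (value: Blob, meta: { duration: number }) => {
    console.log('Recorded', value.type, `${meta.duration.toFixed(1)}s`);
  };

  return <AudioRecorder onConfirm={onConfirm} />;
};

export default RecordNote;
```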
package/AudioRecorder/AudioRecorder.js CHANGED
@@ -86,11 +86,14 @@ const AudioRecorder = react_1.default.forwardRef((props_, ref) => {
     root: react_1.default.useRef(null),
     mediaRecorder: react_1.default.useRef(null),
     mediaRecorderBytes: react_1.default.useRef([]),
-    start: react_1.default.useRef(0),
+    startedAt: react_1.default.useRef(0),
     valuePaused: react_1.default.useRef(0),
     value: react_1.default.useRef(null),
     animationFrame: react_1.default.useRef(null),
-    onData: react_1.default.useRef(null)
+    onData: react_1.default.useRef(null),
+    // fallback to duration calculation on desktop
+    // ie. for mobile where we can't easily determine duration
+    duration: react_1.default.useRef(0)
 };
 refs.onData.current = onData;
 const supported = (0, utils_1.isEnvironment)('browser') && ((_a = window.navigator.mediaDevices) === null || _a === void 0 ? void 0 : _a.getUserMedia);
@@ -105,7 +108,7 @@ const AudioRecorder = react_1.default.forwardRef((props_, ref) => {
     };
 }, []);
 const update = () => {
-    setValue(refs.valuePaused.current + (date_1.OnesyDate.milliseconds - refs.start.current));
+    setValue(refs.valuePaused.current + (date_1.OnesyDate.milliseconds - refs.startedAt.current));
     refs.animationFrame.current = requestAnimationFrame(update);
 };
 const onStart = react_1.default.useCallback(async (event) => {
@@ -136,7 +139,10 @@ const AudioRecorder = react_1.default.forwardRef((props_, ref) => {
         onError(error);
         return;
     }
-    refs.start.current = date_1.OnesyDate.milliseconds;
+    // reset duration
+    refs.duration.current = 0;
+    // started at milliseconds
+    refs.startedAt.current = date_1.OnesyDate.milliseconds;
     // ~60+ fps
     refs.animationFrame.current = requestAnimationFrame(update);
     setStatus('running');
@@ -145,9 +151,10 @@ const AudioRecorder = react_1.default.forwardRef((props_, ref) => {
 }, [onStart_, onError]);
 const onPause = react_1.default.useCallback((event) => {
     // media recorder
-    if (refs.mediaRecorder.current) {
+    if (refs.mediaRecorder.current)
         refs.mediaRecorder.current.pause();
-    }
+    // add so far to duration
+    refs.duration.current += date_1.OnesyDate.milliseconds - refs.startedAt.current;
     clear();
     // Remember previous value
     refs.valuePaused.current = refs.value.current;
@@ -157,43 +164,51 @@ const AudioRecorder = react_1.default.forwardRef((props_, ref) => {
 }, [onPause_]);
 const onStop = react_1.default.useCallback((event) => {
     // media recorder
-    if (refs.mediaRecorder.current) {
+    if (refs.mediaRecorder.current)
         refs.mediaRecorder.current.stop();
-    }
     clear();
     setStatus('initial');
     setValue(0);
-    refs.start.current = 0;
     refs.valuePaused.current = 0;
     refs.value.current = 0;
     if ((0, utils_1.is)('function', onStop_))
         onStop_(event);
 }, [onStop_]);
 const onConfirm = react_1.default.useCallback(async (event) => {
+    var _a;
     // Stop
     onStop(event);
-    … (11 removed lines of the previous onConfirm body not rendered in this diff view)
-    }
+    // add so far to duration
+    refs.duration.current += date_1.OnesyDate.milliseconds - refs.startedAt.current;
+    await (0, utils_1.wait)(40);
+    // Get the blob
+    const mimeType = (_a = refs.mediaRecorder.current) === null || _a === void 0 ? void 0 : _a.mimeType;
+    let blob = new Blob(refs.mediaRecorderBytes.current, { type: mimeType });
+    // clean up
+    refs.mediaRecorderBytes.current = [];
+    const meta = {
+        // duration in seconds
+        duration: refs.duration.current / 1e3
+    };
+    // works well on desktop
+    // mobile has issues with fixing web audio
+    const { blob: blobAudioFix, duration: durationAudioFix, error } = await (0, utils_2.audioFix)(blob);
+    if (!error) {
+        blob = blobAudioFix;
+        if (durationAudioFix !== undefined)
+            meta.duration = durationAudioFix;
+    }
+    if ((0, utils_1.is)('function', onConfirm_))
+        onConfirm_(blob, meta);
 }, [onStop, onConfirm_]);
 const onResume = react_1.default.useCallback((event) => {
     // media recorder
-    if (refs.mediaRecorder.current) {
+    if (refs.mediaRecorder.current)
         refs.mediaRecorder.current.resume();
-    }
+    // record at milliseconds
+    refs.startedAt.current = date_1.OnesyDate.milliseconds;
     // ~60+ fps
     refs.animationFrame.current = requestAnimationFrame(update);
-    // Update start, valuePaused value
-    refs.start.current = date_1.OnesyDate.milliseconds;
     setStatus('running');
     if ((0, utils_1.is)('function', onResume_))
         onResume_(event);
package/esm/AudioRecorder/AudioRecorder.js CHANGED
@@ -5,7 +5,7 @@ const _excluded = ["size", "pause", "renderMain", "renderTime", "loading", "disa
 function ownKeys(e, r) { var t = Object.keys(e); if (Object.getOwnPropertySymbols) { var o = Object.getOwnPropertySymbols(e); r && (o = o.filter(function (r) { return Object.getOwnPropertyDescriptor(e, r).enumerable; })), t.push.apply(t, o); } return t; }
 function _objectSpread(e) { for (var r = 1; r < arguments.length; r++) { var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? ownKeys(Object(t), !0).forEach(function (r) { _defineProperty(e, r, t[r]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(e, Object.getOwnPropertyDescriptors(t)) : ownKeys(Object(t)).forEach(function (r) { Object.defineProperty(e, r, Object.getOwnPropertyDescriptor(t, r)); }); } return e; }
 import React from 'react';
-import { getLeadingZerosNumber, is, isEnvironment } from '@onesy/utils';
+import { getLeadingZerosNumber, is, isEnvironment, wait } from '@onesy/utils';
 import { classNames, style as styleMethod, useOnesyTheme } from '@onesy/style-react';
 import { OnesyDate, duration } from '@onesy/date';
 import IconMaterialMic from '@onesy/icons-material-rounded-react/IconMaterialMicW100';
@@ -18,7 +18,7 @@ import FadeElement from '../Fade';
 import TypeElement from '../Type';
 import TooltipElement from '../Tooltip';
 import IconButtonElement from '../IconButton';
-import { staticClassName } from '../utils';
+import { audioFix, staticClassName } from '../utils';
 const useStyle = styleMethod(theme => ({
   '@keyframes pulse': {
     '0%': {
@@ -103,11 +103,14 @@ const AudioRecorder = /*#__PURE__*/React.forwardRef((props_, ref) => {
     root: React.useRef(null),
     mediaRecorder: React.useRef(null),
     mediaRecorderBytes: React.useRef([]),
-    start: React.useRef(0),
+    startedAt: React.useRef(0),
     valuePaused: React.useRef(0),
     value: React.useRef(null),
     animationFrame: React.useRef(null),
-    onData: React.useRef(null)
+    onData: React.useRef(null),
+    // fallback to duration calculation on desktop
+    // ie. for mobile where we can't easily determine duration
+    duration: React.useRef(0)
   };
   refs.onData.current = onData;
   const supported = isEnvironment('browser') && window.navigator.mediaDevices?.getUserMedia;
@@ -122,7 +125,7 @@ const AudioRecorder = /*#__PURE__*/React.forwardRef((props_, ref) => {
   };
 }, []);
 const update = () => {
-  setValue(refs.valuePaused.current + (OnesyDate.milliseconds - refs.start.current));
+  setValue(refs.valuePaused.current + (OnesyDate.milliseconds - refs.startedAt.current));
   refs.animationFrame.current = requestAnimationFrame(update);
 };
 const onStart = React.useCallback(async event => {
@@ -157,7 +160,12 @@ const AudioRecorder = /*#__PURE__*/React.forwardRef((props_, ref) => {
     if (is('function', onError)) onError(error);
     return;
   }
-  refs.start.current = OnesyDate.milliseconds;
+
+  // reset duration
+  refs.duration.current = 0;
+
+  // started at milliseconds
+  refs.startedAt.current = OnesyDate.milliseconds;

   // ~60+ fps
   refs.animationFrame.current = requestAnimationFrame(update);
@@ -166,9 +174,10 @@ const AudioRecorder = /*#__PURE__*/React.forwardRef((props_, ref) => {
 }, [onStart_, onError]);
 const onPause = React.useCallback(event => {
   // media recorder
-  if (refs.mediaRecorder.current) {
-    refs.mediaRecorder.current.pause();
-  }
+  if (refs.mediaRecorder.current) refs.mediaRecorder.current.pause();
+
+  // add so far to duration
+  refs.duration.current += OnesyDate.milliseconds - refs.startedAt.current;
   clear();

   // Remember previous value
@@ -178,13 +187,10 @@ const AudioRecorder = /*#__PURE__*/React.forwardRef((props_, ref) => {
 }, [onPause_]);
 const onStop = React.useCallback(event => {
   // media recorder
-  if (refs.mediaRecorder.current) {
-    refs.mediaRecorder.current.stop();
-  }
+  if (refs.mediaRecorder.current) refs.mediaRecorder.current.stop();
   clear();
   setStatus('initial');
   setValue(0);
-  refs.start.current = 0;
   refs.valuePaused.current = 0;
   refs.value.current = 0;
   if (is('function', onStop_)) onStop_(event);
@@ -192,31 +198,46 @@ const AudioRecorder = /*#__PURE__*/React.forwardRef((props_, ref) => {
 const onConfirm = React.useCallback(async event => {
   // Stop
   onStop(event);
-  setTimeout(() => {
-    // Get the blob
-    const mimeType = refs.mediaRecorder.current?.mimeType;
-    console.log('AudioRecorder onConfirm', mimeType);
-    const blob = new Blob(refs.mediaRecorderBytes.current, {
-      type: mimeType
-    });

-  … (5 removed lines not rendered in this diff view)
+  // add so far to duration
+  refs.duration.current += OnesyDate.milliseconds - refs.startedAt.current;
+  await wait(40);
+
+  // Get the blob
+  const mimeType = refs.mediaRecorder.current?.mimeType;
+  let blob = new Blob(refs.mediaRecorderBytes.current, {
+    type: mimeType
+  });
+
+  // clean up
+  refs.mediaRecorderBytes.current = [];
+  const meta = {
+    // duration in seconds
+    duration: refs.duration.current / 1e3
+  };
+
+  // works well on desktop
+  // mobile has issues with fixing web audio
+  const {
+    blob: blobAudioFix,
+    duration: durationAudioFix,
+    error
+  } = await audioFix(blob);
+  if (!error) {
+    blob = blobAudioFix;
+    if (durationAudioFix !== undefined) meta.duration = durationAudioFix;
+  }
+  if (is('function', onConfirm_)) onConfirm_(blob, meta);
 }, [onStop, onConfirm_]);
 const onResume = React.useCallback(event => {
   // media recorder
-  if (refs.mediaRecorder.current) {
-    refs.mediaRecorder.current.resume();
-  }
+  if (refs.mediaRecorder.current) refs.mediaRecorder.current.resume();
+
+  // record at milliseconds
+  refs.startedAt.current = OnesyDate.milliseconds;

   // ~60+ fps
   refs.animationFrame.current = requestAnimationFrame(update);
-
-  // Update start, valuePaused value
-  refs.start.current = OnesyDate.milliseconds;
   setStatus('running');
   if (is('function', onResume_)) onResume_(event);
 }, [onResume_]);
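Taken together, the component changes swap the old single `start` timestamp for a `startedAt` timestamp plus an accumulated `duration` ref: `onStart` resets both, `onPause` and `onConfirm` add the elapsed `OnesyDate.milliseconds - refs.startedAt.current` to `duration`, and `onResume` only resets `startedAt`, so paused time is excluded from the reported duration. A standalone sketch of that accumulation pattern (illustrative only, using `Date.now()` in place of `OnesyDate.milliseconds`):

```ts
// Pause-aware duration accumulation, mirroring the startedAt/duration refs above.
class RecordingClock {
  private startedAt = 0; // start of the current running segment (ms)
  private duration = 0;  // accumulated running time across segments (ms)

  start() { this.duration = 0; this.startedAt = Date.now(); }
  pause() { this.duration += Date.now() - this.startedAt; }
  resume() { this.startedAt = Date.now(); }

  // Seconds, as in `duration: refs.duration.current / 1e3`
  confirm(): number { this.pause(); return this.duration / 1e3; }
}
```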
package/esm/index.js CHANGED
package/esm/utils.js CHANGED
@@ -1,4 +1,9 @@
+import * as ebml from 'ts-ebml';
+import { Buffer } from 'buffer';
 import { is, canvasFilterBrightness, canvasFilterContrast, canvasFilterSaturation, canvasFilterFade, canvasFilterInvert, canvasFilterOldPhoto, download, clamp, isEnvironment } from '@onesy/utils';
+if (isEnvironment('browser')) {
+  window.Buffer = Buffer;
+}
 export function reflow(element) {
   element?.offsetHeight;
 }
@@ -1253,4 +1258,43 @@ export const currencies = [{
   rounding: 0,
   code: 'ZMK',
   name_plural: 'Zambian kwachas'
-}];
+}];
+export const audioFix = async blob => {
+  try {
+    const readAsArrayBuffer = () => {
+      return new Promise((resolve, reject) => {
+        const fileReader = new FileReader();
+        fileReader.readAsArrayBuffer(blob);
+        fileReader.onloadend = () => resolve(fileReader.result);
+        fileReader.onerror = event => reject(event.error);
+      });
+    };
+    const arrayBuffer = await readAsArrayBuffer();
+    const decoder = new ebml.Decoder();
+    const reader = new ebml.Reader();
+    reader.logging = false;
+    reader.drop_default_duration = false;
+    const uint8Array = new Uint8Array(arrayBuffer);
+    const elements = decoder.decode(uint8Array);
+    elements.forEach(element => reader.read(element));
+    reader.stop();
+    const refinedMetadataBuf = ebml.tools.makeMetadataSeekable(reader.metadatas, reader.duration, reader.cues);
+    const body = arrayBuffer.slice(reader.metadataSize);
+    const result = new Blob([refinedMetadataBuf, body], {
+      type: blob.type
+    });
+    return {
+      blob: result
+    };
+  } catch (error) {
+    console.log('audioFix error', error);
+
+    // fallback
+    // return durationFallback(blob);
+
+    return {
+      blob,
+      error
+    };
+  }
+};
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@onesy/ui-react",
-  "version": "1.0.24",
+  "version": "1.0.25",
   "description": "UI for React",
   "repository": "https://github.com/onesy-me/onesy.git",
   "author": "Lazar <lazareric2@gmail.com>",
@@ -41,7 +41,9 @@
     "@onesy/icons-material-rounded-react": "^1.0.2",
     "@onesy/log": "^1.0.0",
     "@onesy/subscription": "^1.0.0",
-    "@onesy/utils": "^1.0.
+    "@onesy/utils": "^1.0.2",
+    "buffer": "^6.0.3",
+    "ts-ebml": "^3.0.1"
   },
   "publishConfig": {
     "access": "public",
package/types.d.ts CHANGED
package/utils.d.ts CHANGED
@@ -1,4 +1,4 @@
-import { IPoint } from './types';
+import { IAudioFix, IPoint } from './types';
 export declare function reflow(element: HTMLElement): void;
 export declare const staticClassName: (name: string, theme: any) => any;
 export declare const iconSizeToFontSize: (value: string | number) => any;
@@ -46,3 +46,4 @@ export declare const currencies: {
   code: string;
   name_plural: string;
 }[];
+export declare const audioFix: (blob: Blob) => Promise<IAudioFix>;
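`IAudioFix` itself is declared in `types.d.ts` (listed above as +5 lines but not expanded in this view). Judging from how `AudioRecorder.onConfirm` consumes the result, the shape is presumably close to the following sketch, which is an inference rather than the published declaration:

```ts
// Assumed shape of IAudioFix, inferred from usage in AudioRecorder.onConfirm.
export interface IAudioFix {
  blob: Blob;        // fixed blob on success, original blob on failure
  duration?: number; // seconds, when the fix path can determine it
  error?: any;       // set only when the EBML rewrite throws
}
```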
package/utils.js CHANGED
@@ -1,7 +1,35 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.currencies = exports.iconFontSize = exports.formats = exports.toNumber = exports.caret = exports.keyboardStyleCommands = exports.keyboardStandardCommands = exports.getOverflowParent = exports.importIframeStyles = exports.replace = exports.sanitize = exports.minMaxBetweenNumbers = exports.controlPoint = exports.line = exports.angleToCoordinates = exports.matches = exports.save = exports.print = exports.canvasOldPhoto = exports.canvasInvert = exports.canvasFade = exports.canvasSaturation = exports.canvasContrast = exports.canvasBrightness = exports.image = exports.valueBreakpoints = exports.iconSizeToFontSize = exports.staticClassName = exports.reflow = void 0;
+exports.audioFix = exports.currencies = exports.iconFontSize = exports.formats = exports.toNumber = exports.caret = exports.keyboardStyleCommands = exports.keyboardStandardCommands = exports.getOverflowParent = exports.importIframeStyles = exports.replace = exports.sanitize = exports.minMaxBetweenNumbers = exports.controlPoint = exports.line = exports.angleToCoordinates = exports.matches = exports.save = exports.print = exports.canvasOldPhoto = exports.canvasInvert = exports.canvasFade = exports.canvasSaturation = exports.canvasContrast = exports.canvasBrightness = exports.image = exports.valueBreakpoints = exports.iconSizeToFontSize = exports.staticClassName = exports.reflow = void 0;
+const ebml = __importStar(require("ts-ebml"));
+const buffer_1 = require("buffer");
 const utils_1 = require("@onesy/utils");
+if ((0, utils_1.isEnvironment)('browser')) {
+    window.Buffer = buffer_1.Buffer;
+}
 function reflow(element) {
     element === null || element === void 0 ? void 0 : element.offsetHeight;
 }
@@ -1420,3 +1448,38 @@ exports.currencies = [
     name_plural: 'Zambian kwachas'
   }
 ];
+const audioFix = async (blob) => {
+    try {
+        const readAsArrayBuffer = () => {
+            return new Promise((resolve, reject) => {
+                const fileReader = new FileReader();
+                fileReader.readAsArrayBuffer(blob);
+                fileReader.onloadend = () => resolve(fileReader.result);
+                fileReader.onerror = (event) => reject(event.error);
+            });
+        };
+        const arrayBuffer = await readAsArrayBuffer();
+        const decoder = new ebml.Decoder();
+        const reader = new ebml.Reader();
+        reader.logging = false;
+        reader.drop_default_duration = false;
+        const uint8Array = new Uint8Array(arrayBuffer);
+        const elements = decoder.decode(uint8Array);
+        elements.forEach((element) => reader.read(element));
+        reader.stop();
+        const refinedMetadataBuf = ebml.tools.makeMetadataSeekable(reader.metadatas, reader.duration, reader.cues);
+        const body = arrayBuffer.slice(reader.metadataSize);
+        const result = new Blob([refinedMetadataBuf, body], { type: blob.type });
+        return { blob: result };
+    }
+    catch (error) {
+        console.log('audioFix error', error);
+        // fallback
+        // return durationFallback(blob);
+        return {
+            blob,
+            error
+        };
+    }
+};
+exports.audioFix = audioFix;