@kenzuya/mediabunny 1.26.0 → 1.28.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/bundles/{mediabunny.mjs → mediabunny.js} +21963 -21388
- package/dist/bundles/mediabunny.min.js +490 -0
- package/dist/modules/shared/mp3-misc.d.ts.map +1 -1
- package/dist/modules/src/adts/adts-demuxer.d.ts +6 -6
- package/dist/modules/src/adts/adts-demuxer.d.ts.map +1 -1
- package/dist/modules/src/adts/adts-muxer.d.ts +4 -4
- package/dist/modules/src/adts/adts-muxer.d.ts.map +1 -1
- package/dist/modules/src/adts/adts-reader.d.ts +1 -1
- package/dist/modules/src/adts/adts-reader.d.ts.map +1 -1
- package/dist/modules/src/avi/avi-demuxer.d.ts +44 -0
- package/dist/modules/src/avi/avi-demuxer.d.ts.map +1 -0
- package/dist/modules/src/avi/avi-misc.d.ts +88 -0
- package/dist/modules/src/avi/avi-misc.d.ts.map +1 -0
- package/dist/modules/src/avi/avi-muxer.d.ts +45 -0
- package/dist/modules/src/avi/avi-muxer.d.ts.map +1 -0
- package/dist/modules/src/avi/riff-writer.d.ts +26 -0
- package/dist/modules/src/avi/riff-writer.d.ts.map +1 -0
- package/dist/modules/src/codec-data.d.ts +8 -3
- package/dist/modules/src/codec-data.d.ts.map +1 -1
- package/dist/modules/src/codec.d.ts +10 -10
- package/dist/modules/src/codec.d.ts.map +1 -1
- package/dist/modules/src/conversion.d.ts +33 -16
- package/dist/modules/src/conversion.d.ts.map +1 -1
- package/dist/modules/src/custom-coder.d.ts +8 -8
- package/dist/modules/src/custom-coder.d.ts.map +1 -1
- package/dist/modules/src/demuxer.d.ts +3 -3
- package/dist/modules/src/demuxer.d.ts.map +1 -1
- package/dist/modules/src/encode.d.ts +8 -8
- package/dist/modules/src/encode.d.ts.map +1 -1
- package/dist/modules/src/flac/flac-demuxer.d.ts +7 -7
- package/dist/modules/src/flac/flac-demuxer.d.ts.map +1 -1
- package/dist/modules/src/flac/flac-misc.d.ts +3 -3
- package/dist/modules/src/flac/flac-misc.d.ts.map +1 -1
- package/dist/modules/src/flac/flac-muxer.d.ts +5 -5
- package/dist/modules/src/flac/flac-muxer.d.ts.map +1 -1
- package/dist/modules/src/id3.d.ts +3 -3
- package/dist/modules/src/id3.d.ts.map +1 -1
- package/dist/modules/src/index.d.ts +20 -20
- package/dist/modules/src/index.d.ts.map +1 -1
- package/dist/modules/src/input-format.d.ts +22 -0
- package/dist/modules/src/input-format.d.ts.map +1 -1
- package/dist/modules/src/input-track.d.ts +8 -8
- package/dist/modules/src/input-track.d.ts.map +1 -1
- package/dist/modules/src/input.d.ts +12 -12
- package/dist/modules/src/isobmff/isobmff-boxes.d.ts +2 -2
- package/dist/modules/src/isobmff/isobmff-boxes.d.ts.map +1 -1
- package/dist/modules/src/isobmff/isobmff-demuxer.d.ts +12 -12
- package/dist/modules/src/isobmff/isobmff-demuxer.d.ts.map +1 -1
- package/dist/modules/src/isobmff/isobmff-misc.d.ts.map +1 -1
- package/dist/modules/src/isobmff/isobmff-muxer.d.ts +11 -11
- package/dist/modules/src/isobmff/isobmff-muxer.d.ts.map +1 -1
- package/dist/modules/src/isobmff/isobmff-reader.d.ts +2 -2
- package/dist/modules/src/isobmff/isobmff-reader.d.ts.map +1 -1
- package/dist/modules/src/matroska/ebml.d.ts +3 -3
- package/dist/modules/src/matroska/ebml.d.ts.map +1 -1
- package/dist/modules/src/matroska/matroska-demuxer.d.ts +13 -13
- package/dist/modules/src/matroska/matroska-demuxer.d.ts.map +1 -1
- package/dist/modules/src/matroska/matroska-input.d.ts +33 -0
- package/dist/modules/src/matroska/matroska-input.d.ts.map +1 -0
- package/dist/modules/src/matroska/matroska-misc.d.ts.map +1 -1
- package/dist/modules/src/matroska/matroska-muxer.d.ts +5 -5
- package/dist/modules/src/matroska/matroska-muxer.d.ts.map +1 -1
- package/dist/modules/src/media-sink.d.ts +5 -5
- package/dist/modules/src/media-sink.d.ts.map +1 -1
- package/dist/modules/src/media-source.d.ts +22 -4
- package/dist/modules/src/media-source.d.ts.map +1 -1
- package/dist/modules/src/metadata.d.ts +2 -2
- package/dist/modules/src/metadata.d.ts.map +1 -1
- package/dist/modules/src/misc.d.ts +5 -4
- package/dist/modules/src/misc.d.ts.map +1 -1
- package/dist/modules/src/mp3/mp3-demuxer.d.ts +7 -7
- package/dist/modules/src/mp3/mp3-demuxer.d.ts.map +1 -1
- package/dist/modules/src/mp3/mp3-muxer.d.ts +4 -4
- package/dist/modules/src/mp3/mp3-muxer.d.ts.map +1 -1
- package/dist/modules/src/mp3/mp3-reader.d.ts +2 -2
- package/dist/modules/src/mp3/mp3-reader.d.ts.map +1 -1
- package/dist/modules/src/mp3/mp3-writer.d.ts +1 -1
- package/dist/modules/src/mp3/mp3-writer.d.ts.map +1 -1
- package/dist/modules/src/muxer.d.ts +4 -4
- package/dist/modules/src/muxer.d.ts.map +1 -1
- package/dist/modules/src/node.d.ts +1 -1
- package/dist/modules/src/ogg/ogg-demuxer.d.ts +7 -7
- package/dist/modules/src/ogg/ogg-demuxer.d.ts.map +1 -1
- package/dist/modules/src/ogg/ogg-misc.d.ts +1 -1
- package/dist/modules/src/ogg/ogg-misc.d.ts.map +1 -1
- package/dist/modules/src/ogg/ogg-muxer.d.ts +5 -5
- package/dist/modules/src/ogg/ogg-muxer.d.ts.map +1 -1
- package/dist/modules/src/ogg/ogg-reader.d.ts +1 -1
- package/dist/modules/src/ogg/ogg-reader.d.ts.map +1 -1
- package/dist/modules/src/output-format.d.ts +51 -6
- package/dist/modules/src/output-format.d.ts.map +1 -1
- package/dist/modules/src/output.d.ts +13 -13
- package/dist/modules/src/output.d.ts.map +1 -1
- package/dist/modules/src/packet.d.ts +1 -1
- package/dist/modules/src/packet.d.ts.map +1 -1
- package/dist/modules/src/pcm.d.ts.map +1 -1
- package/dist/modules/src/reader.d.ts +2 -2
- package/dist/modules/src/reader.d.ts.map +1 -1
- package/dist/modules/src/sample.d.ts +57 -15
- package/dist/modules/src/sample.d.ts.map +1 -1
- package/dist/modules/src/source.d.ts +3 -3
- package/dist/modules/src/source.d.ts.map +1 -1
- package/dist/modules/src/subtitles.d.ts +1 -1
- package/dist/modules/src/subtitles.d.ts.map +1 -1
- package/dist/modules/src/target.d.ts +2 -2
- package/dist/modules/src/target.d.ts.map +1 -1
- package/dist/modules/src/tsconfig.tsbuildinfo +1 -1
- package/dist/modules/src/wave/riff-writer.d.ts +1 -1
- package/dist/modules/src/wave/riff-writer.d.ts.map +1 -1
- package/dist/modules/src/wave/wave-demuxer.d.ts +6 -6
- package/dist/modules/src/wave/wave-demuxer.d.ts.map +1 -1
- package/dist/modules/src/wave/wave-muxer.d.ts +4 -4
- package/dist/modules/src/wave/wave-muxer.d.ts.map +1 -1
- package/dist/modules/src/writer.d.ts +1 -1
- package/dist/modules/src/writer.d.ts.map +1 -1
- package/dist/packages/eac3/eac3.wasm +0 -0
- package/dist/packages/eac3/mediabunny-eac3.js +1058 -0
- package/dist/packages/eac3/mediabunny-eac3.min.js +44 -0
- package/dist/packages/mp3-encoder/mediabunny-mp3-encoder.js +694 -0
- package/dist/packages/mp3-encoder/mediabunny-mp3-encoder.min.js +58 -0
- package/dist/packages/mpeg4/mediabunny-mpeg4.js +1198 -0
- package/dist/packages/mpeg4/mediabunny-mpeg4.min.js +44 -0
- package/dist/packages/mpeg4/xvid.wasm +0 -0
- package/package.json +18 -57
- package/dist/bundles/mediabunny.cjs +0 -26140
- package/dist/bundles/mediabunny.min.cjs +0 -147
- package/dist/bundles/mediabunny.min.mjs +0 -146
- package/dist/mediabunny.d.ts +0 -3319
- package/dist/modules/shared/mp3-misc.js +0 -147
- package/dist/modules/src/adts/adts-demuxer.js +0 -239
- package/dist/modules/src/adts/adts-muxer.js +0 -80
- package/dist/modules/src/adts/adts-reader.js +0 -63
- package/dist/modules/src/codec-data.js +0 -1730
- package/dist/modules/src/codec.js +0 -869
- package/dist/modules/src/conversion.js +0 -1459
- package/dist/modules/src/custom-coder.js +0 -117
- package/dist/modules/src/demuxer.js +0 -12
- package/dist/modules/src/encode.js +0 -442
- package/dist/modules/src/flac/flac-demuxer.js +0 -504
- package/dist/modules/src/flac/flac-misc.js +0 -135
- package/dist/modules/src/flac/flac-muxer.js +0 -222
- package/dist/modules/src/id3.js +0 -848
- package/dist/modules/src/index.js +0 -28
- package/dist/modules/src/input-format.js +0 -480
- package/dist/modules/src/input-track.js +0 -372
- package/dist/modules/src/input.js +0 -188
- package/dist/modules/src/isobmff/isobmff-boxes.js +0 -1480
- package/dist/modules/src/isobmff/isobmff-demuxer.js +0 -2618
- package/dist/modules/src/isobmff/isobmff-misc.js +0 -20
- package/dist/modules/src/isobmff/isobmff-muxer.js +0 -966
- package/dist/modules/src/isobmff/isobmff-reader.js +0 -72
- package/dist/modules/src/matroska/ebml.js +0 -653
- package/dist/modules/src/matroska/matroska-demuxer.js +0 -2133
- package/dist/modules/src/matroska/matroska-misc.js +0 -20
- package/dist/modules/src/matroska/matroska-muxer.js +0 -1017
- package/dist/modules/src/media-sink.js +0 -1736
- package/dist/modules/src/media-source.js +0 -1825
- package/dist/modules/src/metadata.js +0 -193
- package/dist/modules/src/misc.js +0 -623
- package/dist/modules/src/mp3/mp3-demuxer.js +0 -285
- package/dist/modules/src/mp3/mp3-muxer.js +0 -123
- package/dist/modules/src/mp3/mp3-reader.js +0 -26
- package/dist/modules/src/mp3/mp3-writer.js +0 -78
- package/dist/modules/src/muxer.js +0 -50
- package/dist/modules/src/node.js +0 -9
- package/dist/modules/src/ogg/ogg-demuxer.js +0 -763
- package/dist/modules/src/ogg/ogg-misc.js +0 -78
- package/dist/modules/src/ogg/ogg-muxer.js +0 -353
- package/dist/modules/src/ogg/ogg-reader.js +0 -65
- package/dist/modules/src/output-format.js +0 -527
- package/dist/modules/src/output.js +0 -300
- package/dist/modules/src/packet.js +0 -182
- package/dist/modules/src/pcm.js +0 -85
- package/dist/modules/src/reader.js +0 -236
- package/dist/modules/src/sample.js +0 -1056
- package/dist/modules/src/source.js +0 -1182
- package/dist/modules/src/subtitles.js +0 -575
- package/dist/modules/src/target.js +0 -140
- package/dist/modules/src/wave/riff-writer.js +0 -30
- package/dist/modules/src/wave/wave-demuxer.js +0 -447
- package/dist/modules/src/wave/wave-muxer.js +0 -318
- package/dist/modules/src/writer.js +0 -370
- package/src/adts/adts-demuxer.ts +0 -331
- package/src/adts/adts-muxer.ts +0 -111
- package/src/adts/adts-reader.ts +0 -85
- package/src/codec-data.ts +0 -2078
- package/src/codec.ts +0 -1092
- package/src/conversion.ts +0 -2112
- package/src/custom-coder.ts +0 -197
- package/src/demuxer.ts +0 -24
- package/src/encode.ts +0 -739
- package/src/flac/flac-demuxer.ts +0 -730
- package/src/flac/flac-misc.ts +0 -164
- package/src/flac/flac-muxer.ts +0 -320
- package/src/id3.ts +0 -925
- package/src/index.ts +0 -221
- package/src/input-format.ts +0 -541
- package/src/input-track.ts +0 -529
- package/src/input.ts +0 -235
- package/src/isobmff/isobmff-boxes.ts +0 -1719
- package/src/isobmff/isobmff-demuxer.ts +0 -3190
- package/src/isobmff/isobmff-misc.ts +0 -29
- package/src/isobmff/isobmff-muxer.ts +0 -1348
- package/src/isobmff/isobmff-reader.ts +0 -91
- package/src/matroska/ebml.ts +0 -730
- package/src/matroska/matroska-demuxer.ts +0 -2481
- package/src/matroska/matroska-misc.ts +0 -29
- package/src/matroska/matroska-muxer.ts +0 -1276
- package/src/media-sink.ts +0 -2179
- package/src/media-source.ts +0 -2243
- package/src/metadata.ts +0 -320
- package/src/misc.ts +0 -798
- package/src/mp3/mp3-demuxer.ts +0 -383
- package/src/mp3/mp3-muxer.ts +0 -166
- package/src/mp3/mp3-reader.ts +0 -34
- package/src/mp3/mp3-writer.ts +0 -120
- package/src/muxer.ts +0 -88
- package/src/node.ts +0 -11
- package/src/ogg/ogg-demuxer.ts +0 -1053
- package/src/ogg/ogg-misc.ts +0 -116
- package/src/ogg/ogg-muxer.ts +0 -497
- package/src/ogg/ogg-reader.ts +0 -93
- package/src/output-format.ts +0 -945
- package/src/output.ts +0 -488
- package/src/packet.ts +0 -263
- package/src/pcm.ts +0 -112
- package/src/reader.ts +0 -323
- package/src/sample.ts +0 -1461
- package/src/source.ts +0 -1688
- package/src/subtitles.ts +0 -711
- package/src/target.ts +0 -204
- package/src/tsconfig.json +0 -16
- package/src/wave/riff-writer.ts +0 -36
- package/src/wave/wave-demuxer.ts +0 -529
- package/src/wave/wave-muxer.ts +0 -371
- package/src/writer.ts +0 -490
|
@@ -1,1056 +0,0 @@
|
|
|
1
|
-
/*!
|
|
2
|
-
* Copyright (c) 2025-present, Vanilagy and contributors
|
|
3
|
-
*
|
|
4
|
-
* This Source Code Form is subject to the terms of the Mozilla Public
|
|
5
|
-
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
|
6
|
-
* file, You can obtain one at https://mozilla.org/MPL/2.0/.
|
|
7
|
-
*/
|
|
8
|
-
import { assert, clamp, isAllowSharedBufferSource, SECOND_TO_MICROSECOND_FACTOR, toDataView, toUint8Array, isFirefox, polyfillSymbolDispose, } from './misc.js';
|
|
9
|
-
polyfillSymbolDispose();
|
|
10
|
-
/**
|
|
11
|
-
* Represents a raw, unencoded video sample (frame). Mainly used as an expressive wrapper around WebCodecs API's
|
|
12
|
-
* [`VideoFrame`](https://developer.mozilla.org/en-US/docs/Web/API/VideoFrame), but can also be used standalone.
|
|
13
|
-
* @group Samples
|
|
14
|
-
* @public
|
|
15
|
-
*/
|
|
16
|
-
export class VideoSample {
|
|
17
|
-
/** The width of the frame in pixels after rotation. */
|
|
18
|
-
get displayWidth() {
|
|
19
|
-
return this.rotation % 180 === 0 ? this.codedWidth : this.codedHeight;
|
|
20
|
-
}
|
|
21
|
-
/** The height of the frame in pixels after rotation. */
|
|
22
|
-
get displayHeight() {
|
|
23
|
-
return this.rotation % 180 === 0 ? this.codedHeight : this.codedWidth;
|
|
24
|
-
}
|
|
25
|
-
/** The presentation timestamp of the frame in microseconds. */
|
|
26
|
-
get microsecondTimestamp() {
|
|
27
|
-
return Math.trunc(SECOND_TO_MICROSECOND_FACTOR * this.timestamp);
|
|
28
|
-
}
|
|
29
|
-
/** The duration of the frame in microseconds. */
|
|
30
|
-
get microsecondDuration() {
|
|
31
|
-
return Math.trunc(SECOND_TO_MICROSECOND_FACTOR * this.duration);
|
|
32
|
-
}
|
|
33
|
-
/**
|
|
34
|
-
* Whether this sample uses a pixel format that can hold transparency data. Note that this doesn't necessarily mean
|
|
35
|
-
* that the sample is transparent.
|
|
36
|
-
*/
|
|
37
|
-
get hasAlpha() {
|
|
38
|
-
return this.format && this.format.includes('A');
|
|
39
|
-
}
|
|
40
|
-
constructor(data, init) {
|
|
41
|
-
/** @internal */
|
|
42
|
-
this._closed = false;
|
|
43
|
-
if (data instanceof ArrayBuffer || ArrayBuffer.isView(data)) {
|
|
44
|
-
if (!init || typeof init !== 'object') {
|
|
45
|
-
throw new TypeError('init must be an object.');
|
|
46
|
-
}
|
|
47
|
-
if (!('format' in init) || typeof init.format !== 'string') {
|
|
48
|
-
throw new TypeError('init.format must be a string.');
|
|
49
|
-
}
|
|
50
|
-
if (!Number.isInteger(init.codedWidth) || init.codedWidth <= 0) {
|
|
51
|
-
throw new TypeError('init.codedWidth must be a positive integer.');
|
|
52
|
-
}
|
|
53
|
-
if (!Number.isInteger(init.codedHeight) || init.codedHeight <= 0) {
|
|
54
|
-
throw new TypeError('init.codedHeight must be a positive integer.');
|
|
55
|
-
}
|
|
56
|
-
if (init.rotation !== undefined && ![0, 90, 180, 270].includes(init.rotation)) {
|
|
57
|
-
throw new TypeError('init.rotation, when provided, must be 0, 90, 180, or 270.');
|
|
58
|
-
}
|
|
59
|
-
if (!Number.isFinite(init.timestamp)) {
|
|
60
|
-
throw new TypeError('init.timestamp must be a number.');
|
|
61
|
-
}
|
|
62
|
-
if (init.duration !== undefined && (!Number.isFinite(init.duration) || init.duration < 0)) {
|
|
63
|
-
throw new TypeError('init.duration, when provided, must be a non-negative number.');
|
|
64
|
-
}
|
|
65
|
-
this._data = toUint8Array(data).slice(); // Copy it
|
|
66
|
-
this.format = init.format;
|
|
67
|
-
this.codedWidth = init.codedWidth;
|
|
68
|
-
this.codedHeight = init.codedHeight;
|
|
69
|
-
this.rotation = init.rotation ?? 0;
|
|
70
|
-
this.timestamp = init.timestamp;
|
|
71
|
-
this.duration = init.duration ?? 0;
|
|
72
|
-
this.colorSpace = new VideoColorSpace(init.colorSpace);
|
|
73
|
-
}
|
|
74
|
-
else if (typeof VideoFrame !== 'undefined' && data instanceof VideoFrame) {
|
|
75
|
-
if (init?.rotation !== undefined && ![0, 90, 180, 270].includes(init.rotation)) {
|
|
76
|
-
throw new TypeError('init.rotation, when provided, must be 0, 90, 180, or 270.');
|
|
77
|
-
}
|
|
78
|
-
if (init?.timestamp !== undefined && !Number.isFinite(init?.timestamp)) {
|
|
79
|
-
throw new TypeError('init.timestamp, when provided, must be a number.');
|
|
80
|
-
}
|
|
81
|
-
if (init?.duration !== undefined && (!Number.isFinite(init.duration) || init.duration < 0)) {
|
|
82
|
-
throw new TypeError('init.duration, when provided, must be a non-negative number.');
|
|
83
|
-
}
|
|
84
|
-
this._data = data;
|
|
85
|
-
this.format = data.format;
|
|
86
|
-
// Copying the display dimensions here, assuming no innate VideoFrame rotation
|
|
87
|
-
this.codedWidth = data.displayWidth;
|
|
88
|
-
this.codedHeight = data.displayHeight;
|
|
89
|
-
// The VideoFrame's rotation is ignored here. It's still a new field, and I'm not sure of any application
|
|
90
|
-
// where the browser makes use of it. If a case gets found, I'll add it.
|
|
91
|
-
this.rotation = init?.rotation ?? 0;
|
|
92
|
-
this.timestamp = init?.timestamp ?? data.timestamp / 1e6;
|
|
93
|
-
this.duration = init?.duration ?? (data.duration ?? 0) / 1e6;
|
|
94
|
-
this.colorSpace = data.colorSpace;
|
|
95
|
-
}
|
|
96
|
-
else if ((typeof HTMLImageElement !== 'undefined' && data instanceof HTMLImageElement)
|
|
97
|
-
|| (typeof SVGImageElement !== 'undefined' && data instanceof SVGImageElement)
|
|
98
|
-
|| (typeof ImageBitmap !== 'undefined' && data instanceof ImageBitmap)
|
|
99
|
-
|| (typeof HTMLVideoElement !== 'undefined' && data instanceof HTMLVideoElement)
|
|
100
|
-
|| (typeof HTMLCanvasElement !== 'undefined' && data instanceof HTMLCanvasElement)
|
|
101
|
-
|| (typeof OffscreenCanvas !== 'undefined' && data instanceof OffscreenCanvas)) {
|
|
102
|
-
if (!init || typeof init !== 'object') {
|
|
103
|
-
throw new TypeError('init must be an object.');
|
|
104
|
-
}
|
|
105
|
-
if (init.rotation !== undefined && ![0, 90, 180, 270].includes(init.rotation)) {
|
|
106
|
-
throw new TypeError('init.rotation, when provided, must be 0, 90, 180, or 270.');
|
|
107
|
-
}
|
|
108
|
-
if (!Number.isFinite(init.timestamp)) {
|
|
109
|
-
throw new TypeError('init.timestamp must be a number.');
|
|
110
|
-
}
|
|
111
|
-
if (init.duration !== undefined && (!Number.isFinite(init.duration) || init.duration < 0)) {
|
|
112
|
-
throw new TypeError('init.duration, when provided, must be a non-negative number.');
|
|
113
|
-
}
|
|
114
|
-
if (typeof VideoFrame !== 'undefined') {
|
|
115
|
-
return new VideoSample(new VideoFrame(data, {
|
|
116
|
-
timestamp: Math.trunc(init.timestamp * SECOND_TO_MICROSECOND_FACTOR),
|
|
117
|
-
// Drag 0 to undefined
|
|
118
|
-
duration: Math.trunc((init.duration ?? 0) * SECOND_TO_MICROSECOND_FACTOR) || undefined,
|
|
119
|
-
}), init);
|
|
120
|
-
}
|
|
121
|
-
let width = 0;
|
|
122
|
-
let height = 0;
|
|
123
|
-
// Determine the dimensions of the thing
|
|
124
|
-
if ('naturalWidth' in data) {
|
|
125
|
-
width = data.naturalWidth;
|
|
126
|
-
height = data.naturalHeight;
|
|
127
|
-
}
|
|
128
|
-
else if ('videoWidth' in data) {
|
|
129
|
-
width = data.videoWidth;
|
|
130
|
-
height = data.videoHeight;
|
|
131
|
-
}
|
|
132
|
-
else if ('width' in data) {
|
|
133
|
-
width = Number(data.width);
|
|
134
|
-
height = Number(data.height);
|
|
135
|
-
}
|
|
136
|
-
if (!width || !height) {
|
|
137
|
-
throw new TypeError('Could not determine dimensions.');
|
|
138
|
-
}
|
|
139
|
-
const canvas = new OffscreenCanvas(width, height);
|
|
140
|
-
const context = canvas.getContext('2d', {
|
|
141
|
-
alpha: isFirefox(), // Firefox has VideoFrame glitches with opaque canvases
|
|
142
|
-
willReadFrequently: true,
|
|
143
|
-
});
|
|
144
|
-
assert(context);
|
|
145
|
-
// Draw it to a canvas
|
|
146
|
-
context.drawImage(data, 0, 0);
|
|
147
|
-
this._data = canvas;
|
|
148
|
-
this.format = 'RGBX';
|
|
149
|
-
this.codedWidth = width;
|
|
150
|
-
this.codedHeight = height;
|
|
151
|
-
this.rotation = init.rotation ?? 0;
|
|
152
|
-
this.timestamp = init.timestamp;
|
|
153
|
-
this.duration = init.duration ?? 0;
|
|
154
|
-
this.colorSpace = new VideoColorSpace({
|
|
155
|
-
matrix: 'rgb',
|
|
156
|
-
primaries: 'bt709',
|
|
157
|
-
transfer: 'iec61966-2-1',
|
|
158
|
-
fullRange: true,
|
|
159
|
-
});
|
|
160
|
-
}
|
|
161
|
-
else {
|
|
162
|
-
throw new TypeError('Invalid data type: Must be a BufferSource or CanvasImageSource.');
|
|
163
|
-
}
|
|
164
|
-
}
|
|
165
|
-
/** Clones this video sample. */
|
|
166
|
-
clone() {
|
|
167
|
-
if (this._closed) {
|
|
168
|
-
throw new Error('VideoSample is closed.');
|
|
169
|
-
}
|
|
170
|
-
assert(this._data !== null);
|
|
171
|
-
if (isVideoFrame(this._data)) {
|
|
172
|
-
return new VideoSample(this._data.clone(), {
|
|
173
|
-
timestamp: this.timestamp,
|
|
174
|
-
duration: this.duration,
|
|
175
|
-
rotation: this.rotation,
|
|
176
|
-
});
|
|
177
|
-
}
|
|
178
|
-
else if (this._data instanceof Uint8Array) {
|
|
179
|
-
return new VideoSample(this._data.slice(), {
|
|
180
|
-
format: this.format,
|
|
181
|
-
codedWidth: this.codedWidth,
|
|
182
|
-
codedHeight: this.codedHeight,
|
|
183
|
-
timestamp: this.timestamp,
|
|
184
|
-
duration: this.duration,
|
|
185
|
-
colorSpace: this.colorSpace,
|
|
186
|
-
rotation: this.rotation,
|
|
187
|
-
});
|
|
188
|
-
}
|
|
189
|
-
else {
|
|
190
|
-
return new VideoSample(this._data, {
|
|
191
|
-
format: this.format,
|
|
192
|
-
codedWidth: this.codedWidth,
|
|
193
|
-
codedHeight: this.codedHeight,
|
|
194
|
-
timestamp: this.timestamp,
|
|
195
|
-
duration: this.duration,
|
|
196
|
-
colorSpace: this.colorSpace,
|
|
197
|
-
rotation: this.rotation,
|
|
198
|
-
});
|
|
199
|
-
}
|
|
200
|
-
}
|
|
201
|
-
/**
|
|
202
|
-
* Closes this video sample, releasing held resources. Video samples should be closed as soon as they are not
|
|
203
|
-
* needed anymore.
|
|
204
|
-
*/
|
|
205
|
-
close() {
|
|
206
|
-
if (this._closed) {
|
|
207
|
-
return;
|
|
208
|
-
}
|
|
209
|
-
if (isVideoFrame(this._data)) {
|
|
210
|
-
this._data.close();
|
|
211
|
-
}
|
|
212
|
-
else {
|
|
213
|
-
this._data = null; // GC that shit
|
|
214
|
-
}
|
|
215
|
-
this._closed = true;
|
|
216
|
-
}
|
|
217
|
-
/** Returns the number of bytes required to hold this video sample's pixel data. */
|
|
218
|
-
allocationSize() {
|
|
219
|
-
if (this._closed) {
|
|
220
|
-
throw new Error('VideoSample is closed.');
|
|
221
|
-
}
|
|
222
|
-
assert(this._data !== null);
|
|
223
|
-
if (isVideoFrame(this._data)) {
|
|
224
|
-
return this._data.allocationSize();
|
|
225
|
-
}
|
|
226
|
-
else if (this._data instanceof Uint8Array) {
|
|
227
|
-
return this._data.byteLength;
|
|
228
|
-
}
|
|
229
|
-
else {
|
|
230
|
-
return this.codedWidth * this.codedHeight * 4; // RGBX
|
|
231
|
-
}
|
|
232
|
-
}
|
|
233
|
-
/** Copies this video sample's pixel data to an ArrayBuffer or ArrayBufferView. */
|
|
234
|
-
async copyTo(destination) {
|
|
235
|
-
if (!isAllowSharedBufferSource(destination)) {
|
|
236
|
-
throw new TypeError('destination must be an ArrayBuffer or an ArrayBuffer view.');
|
|
237
|
-
}
|
|
238
|
-
if (this._closed) {
|
|
239
|
-
throw new Error('VideoSample is closed.');
|
|
240
|
-
}
|
|
241
|
-
assert(this._data !== null);
|
|
242
|
-
if (isVideoFrame(this._data)) {
|
|
243
|
-
await this._data.copyTo(destination);
|
|
244
|
-
}
|
|
245
|
-
else if (this._data instanceof Uint8Array) {
|
|
246
|
-
const dest = toUint8Array(destination);
|
|
247
|
-
dest.set(this._data);
|
|
248
|
-
}
|
|
249
|
-
else {
|
|
250
|
-
const canvas = this._data;
|
|
251
|
-
const context = canvas.getContext('2d');
|
|
252
|
-
assert(context);
|
|
253
|
-
const imageData = context.getImageData(0, 0, this.codedWidth, this.codedHeight);
|
|
254
|
-
const dest = toUint8Array(destination);
|
|
255
|
-
dest.set(imageData.data);
|
|
256
|
-
}
|
|
257
|
-
}
|
|
258
|
-
/**
|
|
259
|
-
* Converts this video sample to a VideoFrame for use with the WebCodecs API. The VideoFrame returned by this
|
|
260
|
-
* method *must* be closed separately from this video sample.
|
|
261
|
-
*/
|
|
262
|
-
toVideoFrame() {
|
|
263
|
-
if (this._closed) {
|
|
264
|
-
throw new Error('VideoSample is closed.');
|
|
265
|
-
}
|
|
266
|
-
assert(this._data !== null);
|
|
267
|
-
if (isVideoFrame(this._data)) {
|
|
268
|
-
return new VideoFrame(this._data, {
|
|
269
|
-
timestamp: this.microsecondTimestamp,
|
|
270
|
-
duration: this.microsecondDuration || undefined, // Drag 0 duration to undefined, glitches some codecs
|
|
271
|
-
});
|
|
272
|
-
}
|
|
273
|
-
else if (this._data instanceof Uint8Array) {
|
|
274
|
-
return new VideoFrame(this._data, {
|
|
275
|
-
format: this.format,
|
|
276
|
-
codedWidth: this.codedWidth,
|
|
277
|
-
codedHeight: this.codedHeight,
|
|
278
|
-
timestamp: this.microsecondTimestamp,
|
|
279
|
-
duration: this.microsecondDuration || undefined,
|
|
280
|
-
colorSpace: this.colorSpace,
|
|
281
|
-
});
|
|
282
|
-
}
|
|
283
|
-
else {
|
|
284
|
-
return new VideoFrame(this._data, {
|
|
285
|
-
timestamp: this.microsecondTimestamp,
|
|
286
|
-
duration: this.microsecondDuration || undefined,
|
|
287
|
-
});
|
|
288
|
-
}
|
|
289
|
-
}
|
|
290
|
-
draw(context, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8) {
|
|
291
|
-
let sx = 0;
|
|
292
|
-
let sy = 0;
|
|
293
|
-
let sWidth = this.displayWidth;
|
|
294
|
-
let sHeight = this.displayHeight;
|
|
295
|
-
let dx = 0;
|
|
296
|
-
let dy = 0;
|
|
297
|
-
let dWidth = this.displayWidth;
|
|
298
|
-
let dHeight = this.displayHeight;
|
|
299
|
-
if (arg5 !== undefined) {
|
|
300
|
-
sx = arg1;
|
|
301
|
-
sy = arg2;
|
|
302
|
-
sWidth = arg3;
|
|
303
|
-
sHeight = arg4;
|
|
304
|
-
dx = arg5;
|
|
305
|
-
dy = arg6;
|
|
306
|
-
if (arg7 !== undefined) {
|
|
307
|
-
dWidth = arg7;
|
|
308
|
-
dHeight = arg8;
|
|
309
|
-
}
|
|
310
|
-
else {
|
|
311
|
-
dWidth = sWidth;
|
|
312
|
-
dHeight = sHeight;
|
|
313
|
-
}
|
|
314
|
-
}
|
|
315
|
-
else {
|
|
316
|
-
dx = arg1;
|
|
317
|
-
dy = arg2;
|
|
318
|
-
if (arg3 !== undefined) {
|
|
319
|
-
dWidth = arg3;
|
|
320
|
-
dHeight = arg4;
|
|
321
|
-
}
|
|
322
|
-
}
|
|
323
|
-
if (!((typeof CanvasRenderingContext2D !== 'undefined' && context instanceof CanvasRenderingContext2D)
|
|
324
|
-
|| (typeof OffscreenCanvasRenderingContext2D !== 'undefined'
|
|
325
|
-
&& context instanceof OffscreenCanvasRenderingContext2D))) {
|
|
326
|
-
throw new TypeError('context must be a CanvasRenderingContext2D or OffscreenCanvasRenderingContext2D.');
|
|
327
|
-
}
|
|
328
|
-
if (!Number.isFinite(sx)) {
|
|
329
|
-
throw new TypeError('sx must be a number.');
|
|
330
|
-
}
|
|
331
|
-
if (!Number.isFinite(sy)) {
|
|
332
|
-
throw new TypeError('sy must be a number.');
|
|
333
|
-
}
|
|
334
|
-
if (!Number.isFinite(sWidth) || sWidth < 0) {
|
|
335
|
-
throw new TypeError('sWidth must be a non-negative number.');
|
|
336
|
-
}
|
|
337
|
-
if (!Number.isFinite(sHeight) || sHeight < 0) {
|
|
338
|
-
throw new TypeError('sHeight must be a non-negative number.');
|
|
339
|
-
}
|
|
340
|
-
if (!Number.isFinite(dx)) {
|
|
341
|
-
throw new TypeError('dx must be a number.');
|
|
342
|
-
}
|
|
343
|
-
if (!Number.isFinite(dy)) {
|
|
344
|
-
throw new TypeError('dy must be a number.');
|
|
345
|
-
}
|
|
346
|
-
if (!Number.isFinite(dWidth) || dWidth < 0) {
|
|
347
|
-
throw new TypeError('dWidth must be a non-negative number.');
|
|
348
|
-
}
|
|
349
|
-
if (!Number.isFinite(dHeight) || dHeight < 0) {
|
|
350
|
-
throw new TypeError('dHeight must be a non-negative number.');
|
|
351
|
-
}
|
|
352
|
-
if (this._closed) {
|
|
353
|
-
throw new Error('VideoSample is closed.');
|
|
354
|
-
}
|
|
355
|
-
({ sx, sy, sWidth, sHeight } = this._rotateSourceRegion(sx, sy, sWidth, sHeight, this.rotation));
|
|
356
|
-
const source = this.toCanvasImageSource();
|
|
357
|
-
context.save();
|
|
358
|
-
const centerX = dx + dWidth / 2;
|
|
359
|
-
const centerY = dy + dHeight / 2;
|
|
360
|
-
context.translate(centerX, centerY);
|
|
361
|
-
context.rotate(this.rotation * Math.PI / 180);
|
|
362
|
-
const aspectRatioChange = this.rotation % 180 === 0 ? 1 : dWidth / dHeight;
|
|
363
|
-
// Scale to compensate for aspect ratio changes when rotated
|
|
364
|
-
context.scale(1 / aspectRatioChange, aspectRatioChange);
|
|
365
|
-
context.drawImage(source, sx, sy, sWidth, sHeight, -dWidth / 2, -dHeight / 2, dWidth, dHeight);
|
|
366
|
-
context.restore();
|
|
367
|
-
}
|
|
368
|
-
/**
|
|
369
|
-
* Draws the sample in the middle of the canvas corresponding to the context with the specified fit behavior.
|
|
370
|
-
*/
|
|
371
|
-
drawWithFit(context, options) {
|
|
372
|
-
if (!((typeof CanvasRenderingContext2D !== 'undefined' && context instanceof CanvasRenderingContext2D)
|
|
373
|
-
|| (typeof OffscreenCanvasRenderingContext2D !== 'undefined'
|
|
374
|
-
&& context instanceof OffscreenCanvasRenderingContext2D))) {
|
|
375
|
-
throw new TypeError('context must be a CanvasRenderingContext2D or OffscreenCanvasRenderingContext2D.');
|
|
376
|
-
}
|
|
377
|
-
if (!options || typeof options !== 'object') {
|
|
378
|
-
throw new TypeError('options must be an object.');
|
|
379
|
-
}
|
|
380
|
-
if (!['fill', 'contain', 'cover'].includes(options.fit)) {
|
|
381
|
-
throw new TypeError('options.fit must be \'fill\', \'contain\', or \'cover\'.');
|
|
382
|
-
}
|
|
383
|
-
if (options.rotation !== undefined && ![0, 90, 180, 270].includes(options.rotation)) {
|
|
384
|
-
throw new TypeError('options.rotation, when provided, must be 0, 90, 180, or 270.');
|
|
385
|
-
}
|
|
386
|
-
if (options.crop !== undefined) {
|
|
387
|
-
validateCropRectangle(options.crop, 'options.');
|
|
388
|
-
}
|
|
389
|
-
const canvasWidth = context.canvas.width;
|
|
390
|
-
const canvasHeight = context.canvas.height;
|
|
391
|
-
const rotation = options.rotation ?? this.rotation;
|
|
392
|
-
const [rotatedWidth, rotatedHeight] = rotation % 180 === 0
|
|
393
|
-
? [this.codedWidth, this.codedHeight]
|
|
394
|
-
: [this.codedHeight, this.codedWidth];
|
|
395
|
-
if (options.crop) {
|
|
396
|
-
clampCropRectangle(options.crop, rotatedWidth, rotatedHeight);
|
|
397
|
-
}
|
|
398
|
-
// These variables specify where the final sample will be drawn on the canvas
|
|
399
|
-
let dx;
|
|
400
|
-
let dy;
|
|
401
|
-
let newWidth;
|
|
402
|
-
let newHeight;
|
|
403
|
-
const { sx, sy, sWidth, sHeight } = this._rotateSourceRegion(options.crop?.left ?? 0, options.crop?.top ?? 0, options.crop?.width ?? rotatedWidth, options.crop?.height ?? rotatedHeight, rotation);
|
|
404
|
-
if (options.fit === 'fill') {
|
|
405
|
-
dx = 0;
|
|
406
|
-
dy = 0;
|
|
407
|
-
newWidth = canvasWidth;
|
|
408
|
-
newHeight = canvasHeight;
|
|
409
|
-
}
|
|
410
|
-
else {
|
|
411
|
-
const [sampleWidth, sampleHeight] = options.crop
|
|
412
|
-
? [options.crop.width, options.crop.height]
|
|
413
|
-
: [rotatedWidth, rotatedHeight];
|
|
414
|
-
const scale = options.fit === 'contain'
|
|
415
|
-
? Math.min(canvasWidth / sampleWidth, canvasHeight / sampleHeight)
|
|
416
|
-
: Math.max(canvasWidth / sampleWidth, canvasHeight / sampleHeight);
|
|
417
|
-
newWidth = sampleWidth * scale;
|
|
418
|
-
newHeight = sampleHeight * scale;
|
|
419
|
-
dx = (canvasWidth - newWidth) / 2;
|
|
420
|
-
dy = (canvasHeight - newHeight) / 2;
|
|
421
|
-
}
|
|
422
|
-
context.save();
|
|
423
|
-
const aspectRatioChange = rotation % 180 === 0 ? 1 : newWidth / newHeight;
|
|
424
|
-
context.translate(canvasWidth / 2, canvasHeight / 2);
|
|
425
|
-
context.rotate(rotation * Math.PI / 180);
|
|
426
|
-
// This aspect ratio compensation is done so that we can draw the sample with the intended dimensions and
|
|
427
|
-
// don't need to think about how those dimensions change after the rotation
|
|
428
|
-
context.scale(1 / aspectRatioChange, aspectRatioChange);
|
|
429
|
-
context.translate(-canvasWidth / 2, -canvasHeight / 2);
|
|
430
|
-
// Important that we don't use .draw() here since that would take rotation into account, but we wanna handle it
|
|
431
|
-
// ourselves here
|
|
432
|
-
context.drawImage(this.toCanvasImageSource(), sx, sy, sWidth, sHeight, dx, dy, newWidth, newHeight);
|
|
433
|
-
context.restore();
|
|
434
|
-
}
|
|
435
|
-
/** @internal */
|
|
436
|
-
_rotateSourceRegion(sx, sy, sWidth, sHeight, rotation) {
|
|
437
|
-
// The provided sx,sy,sWidth,sHeight refer to the final rotated image, but that's not actually how the image is
|
|
438
|
-
// stored. Therefore, we must map these back onto the original, pre-rotation image.
|
|
439
|
-
if (rotation === 90) {
|
|
440
|
-
[sx, sy, sWidth, sHeight] = [
|
|
441
|
-
sy,
|
|
442
|
-
this.codedHeight - sx - sWidth,
|
|
443
|
-
sHeight,
|
|
444
|
-
sWidth,
|
|
445
|
-
];
|
|
446
|
-
}
|
|
447
|
-
else if (rotation === 180) {
|
|
448
|
-
[sx, sy] = [
|
|
449
|
-
this.codedWidth - sx - sWidth,
|
|
450
|
-
this.codedHeight - sy - sHeight,
|
|
451
|
-
];
|
|
452
|
-
}
|
|
453
|
-
else if (rotation === 270) {
|
|
454
|
-
[sx, sy, sWidth, sHeight] = [
|
|
455
|
-
this.codedWidth - sy - sHeight,
|
|
456
|
-
sx,
|
|
457
|
-
sHeight,
|
|
458
|
-
sWidth,
|
|
459
|
-
];
|
|
460
|
-
}
|
|
461
|
-
return { sx, sy, sWidth, sHeight };
|
|
462
|
-
}
|
|
463
|
-
/**
|
|
464
|
-
* Converts this video sample to a
|
|
465
|
-
* [`CanvasImageSource`](https://udn.realityripple.com/docs/Web/API/CanvasImageSource) for drawing to a canvas.
|
|
466
|
-
*
|
|
467
|
-
* You must use the value returned by this method immediately, as any VideoFrame created internally will
|
|
468
|
-
* automatically be closed in the next microtask.
|
|
469
|
-
*/
|
|
470
|
-
toCanvasImageSource() {
|
|
471
|
-
if (this._closed) {
|
|
472
|
-
throw new Error('VideoSample is closed.');
|
|
473
|
-
}
|
|
474
|
-
assert(this._data !== null);
|
|
475
|
-
if (this._data instanceof Uint8Array) {
|
|
476
|
-
// Requires VideoFrame to be defined
|
|
477
|
-
const videoFrame = this.toVideoFrame();
|
|
478
|
-
queueMicrotask(() => videoFrame.close()); // Let's automatically close the frame in the next microtask
|
|
479
|
-
return videoFrame;
|
|
480
|
-
}
|
|
481
|
-
else {
|
|
482
|
-
return this._data;
|
|
483
|
-
}
|
|
484
|
-
}
|
|
485
|
-
/** Sets the rotation metadata of this video sample. */
|
|
486
|
-
setRotation(newRotation) {
|
|
487
|
-
if (![0, 90, 180, 270].includes(newRotation)) {
|
|
488
|
-
throw new TypeError('newRotation must be 0, 90, 180, or 270.');
|
|
489
|
-
}
|
|
490
|
-
// eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion
|
|
491
|
-
this.rotation = newRotation;
|
|
492
|
-
}
|
|
493
|
-
/** Sets the presentation timestamp of this video sample, in seconds. */
|
|
494
|
-
setTimestamp(newTimestamp) {
|
|
495
|
-
if (!Number.isFinite(newTimestamp)) {
|
|
496
|
-
throw new TypeError('newTimestamp must be a number.');
|
|
497
|
-
}
|
|
498
|
-
// eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion
|
|
499
|
-
this.timestamp = newTimestamp;
|
|
500
|
-
}
|
|
501
|
-
/** Sets the duration of this video sample, in seconds. */
|
|
502
|
-
setDuration(newDuration) {
|
|
503
|
-
if (!Number.isFinite(newDuration) || newDuration < 0) {
|
|
504
|
-
throw new TypeError('newDuration must be a non-negative number.');
|
|
505
|
-
}
|
|
506
|
-
// eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion
|
|
507
|
-
this.duration = newDuration;
|
|
508
|
-
}
|
|
509
|
-
    /** Calls `.close()`. */
    // Hooks into the explicit resource management protocol, enabling `using sample = ...` declarations.
    [Symbol.dispose]() {
        this.close();
    }
|
|
513
|
-
}
|
|
514
|
-
// Type guard that is safe in environments where the WebCodecs VideoFrame global is absent.
const isVideoFrame = (x) => typeof VideoFrame !== 'undefined' && x instanceof VideoFrame;
|
|
517
|
-
// Mutates `crop` in place so the rectangle fits entirely within the outer dimensions.
export const clampCropRectangle = (crop, outerWidth, outerHeight) => {
    // Keep the origin inside the outer bounds, then shrink the extent so the
    // rectangle never reaches past the outer edges
    const left = Math.min(crop.left, outerWidth);
    const top = Math.min(crop.top, outerHeight);
    const width = Math.min(crop.width, outerWidth - left);
    const height = Math.min(crop.height, outerHeight - top);
    crop.left = left;
    crop.top = top;
    crop.width = width;
    crop.height = height;
    assert(width >= 0);
    assert(height >= 0);
};
|
|
525
|
-
// Throws a TypeError if `crop` is not a well-formed crop rectangle. `prefix` is prepended to
// every error message so callers can identify which option was at fault.
export const validateCropRectangle = (crop, prefix) => {
    if (!crop || typeof crop !== 'object') {
        throw new TypeError(prefix + 'crop, when provided, must be an object.');
    }
    // Every field must be a non-negative integer; check them in a fixed order so the
    // first offending field is the one reported
    for (const field of ['left', 'top', 'width', 'height']) {
        const value = crop[field];
        if (!Number.isInteger(value) || value < 0) {
            throw new TypeError(`${prefix}crop.${field} must be a non-negative integer.`);
        }
    }
};
|
|
542
|
-
// All audio sample formats accepted by AudioSample; mirrors WebCodecs' AudioSampleFormat enum
// (interleaved and planar variants of u8/s16/s32/f32).
const AUDIO_SAMPLE_FORMATS = new Set(['f32', 'f32-planar', 's16', 's16-planar', 's32', 's32-planar', 'u8', 'u8-planar']);
|
|
543
|
-
/**
 * Represents a raw, unencoded audio sample. Mainly used as an expressive wrapper around WebCodecs API's
 * [`AudioData`](https://developer.mozilla.org/en-US/docs/Web/API/AudioData), but can also be used standalone.
 * @group Samples
 * @public
 */
export class AudioSample {
    /** The presentation timestamp of the sample in microseconds. */
    get microsecondTimestamp() {
        return Math.trunc(SECOND_TO_MICROSECOND_FACTOR * this.timestamp);
    }
    /** The duration of the sample in microseconds. */
    get microsecondDuration() {
        return Math.trunc(SECOND_TO_MICROSECOND_FACTOR * this.duration);
    }
    /**
     * Creates a new {@link AudioSample}, either from an existing
     * [`AudioData`](https://developer.mozilla.org/en-US/docs/Web/API/AudioData) or from raw bytes specified in
     * {@link AudioSampleInit}.
     */
    constructor(init) {
        /** @internal */
        this._closed = false;
        if (isAudioData(init)) {
            if (init.format === null) {
                throw new TypeError('AudioData with null format is not supported.');
            }
            // The AudioData itself is the backing store until close() is called
            this._data = init;
            this.format = init.format;
            this.sampleRate = init.sampleRate;
            this.numberOfFrames = init.numberOfFrames;
            this.numberOfChannels = init.numberOfChannels;
            this.timestamp = init.timestamp / 1e6; // AudioData timestamps are in microseconds
            this.duration = init.numberOfFrames / init.sampleRate;
        }
        else {
            if (!init || typeof init !== 'object') {
                throw new TypeError('Invalid AudioDataInit: must be an object.');
            }
            if (!AUDIO_SAMPLE_FORMATS.has(init.format)) {
                throw new TypeError('Invalid AudioDataInit: invalid format.');
            }
            if (!Number.isFinite(init.sampleRate) || init.sampleRate <= 0) {
                throw new TypeError('Invalid AudioDataInit: sampleRate must be > 0.');
            }
            // Fix: previously only `=== 0` was rejected, which let negative integer channel
            // counts slip through despite the error message promising "> 0"
            if (!Number.isInteger(init.numberOfChannels) || init.numberOfChannels <= 0) {
                throw new TypeError('Invalid AudioDataInit: numberOfChannels must be an integer > 0.');
            }
            if (!Number.isFinite(init.timestamp)) {
                throw new TypeError('init.timestamp must be a number.');
            }
            // Derive the frame count from the byte length; it must divide evenly into whole frames
            const numberOfFrames = init.data.byteLength / (getBytesPerSample(init.format) * init.numberOfChannels);
            if (!Number.isInteger(numberOfFrames)) {
                throw new TypeError('Invalid AudioDataInit: data size is not a multiple of frame size.');
            }
            this.format = init.format;
            this.sampleRate = init.sampleRate;
            this.numberOfFrames = numberOfFrames;
            this.numberOfChannels = init.numberOfChannels;
            this.timestamp = init.timestamp;
            this.duration = numberOfFrames / init.sampleRate;
            let dataBuffer;
            if (init.data instanceof ArrayBuffer) {
                dataBuffer = new Uint8Array(init.data);
            }
            else if (ArrayBuffer.isView(init.data)) {
                dataBuffer = new Uint8Array(init.data.buffer, init.data.byteOffset, init.data.byteLength);
            }
            else {
                throw new TypeError('Invalid AudioDataInit: data is not a BufferSource.');
            }
            const expectedSize = this.numberOfFrames * this.numberOfChannels * getBytesPerSample(this.format);
            if (dataBuffer.byteLength < expectedSize) {
                throw new TypeError('Invalid AudioDataInit: insufficient data size.');
            }
            this._data = dataBuffer;
        }
    }
    /** Returns the number of bytes required to hold the audio sample's data as specified by the given options. */
    allocationSize(options) {
        if (!options || typeof options !== 'object') {
            throw new TypeError('options must be an object.');
        }
        if (!Number.isInteger(options.planeIndex) || options.planeIndex < 0) {
            throw new TypeError('planeIndex must be a non-negative integer.');
        }
        if (options.format !== undefined && !AUDIO_SAMPLE_FORMATS.has(options.format)) {
            throw new TypeError('Invalid format.');
        }
        if (options.frameOffset !== undefined && (!Number.isInteger(options.frameOffset) || options.frameOffset < 0)) {
            throw new TypeError('frameOffset must be a non-negative integer.');
        }
        if (options.frameCount !== undefined && (!Number.isInteger(options.frameCount) || options.frameCount < 0)) {
            throw new TypeError('frameCount must be a non-negative integer.');
        }
        if (this._closed) {
            throw new Error('AudioSample is closed.');
        }
        const destFormat = options.format ?? this.format;
        const frameOffset = options.frameOffset ?? 0;
        if (frameOffset >= this.numberOfFrames) {
            throw new RangeError('frameOffset out of range');
        }
        // Default to "everything after the offset" when frameCount is omitted
        const copyFrameCount = options.frameCount !== undefined ? options.frameCount : (this.numberOfFrames - frameOffset);
        if (copyFrameCount > (this.numberOfFrames - frameOffset)) {
            throw new RangeError('frameCount out of range');
        }
        const bytesPerSample = getBytesPerSample(destFormat);
        const isPlanar = formatIsPlanar(destFormat);
        if (isPlanar && options.planeIndex >= this.numberOfChannels) {
            throw new RangeError('planeIndex out of range');
        }
        if (!isPlanar && options.planeIndex !== 0) {
            throw new RangeError('planeIndex out of range');
        }
        // A planar plane holds one channel; an interleaved "plane 0" holds all channels
        const elementCount = isPlanar ? copyFrameCount : copyFrameCount * this.numberOfChannels;
        return elementCount * bytesPerSample;
    }
    /** Copies the audio sample's data to an ArrayBuffer or ArrayBufferView as specified by the given options. */
    copyTo(destination, options) {
        if (!isAllowSharedBufferSource(destination)) {
            throw new TypeError('destination must be an ArrayBuffer or an ArrayBuffer view.');
        }
        if (!options || typeof options !== 'object') {
            throw new TypeError('options must be an object.');
        }
        if (!Number.isInteger(options.planeIndex) || options.planeIndex < 0) {
            throw new TypeError('planeIndex must be a non-negative integer.');
        }
        if (options.format !== undefined && !AUDIO_SAMPLE_FORMATS.has(options.format)) {
            throw new TypeError('Invalid format.');
        }
        if (options.frameOffset !== undefined && (!Number.isInteger(options.frameOffset) || options.frameOffset < 0)) {
            throw new TypeError('frameOffset must be a non-negative integer.');
        }
        if (options.frameCount !== undefined && (!Number.isInteger(options.frameCount) || options.frameCount < 0)) {
            throw new TypeError('frameCount must be a non-negative integer.');
        }
        if (this._closed) {
            throw new Error('AudioSample is closed.');
        }
        const { planeIndex, format, frameCount: optFrameCount, frameOffset: optFrameOffset } = options;
        const destFormat = format ?? this.format;
        if (!destFormat)
            throw new Error('Destination format not determined');
        const numFrames = this.numberOfFrames;
        const numChannels = this.numberOfChannels;
        const frameOffset = optFrameOffset ?? 0;
        if (frameOffset >= numFrames) {
            throw new RangeError('frameOffset out of range');
        }
        const copyFrameCount = optFrameCount !== undefined ? optFrameCount : (numFrames - frameOffset);
        if (copyFrameCount > (numFrames - frameOffset)) {
            throw new RangeError('frameCount out of range');
        }
        const destBytesPerSample = getBytesPerSample(destFormat);
        const destIsPlanar = formatIsPlanar(destFormat);
        if (destIsPlanar && planeIndex >= numChannels) {
            throw new RangeError('planeIndex out of range');
        }
        if (!destIsPlanar && planeIndex !== 0) {
            throw new RangeError('planeIndex out of range');
        }
        const destElementCount = destIsPlanar ? copyFrameCount : copyFrameCount * numChannels;
        const requiredSize = destElementCount * destBytesPerSample;
        if (destination.byteLength < requiredSize) {
            throw new RangeError('Destination buffer is too small');
        }
        const destView = toDataView(destination);
        const writeFn = getWriteFunction(destFormat);
        if (isAudioData(this._data)) {
            // AudioData source: browsers are only required to support reading as f32-planar,
            // so all conversions route through that format
            if (destIsPlanar) {
                if (destFormat === 'f32-planar') {
                    // Simple, since the browser must support f32-planar, we can just delegate here
                    this._data.copyTo(destination, {
                        planeIndex,
                        frameOffset,
                        frameCount: copyFrameCount,
                        format: 'f32-planar',
                    });
                }
                else {
                    // Allocate temporary buffer for f32-planar data
                    const tempBuffer = new ArrayBuffer(copyFrameCount * 4);
                    const tempArray = new Float32Array(tempBuffer);
                    this._data.copyTo(tempArray, {
                        planeIndex,
                        frameOffset,
                        frameCount: copyFrameCount,
                        format: 'f32-planar',
                    });
                    // Convert each f32 sample to destination format
                    const tempView = new DataView(tempBuffer);
                    for (let i = 0; i < copyFrameCount; i++) {
                        const destOffset = i * destBytesPerSample;
                        const sample = tempView.getFloat32(i * 4, true);
                        writeFn(destView, destOffset, sample);
                    }
                }
            }
            else {
                // Destination is interleaved.
                // Allocate a temporary Float32Array to hold one channel's worth of data.
                const numCh = numChannels;
                const temp = new Float32Array(copyFrameCount);
                for (let ch = 0; ch < numCh; ch++) {
                    this._data.copyTo(temp, {
                        planeIndex: ch,
                        frameOffset,
                        frameCount: copyFrameCount,
                        format: 'f32-planar',
                    });
                    // Scatter this channel's samples into the interleaved destination
                    for (let i = 0; i < copyFrameCount; i++) {
                        const destIndex = i * numCh + ch;
                        const destOffset = destIndex * destBytesPerSample;
                        writeFn(destView, destOffset, temp[i]);
                    }
                }
            }
        }
        else {
            // Branch for Uint8Array data (non-AudioData): convert sample-by-sample, normalizing
            // through float in [-1, 1] via the read/write function pair
            const uint8Data = this._data;
            const srcView = new DataView(uint8Data.buffer, uint8Data.byteOffset, uint8Data.byteLength);
            const srcFormat = this.format;
            const readFn = getReadFunction(srcFormat);
            const srcBytesPerSample = getBytesPerSample(srcFormat);
            const srcIsPlanar = formatIsPlanar(srcFormat);
            for (let i = 0; i < copyFrameCount; i++) {
                if (destIsPlanar) {
                    const destOffset = i * destBytesPerSample;
                    let srcOffset;
                    if (srcIsPlanar) {
                        srcOffset = (planeIndex * numFrames + (i + frameOffset)) * srcBytesPerSample;
                    }
                    else {
                        srcOffset = (((i + frameOffset) * numChannels) + planeIndex) * srcBytesPerSample;
                    }
                    const normalized = readFn(srcView, srcOffset);
                    writeFn(destView, destOffset, normalized);
                }
                else {
                    for (let ch = 0; ch < numChannels; ch++) {
                        const destIndex = i * numChannels + ch;
                        const destOffset = destIndex * destBytesPerSample;
                        let srcOffset;
                        if (srcIsPlanar) {
                            srcOffset = (ch * numFrames + (i + frameOffset)) * srcBytesPerSample;
                        }
                        else {
                            srcOffset = (((i + frameOffset) * numChannels) + ch) * srcBytesPerSample;
                        }
                        const normalized = readFn(srcView, srcOffset);
                        writeFn(destView, destOffset, normalized);
                    }
                }
            }
        }
    }
    /** Clones this audio sample. */
    clone() {
        if (this._closed) {
            throw new Error('AudioSample is closed.');
        }
        if (isAudioData(this._data)) {
            const sample = new AudioSample(this._data.clone());
            sample.setTimestamp(this.timestamp); // Make sure the timestamp is precise (beyond microsecond accuracy)
            return sample;
        }
        else {
            // Raw-byte samples can be reconstructed directly from their init fields
            return new AudioSample({
                format: this.format,
                sampleRate: this.sampleRate,
                numberOfFrames: this.numberOfFrames,
                numberOfChannels: this.numberOfChannels,
                timestamp: this.timestamp,
                data: this._data,
            });
        }
    }
    /**
     * Closes this audio sample, releasing held resources. Audio samples should be closed as soon as they are not
     * needed anymore.
     */
    close() {
        if (this._closed) {
            return;
        }
        if (isAudioData(this._data)) {
            this._data.close();
        }
        else {
            // Drop the reference to the (potentially large) buffer so it can be garbage-collected
            this._data = new Uint8Array(0);
        }
        this._closed = true;
    }
    /**
     * Converts this audio sample to an AudioData for use with the WebCodecs API. The AudioData returned by this
     * method *must* be closed separately from this audio sample.
     */
    toAudioData() {
        if (this._closed) {
            throw new Error('AudioSample is closed.');
        }
        if (isAudioData(this._data)) {
            if (this._data.timestamp === this.microsecondTimestamp) {
                // Timestamp matches, let's just return the data (but cloned)
                return this._data.clone();
            }
            else {
                // It's impossible to simply change an AudioData's timestamp, so we'll need to create a new one
                if (formatIsPlanar(this.format)) {
                    const size = this.allocationSize({ planeIndex: 0, format: this.format });
                    const data = new ArrayBuffer(size * this.numberOfChannels);
                    // We gotta read out each plane individually
                    for (let i = 0; i < this.numberOfChannels; i++) {
                        this.copyTo(new Uint8Array(data, i * size, size), { planeIndex: i, format: this.format });
                    }
                    return new AudioData({
                        format: this.format,
                        sampleRate: this.sampleRate,
                        numberOfFrames: this.numberOfFrames,
                        numberOfChannels: this.numberOfChannels,
                        timestamp: this.microsecondTimestamp,
                        data,
                    });
                }
                else {
                    // Interleaved: a single plane holds everything
                    const data = new ArrayBuffer(this.allocationSize({ planeIndex: 0, format: this.format }));
                    this.copyTo(data, { planeIndex: 0, format: this.format });
                    return new AudioData({
                        format: this.format,
                        sampleRate: this.sampleRate,
                        numberOfFrames: this.numberOfFrames,
                        numberOfChannels: this.numberOfChannels,
                        timestamp: this.microsecondTimestamp,
                        data,
                    });
                }
            }
        }
        else {
            return new AudioData({
                format: this.format,
                sampleRate: this.sampleRate,
                numberOfFrames: this.numberOfFrames,
                numberOfChannels: this.numberOfChannels,
                timestamp: this.microsecondTimestamp,
                data: this._data,
            });
        }
    }
    /** Convert this audio sample to an AudioBuffer for use with the Web Audio API. */
    toAudioBuffer() {
        if (this._closed) {
            throw new Error('AudioSample is closed.');
        }
        const audioBuffer = new AudioBuffer({
            numberOfChannels: this.numberOfChannels,
            length: this.numberOfFrames,
            sampleRate: this.sampleRate,
        });
        // One channel's worth of f32 samples, reused for every channel
        const dataBytes = new Float32Array(this.allocationSize({ planeIndex: 0, format: 'f32-planar' }) / 4);
        for (let i = 0; i < this.numberOfChannels; i++) {
            this.copyTo(dataBytes, { planeIndex: i, format: 'f32-planar' });
            audioBuffer.copyToChannel(dataBytes, i);
        }
        return audioBuffer;
    }
    /** Sets the presentation timestamp of this audio sample, in seconds. */
    setTimestamp(newTimestamp) {
        if (!Number.isFinite(newTimestamp)) {
            throw new TypeError('newTimestamp must be a number.');
        }
        this.timestamp = newTimestamp;
    }
    /** Calls `.close()`. */
    [Symbol.dispose]() {
        this.close();
    }
    /** @internal */
    static *_fromAudioBuffer(audioBuffer, timestamp) {
        if (!(audioBuffer instanceof AudioBuffer)) {
            throw new TypeError('audioBuffer must be an AudioBuffer.');
        }
        const MAX_FLOAT_COUNT = 48000 * 5; // 5 seconds of mono 48 kHz audio per sample
        const numberOfChannels = audioBuffer.numberOfChannels;
        const sampleRate = audioBuffer.sampleRate;
        const totalFrames = audioBuffer.length;
        const maxFramesPerChunk = Math.floor(MAX_FLOAT_COUNT / numberOfChannels);
        let currentRelativeFrame = 0;
        let remainingFrames = totalFrames;
        // Create AudioSamples in a chunked fashion so we don't create huge Float32Arrays
        while (remainingFrames > 0) {
            const framesToCopy = Math.min(maxFramesPerChunk, remainingFrames);
            const chunkData = new Float32Array(numberOfChannels * framesToCopy);
            for (let channel = 0; channel < numberOfChannels; channel++) {
                audioBuffer.copyFromChannel(chunkData.subarray(channel * framesToCopy, (channel + 1) * framesToCopy), channel, currentRelativeFrame);
            }
            yield new AudioSample({
                format: 'f32-planar',
                sampleRate,
                numberOfFrames: framesToCopy,
                numberOfChannels,
                timestamp: timestamp + currentRelativeFrame / sampleRate,
                data: chunkData,
            });
            currentRelativeFrame += framesToCopy;
            remainingFrames -= framesToCopy;
        }
    }
    /**
     * Creates AudioSamples from an AudioBuffer, starting at the given timestamp in seconds. Typically creates exactly
     * one sample, but may create multiple if the AudioBuffer is exceedingly large.
     */
    static fromAudioBuffer(audioBuffer, timestamp) {
        // Delegates to the internal generator (previously this method duplicated its chunking logic
        // verbatim). Spreading drains the generator synchronously, so the argument-validation
        // TypeError still throws at this call site, exactly as before.
        return [...AudioSample._fromAudioBuffer(audioBuffer, timestamp)];
    }
}
|
|
993
|
-
// Returns the size in bytes of one sample element for the given AudioSampleFormat.
const getBytesPerSample = (format) => {
    const sizeByFormat = new Map([
        ['u8', 1], ['u8-planar', 1],
        ['s16', 2], ['s16-planar', 2],
        ['s32', 4], ['s32-planar', 4],
        ['f32', 4], ['f32-planar', 4],
    ]);
    const size = sizeByFormat.get(format);
    if (size === undefined) {
        throw new Error('Unknown AudioSampleFormat');
    }
    return size;
};
|
|
1011
|
-
// True when the format stores each channel contiguously (planar) rather than interleaved.
const formatIsPlanar = (format) => {
    const planarFormats = ['u8-planar', 's16-planar', 's32-planar', 'f32-planar'];
    return planarFormats.includes(format);
};
|
|
1022
|
-
// Returns a (view, byteOffset) reader that decodes one sample of the given format into a
// float in [-1, 1). All integer formats are little-endian. Unknown formats yield undefined.
const getReadFunction = (format) => {
    const readUint8 = (view, offset) => (view.getUint8(offset) - 128) / 128;
    const readInt16 = (view, offset) => view.getInt16(offset, true) / 32768;
    const readInt32 = (view, offset) => view.getInt32(offset, true) / 2147483648;
    const readFloat32 = (view, offset) => view.getFloat32(offset, true);
    return new Map([
        ['u8', readUint8], ['u8-planar', readUint8],
        ['s16', readInt16], ['s16-planar', readInt16],
        ['s32', readInt32], ['s32-planar', readInt32],
        ['f32', readFloat32], ['f32-planar', readFloat32],
    ]).get(format);
};
|
|
1038
|
-
// Returns a (view, byteOffset, value) writer that encodes a float in [-1, 1] as one sample of
// the given format, clamping to the format's representable range. All integer formats are
// little-endian. Unknown formats yield undefined.
const getWriteFunction = (format) => {
    const writeUint8 = (view, offset, value) => view.setUint8(offset, clamp((value + 1) * 127.5, 0, 255));
    const writeInt16 = (view, offset, value) => view.setInt16(offset, clamp(Math.round(value * 32767), -32768, 32767), true);
    const writeInt32 = (view, offset, value) => view.setInt32(offset, clamp(Math.round(value * 2147483647), -2147483648, 2147483647), true);
    const writeFloat32 = (view, offset, value) => view.setFloat32(offset, value, true);
    return new Map([
        ['u8', writeUint8], ['u8-planar', writeUint8],
        ['s16', writeInt16], ['s16-planar', writeInt16],
        ['s32', writeInt32], ['s32-planar', writeInt32],
        ['f32', writeFloat32], ['f32-planar', writeFloat32],
    ]).get(format);
};
|
|
1054
|
-
// Type guard that is safe in environments where the WebCodecs AudioData global is absent.
const isAudioData = (x) => typeof AudioData !== 'undefined' && x instanceof AudioData;
|