@kenzuya/mediabunny 1.26.0 → 1.28.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (238)
  1. package/README.md +1 -1
  2. package/dist/bundles/{mediabunny.mjs → mediabunny.js} +21963 -21390
  3. package/dist/bundles/mediabunny.min.js +490 -0
  4. package/dist/modules/shared/mp3-misc.d.ts.map +1 -1
  5. package/dist/modules/src/adts/adts-demuxer.d.ts +6 -6
  6. package/dist/modules/src/adts/adts-demuxer.d.ts.map +1 -1
  7. package/dist/modules/src/adts/adts-muxer.d.ts +4 -4
  8. package/dist/modules/src/adts/adts-muxer.d.ts.map +1 -1
  9. package/dist/modules/src/adts/adts-reader.d.ts +1 -1
  10. package/dist/modules/src/adts/adts-reader.d.ts.map +1 -1
  11. package/dist/modules/src/avi/avi-demuxer.d.ts +44 -0
  12. package/dist/modules/src/avi/avi-demuxer.d.ts.map +1 -0
  13. package/dist/modules/src/avi/avi-misc.d.ts +88 -0
  14. package/dist/modules/src/avi/avi-misc.d.ts.map +1 -0
  15. package/dist/modules/src/avi/avi-muxer.d.ts +45 -0
  16. package/dist/modules/src/avi/avi-muxer.d.ts.map +1 -0
  17. package/dist/modules/src/avi/riff-writer.d.ts +26 -0
  18. package/dist/modules/src/avi/riff-writer.d.ts.map +1 -0
  19. package/dist/modules/src/codec-data.d.ts +8 -3
  20. package/dist/modules/src/codec-data.d.ts.map +1 -1
  21. package/dist/modules/src/codec.d.ts +10 -10
  22. package/dist/modules/src/codec.d.ts.map +1 -1
  23. package/dist/modules/src/conversion.d.ts +33 -16
  24. package/dist/modules/src/conversion.d.ts.map +1 -1
  25. package/dist/modules/src/custom-coder.d.ts +8 -8
  26. package/dist/modules/src/custom-coder.d.ts.map +1 -1
  27. package/dist/modules/src/demuxer.d.ts +3 -3
  28. package/dist/modules/src/demuxer.d.ts.map +1 -1
  29. package/dist/modules/src/encode.d.ts +8 -8
  30. package/dist/modules/src/encode.d.ts.map +1 -1
  31. package/dist/modules/src/flac/flac-demuxer.d.ts +7 -7
  32. package/dist/modules/src/flac/flac-demuxer.d.ts.map +1 -1
  33. package/dist/modules/src/flac/flac-misc.d.ts +3 -3
  34. package/dist/modules/src/flac/flac-misc.d.ts.map +1 -1
  35. package/dist/modules/src/flac/flac-muxer.d.ts +5 -5
  36. package/dist/modules/src/flac/flac-muxer.d.ts.map +1 -1
  37. package/dist/modules/src/id3.d.ts +3 -3
  38. package/dist/modules/src/id3.d.ts.map +1 -1
  39. package/dist/modules/src/index.d.ts +20 -20
  40. package/dist/modules/src/index.d.ts.map +1 -1
  41. package/dist/modules/src/input-format.d.ts +22 -0
  42. package/dist/modules/src/input-format.d.ts.map +1 -1
  43. package/dist/modules/src/input-track.d.ts +8 -8
  44. package/dist/modules/src/input-track.d.ts.map +1 -1
  45. package/dist/modules/src/input.d.ts +12 -12
  46. package/dist/modules/src/isobmff/isobmff-boxes.d.ts +2 -2
  47. package/dist/modules/src/isobmff/isobmff-boxes.d.ts.map +1 -1
  48. package/dist/modules/src/isobmff/isobmff-demuxer.d.ts +12 -12
  49. package/dist/modules/src/isobmff/isobmff-demuxer.d.ts.map +1 -1
  50. package/dist/modules/src/isobmff/isobmff-misc.d.ts.map +1 -1
  51. package/dist/modules/src/isobmff/isobmff-muxer.d.ts +11 -11
  52. package/dist/modules/src/isobmff/isobmff-muxer.d.ts.map +1 -1
  53. package/dist/modules/src/isobmff/isobmff-reader.d.ts +2 -2
  54. package/dist/modules/src/isobmff/isobmff-reader.d.ts.map +1 -1
  55. package/dist/modules/src/matroska/ebml.d.ts +3 -3
  56. package/dist/modules/src/matroska/ebml.d.ts.map +1 -1
  57. package/dist/modules/src/matroska/matroska-demuxer.d.ts +13 -13
  58. package/dist/modules/src/matroska/matroska-demuxer.d.ts.map +1 -1
  59. package/dist/modules/src/matroska/matroska-input.d.ts +33 -0
  60. package/dist/modules/src/matroska/matroska-input.d.ts.map +1 -0
  61. package/dist/modules/src/matroska/matroska-misc.d.ts.map +1 -1
  62. package/dist/modules/src/matroska/matroska-muxer.d.ts +5 -5
  63. package/dist/modules/src/matroska/matroska-muxer.d.ts.map +1 -1
  64. package/dist/modules/src/media-sink.d.ts +5 -5
  65. package/dist/modules/src/media-sink.d.ts.map +1 -1
  66. package/dist/modules/src/media-source.d.ts +22 -4
  67. package/dist/modules/src/media-source.d.ts.map +1 -1
  68. package/dist/modules/src/metadata.d.ts +2 -2
  69. package/dist/modules/src/metadata.d.ts.map +1 -1
  70. package/dist/modules/src/misc.d.ts +5 -4
  71. package/dist/modules/src/misc.d.ts.map +1 -1
  72. package/dist/modules/src/mp3/mp3-demuxer.d.ts +7 -7
  73. package/dist/modules/src/mp3/mp3-demuxer.d.ts.map +1 -1
  74. package/dist/modules/src/mp3/mp3-muxer.d.ts +4 -4
  75. package/dist/modules/src/mp3/mp3-muxer.d.ts.map +1 -1
  76. package/dist/modules/src/mp3/mp3-reader.d.ts +2 -2
  77. package/dist/modules/src/mp3/mp3-reader.d.ts.map +1 -1
  78. package/dist/modules/src/mp3/mp3-writer.d.ts +1 -1
  79. package/dist/modules/src/mp3/mp3-writer.d.ts.map +1 -1
  80. package/dist/modules/src/muxer.d.ts +4 -4
  81. package/dist/modules/src/muxer.d.ts.map +1 -1
  82. package/dist/modules/src/ogg/ogg-demuxer.d.ts +7 -7
  83. package/dist/modules/src/ogg/ogg-demuxer.d.ts.map +1 -1
  84. package/dist/modules/src/ogg/ogg-misc.d.ts +1 -1
  85. package/dist/modules/src/ogg/ogg-misc.d.ts.map +1 -1
  86. package/dist/modules/src/ogg/ogg-muxer.d.ts +5 -5
  87. package/dist/modules/src/ogg/ogg-muxer.d.ts.map +1 -1
  88. package/dist/modules/src/ogg/ogg-reader.d.ts +1 -1
  89. package/dist/modules/src/ogg/ogg-reader.d.ts.map +1 -1
  90. package/dist/modules/src/output-format.d.ts +51 -6
  91. package/dist/modules/src/output-format.d.ts.map +1 -1
  92. package/dist/modules/src/output.d.ts +13 -13
  93. package/dist/modules/src/output.d.ts.map +1 -1
  94. package/dist/modules/src/packet.d.ts +1 -1
  95. package/dist/modules/src/packet.d.ts.map +1 -1
  96. package/dist/modules/src/pcm.d.ts.map +1 -1
  97. package/dist/modules/src/reader.d.ts +2 -2
  98. package/dist/modules/src/reader.d.ts.map +1 -1
  99. package/dist/modules/src/sample.d.ts +57 -15
  100. package/dist/modules/src/sample.d.ts.map +1 -1
  101. package/dist/modules/src/source.d.ts +3 -3
  102. package/dist/modules/src/source.d.ts.map +1 -1
  103. package/dist/modules/src/subtitles.d.ts +1 -1
  104. package/dist/modules/src/subtitles.d.ts.map +1 -1
  105. package/dist/modules/src/target.d.ts +2 -2
  106. package/dist/modules/src/target.d.ts.map +1 -1
  107. package/dist/modules/src/tsconfig.tsbuildinfo +1 -1
  108. package/dist/modules/src/wave/riff-writer.d.ts +1 -1
  109. package/dist/modules/src/wave/riff-writer.d.ts.map +1 -1
  110. package/dist/modules/src/wave/wave-demuxer.d.ts +6 -6
  111. package/dist/modules/src/wave/wave-demuxer.d.ts.map +1 -1
  112. package/dist/modules/src/wave/wave-muxer.d.ts +4 -4
  113. package/dist/modules/src/wave/wave-muxer.d.ts.map +1 -1
  114. package/dist/modules/src/writer.d.ts +1 -1
  115. package/dist/modules/src/writer.d.ts.map +1 -1
  116. package/dist/packages/eac3/eac3.wasm +0 -0
  117. package/dist/packages/eac3/mediabunny-eac3.js +1058 -0
  118. package/dist/packages/eac3/mediabunny-eac3.min.js +44 -0
  119. package/dist/packages/mp3-encoder/mediabunny-mp3-encoder.js +694 -0
  120. package/dist/packages/mp3-encoder/mediabunny-mp3-encoder.min.js +58 -0
  121. package/dist/packages/mpeg4/mediabunny-mpeg4.js +1198 -0
  122. package/dist/packages/mpeg4/mediabunny-mpeg4.min.js +44 -0
  123. package/dist/packages/mpeg4/xvid.wasm +0 -0
  124. package/package.json +18 -57
  125. package/dist/bundles/mediabunny.cjs +0 -26140
  126. package/dist/bundles/mediabunny.min.cjs +0 -147
  127. package/dist/bundles/mediabunny.min.mjs +0 -146
  128. package/dist/mediabunny.d.ts +0 -3319
  129. package/dist/modules/shared/mp3-misc.js +0 -147
  130. package/dist/modules/src/adts/adts-demuxer.js +0 -239
  131. package/dist/modules/src/adts/adts-muxer.js +0 -80
  132. package/dist/modules/src/adts/adts-reader.js +0 -63
  133. package/dist/modules/src/codec-data.js +0 -1730
  134. package/dist/modules/src/codec.js +0 -869
  135. package/dist/modules/src/conversion.js +0 -1459
  136. package/dist/modules/src/custom-coder.js +0 -117
  137. package/dist/modules/src/demuxer.js +0 -12
  138. package/dist/modules/src/encode.js +0 -442
  139. package/dist/modules/src/flac/flac-demuxer.js +0 -504
  140. package/dist/modules/src/flac/flac-misc.js +0 -135
  141. package/dist/modules/src/flac/flac-muxer.js +0 -222
  142. package/dist/modules/src/id3.js +0 -848
  143. package/dist/modules/src/index.js +0 -28
  144. package/dist/modules/src/input-format.js +0 -480
  145. package/dist/modules/src/input-track.js +0 -372
  146. package/dist/modules/src/input.js +0 -188
  147. package/dist/modules/src/isobmff/isobmff-boxes.js +0 -1480
  148. package/dist/modules/src/isobmff/isobmff-demuxer.js +0 -2618
  149. package/dist/modules/src/isobmff/isobmff-misc.js +0 -20
  150. package/dist/modules/src/isobmff/isobmff-muxer.js +0 -966
  151. package/dist/modules/src/isobmff/isobmff-reader.js +0 -72
  152. package/dist/modules/src/matroska/ebml.js +0 -653
  153. package/dist/modules/src/matroska/matroska-demuxer.js +0 -2133
  154. package/dist/modules/src/matroska/matroska-misc.js +0 -20
  155. package/dist/modules/src/matroska/matroska-muxer.js +0 -1017
  156. package/dist/modules/src/media-sink.js +0 -1736
  157. package/dist/modules/src/media-source.js +0 -1825
  158. package/dist/modules/src/metadata.js +0 -193
  159. package/dist/modules/src/misc.js +0 -623
  160. package/dist/modules/src/mp3/mp3-demuxer.js +0 -285
  161. package/dist/modules/src/mp3/mp3-muxer.js +0 -123
  162. package/dist/modules/src/mp3/mp3-reader.js +0 -26
  163. package/dist/modules/src/mp3/mp3-writer.js +0 -78
  164. package/dist/modules/src/muxer.js +0 -50
  165. package/dist/modules/src/node.d.ts +0 -9
  166. package/dist/modules/src/node.d.ts.map +0 -1
  167. package/dist/modules/src/node.js +0 -9
  168. package/dist/modules/src/ogg/ogg-demuxer.js +0 -763
  169. package/dist/modules/src/ogg/ogg-misc.js +0 -78
  170. package/dist/modules/src/ogg/ogg-muxer.js +0 -353
  171. package/dist/modules/src/ogg/ogg-reader.js +0 -65
  172. package/dist/modules/src/output-format.js +0 -527
  173. package/dist/modules/src/output.js +0 -300
  174. package/dist/modules/src/packet.js +0 -182
  175. package/dist/modules/src/pcm.js +0 -85
  176. package/dist/modules/src/reader.js +0 -236
  177. package/dist/modules/src/sample.js +0 -1056
  178. package/dist/modules/src/source.js +0 -1182
  179. package/dist/modules/src/subtitles.js +0 -575
  180. package/dist/modules/src/target.js +0 -140
  181. package/dist/modules/src/wave/riff-writer.js +0 -30
  182. package/dist/modules/src/wave/wave-demuxer.js +0 -447
  183. package/dist/modules/src/wave/wave-muxer.js +0 -318
  184. package/dist/modules/src/writer.js +0 -370
  185. package/src/adts/adts-demuxer.ts +0 -331
  186. package/src/adts/adts-muxer.ts +0 -111
  187. package/src/adts/adts-reader.ts +0 -85
  188. package/src/codec-data.ts +0 -2078
  189. package/src/codec.ts +0 -1092
  190. package/src/conversion.ts +0 -2112
  191. package/src/custom-coder.ts +0 -197
  192. package/src/demuxer.ts +0 -24
  193. package/src/encode.ts +0 -739
  194. package/src/flac/flac-demuxer.ts +0 -730
  195. package/src/flac/flac-misc.ts +0 -164
  196. package/src/flac/flac-muxer.ts +0 -320
  197. package/src/id3.ts +0 -925
  198. package/src/index.ts +0 -221
  199. package/src/input-format.ts +0 -541
  200. package/src/input-track.ts +0 -529
  201. package/src/input.ts +0 -235
  202. package/src/isobmff/isobmff-boxes.ts +0 -1719
  203. package/src/isobmff/isobmff-demuxer.ts +0 -3190
  204. package/src/isobmff/isobmff-misc.ts +0 -29
  205. package/src/isobmff/isobmff-muxer.ts +0 -1348
  206. package/src/isobmff/isobmff-reader.ts +0 -91
  207. package/src/matroska/ebml.ts +0 -730
  208. package/src/matroska/matroska-demuxer.ts +0 -2481
  209. package/src/matroska/matroska-misc.ts +0 -29
  210. package/src/matroska/matroska-muxer.ts +0 -1276
  211. package/src/media-sink.ts +0 -2179
  212. package/src/media-source.ts +0 -2243
  213. package/src/metadata.ts +0 -320
  214. package/src/misc.ts +0 -798
  215. package/src/mp3/mp3-demuxer.ts +0 -383
  216. package/src/mp3/mp3-muxer.ts +0 -166
  217. package/src/mp3/mp3-reader.ts +0 -34
  218. package/src/mp3/mp3-writer.ts +0 -120
  219. package/src/muxer.ts +0 -88
  220. package/src/node.ts +0 -11
  221. package/src/ogg/ogg-demuxer.ts +0 -1053
  222. package/src/ogg/ogg-misc.ts +0 -116
  223. package/src/ogg/ogg-muxer.ts +0 -497
  224. package/src/ogg/ogg-reader.ts +0 -93
  225. package/src/output-format.ts +0 -945
  226. package/src/output.ts +0 -488
  227. package/src/packet.ts +0 -263
  228. package/src/pcm.ts +0 -112
  229. package/src/reader.ts +0 -323
  230. package/src/sample.ts +0 -1461
  231. package/src/source.ts +0 -1688
  232. package/src/subtitles.ts +0 -711
  233. package/src/target.ts +0 -204
  234. package/src/tsconfig.json +0 -16
  235. package/src/wave/riff-writer.ts +0 -36
  236. package/src/wave/wave-demuxer.ts +0 -529
  237. package/src/wave/wave-muxer.ts +0 -371
  238. package/src/writer.ts +0 -490
package/src/sample.ts DELETED
@@ -1,1461 +0,0 @@
1
- /*!
2
- * Copyright (c) 2025-present, Vanilagy and contributors
3
- *
4
- * This Source Code Form is subject to the terms of the Mozilla Public
5
- * License, v. 2.0. If a copy of the MPL was not distributed with this
6
- * file, You can obtain one at https://mozilla.org/MPL/2.0/.
7
- */
8
-
9
- import {
10
- assert,
11
- clamp,
12
- isAllowSharedBufferSource,
13
- Rotation,
14
- SECOND_TO_MICROSECOND_FACTOR,
15
- toDataView,
16
- toUint8Array,
17
- SetRequired,
18
- isFirefox,
19
- polyfillSymbolDispose,
20
- } from './misc';
21
-
22
- polyfillSymbolDispose();
23
-
24
- /**
25
- * Metadata used for VideoSample initialization.
26
- * @group Samples
27
- * @public
28
- */
29
- export type VideoSampleInit = {
30
- /**
31
- * The internal pixel format in which the frame is stored.
32
- * [See pixel formats](https://developer.mozilla.org/en-US/docs/Web/API/VideoFrame/format)
33
- */
34
- format?: VideoPixelFormat;
35
- /** The width of the frame in pixels. */
36
- codedWidth?: number;
37
- /** The height of the frame in pixels. */
38
- codedHeight?: number;
39
- /** The rotation of the frame in degrees, clockwise. */
40
- rotation?: Rotation;
41
- /** The presentation timestamp of the frame in seconds. */
42
- timestamp?: number;
43
- /** The duration of the frame in seconds. */
44
- duration?: number;
45
- /** The color space of the frame. */
46
- colorSpace?: VideoColorSpaceInit;
47
- };
48
-
49
- /**
50
- * Represents a raw, unencoded video sample (frame). Mainly used as an expressive wrapper around WebCodecs API's
51
- * [`VideoFrame`](https://developer.mozilla.org/en-US/docs/Web/API/VideoFrame), but can also be used standalone.
52
- * @group Samples
53
- * @public
54
- */
55
- export class VideoSample implements Disposable {
56
- /** @internal */
57
- _data!: VideoFrame | OffscreenCanvas | Uint8Array | null;
58
- /** @internal */
59
- _closed: boolean = false;
60
-
61
- /**
62
- * The internal pixel format in which the frame is stored.
63
- * [See pixel formats](https://developer.mozilla.org/en-US/docs/Web/API/VideoFrame/format)
64
- */
65
- readonly format!: VideoPixelFormat | null;
66
- /** The width of the frame in pixels. */
67
- readonly codedWidth!: number;
68
- /** The height of the frame in pixels. */
69
- readonly codedHeight!: number;
70
- /** The rotation of the frame in degrees, clockwise. */
71
- readonly rotation!: Rotation;
72
- /**
73
- * The presentation timestamp of the frame in seconds. May be negative. Frames with negative end timestamps should
74
- * not be presented.
75
- */
76
- readonly timestamp!: number;
77
- /** The duration of the frame in seconds. */
78
- readonly duration!: number;
79
- /** The color space of the frame. */
80
- readonly colorSpace!: VideoColorSpace;
81
-
82
- /** The width of the frame in pixels after rotation. */
83
- get displayWidth() {
84
- return this.rotation % 180 === 0 ? this.codedWidth : this.codedHeight;
85
- }
86
-
87
- /** The height of the frame in pixels after rotation. */
88
- get displayHeight() {
89
- return this.rotation % 180 === 0 ? this.codedHeight : this.codedWidth;
90
- }
91
-
92
- /** The presentation timestamp of the frame in microseconds. */
93
- get microsecondTimestamp() {
94
- return Math.trunc(SECOND_TO_MICROSECOND_FACTOR * this.timestamp);
95
- }
96
-
97
- /** The duration of the frame in microseconds. */
98
- get microsecondDuration() {
99
- return Math.trunc(SECOND_TO_MICROSECOND_FACTOR * this.duration);
100
- }
101
-
102
- /**
103
- * Whether this sample uses a pixel format that can hold transparency data. Note that this doesn't necessarily mean
104
- * that the sample is transparent.
105
- */
106
- get hasAlpha() {
107
- return this.format && this.format.includes('A');
108
- }
109
-
110
- /**
111
- * Creates a new {@link VideoSample} from a
112
- * [`VideoFrame`](https://developer.mozilla.org/en-US/docs/Web/API/VideoFrame). This is essentially a near zero-cost
113
- * wrapper around `VideoFrame`. The sample's metadata is optionally refined using the data specified in `init`.
114
- */
115
- constructor(data: VideoFrame, init?: VideoSampleInit);
116
- /**
117
- * Creates a new {@link VideoSample} from a
118
- * [`CanvasImageSource`](https://udn.realityripple.com/docs/Web/API/CanvasImageSource), similar to the
119
- * [`VideoFrame`](https://developer.mozilla.org/en-US/docs/Web/API/VideoFrame) constructor. When `VideoFrame` is
120
- * available, this is simply a wrapper around its constructor. If not, it will copy the source's image data to an
121
- * internal canvas for later use.
122
- */
123
- constructor(data: CanvasImageSource, init: SetRequired<VideoSampleInit, 'timestamp'>);
124
- /**
125
- * Creates a new {@link VideoSample} from raw pixel data specified in `data`. Additional metadata must be provided
126
- * in `init`.
127
- */
128
- constructor(
129
- data: AllowSharedBufferSource,
130
- init: SetRequired<VideoSampleInit, 'format' | 'codedWidth' | 'codedHeight' | 'timestamp'>
131
- );
132
- constructor(
133
- data: VideoFrame | CanvasImageSource | AllowSharedBufferSource,
134
- init?: VideoSampleInit,
135
- ) {
136
- if (data instanceof ArrayBuffer || ArrayBuffer.isView(data)) {
137
- if (!init || typeof init !== 'object') {
138
- throw new TypeError('init must be an object.');
139
- }
140
- if (!('format' in init) || typeof init.format !== 'string') {
141
- throw new TypeError('init.format must be a string.');
142
- }
143
- if (!Number.isInteger(init.codedWidth) || init.codedWidth! <= 0) {
144
- throw new TypeError('init.codedWidth must be a positive integer.');
145
- }
146
- if (!Number.isInteger(init.codedHeight) || init.codedHeight! <= 0) {
147
- throw new TypeError('init.codedHeight must be a positive integer.');
148
- }
149
- if (init.rotation !== undefined && ![0, 90, 180, 270].includes(init.rotation)) {
150
- throw new TypeError('init.rotation, when provided, must be 0, 90, 180, or 270.');
151
- }
152
- if (!Number.isFinite(init.timestamp)) {
153
- throw new TypeError('init.timestamp must be a number.');
154
- }
155
- if (init.duration !== undefined && (!Number.isFinite(init.duration) || init.duration < 0)) {
156
- throw new TypeError('init.duration, when provided, must be a non-negative number.');
157
- }
158
-
159
- this._data = toUint8Array(data).slice(); // Copy it
160
-
161
- this.format = init.format;
162
- this.codedWidth = init.codedWidth!;
163
- this.codedHeight = init.codedHeight!;
164
- this.rotation = init.rotation ?? 0;
165
- this.timestamp = init.timestamp!;
166
- this.duration = init.duration ?? 0;
167
- this.colorSpace = new VideoColorSpace(init.colorSpace);
168
- } else if (typeof VideoFrame !== 'undefined' && data instanceof VideoFrame) {
169
- if (init?.rotation !== undefined && ![0, 90, 180, 270].includes(init.rotation)) {
170
- throw new TypeError('init.rotation, when provided, must be 0, 90, 180, or 270.');
171
- }
172
- if (init?.timestamp !== undefined && !Number.isFinite(init?.timestamp)) {
173
- throw new TypeError('init.timestamp, when provided, must be a number.');
174
- }
175
- if (init?.duration !== undefined && (!Number.isFinite(init.duration) || init.duration < 0)) {
176
- throw new TypeError('init.duration, when provided, must be a non-negative number.');
177
- }
178
-
179
- this._data = data;
180
-
181
- this.format = data.format;
182
- // Copying the display dimensions here, assuming no innate VideoFrame rotation
183
- this.codedWidth = data.displayWidth;
184
- this.codedHeight = data.displayHeight;
185
- // The VideoFrame's rotation is ignored here. It's still a new field, and I'm not sure of any application
186
- // where the browser makes use of it. If a case gets found, I'll add it.
187
- this.rotation = init?.rotation ?? 0;
188
- this.timestamp = init?.timestamp ?? data.timestamp / 1e6;
189
- this.duration = init?.duration ?? (data.duration ?? 0) / 1e6;
190
- this.colorSpace = data.colorSpace;
191
- } else if (
192
- (typeof HTMLImageElement !== 'undefined' && data instanceof HTMLImageElement)
193
- || (typeof SVGImageElement !== 'undefined' && data instanceof SVGImageElement)
194
- || (typeof ImageBitmap !== 'undefined' && data instanceof ImageBitmap)
195
- || (typeof HTMLVideoElement !== 'undefined' && data instanceof HTMLVideoElement)
196
- || (typeof HTMLCanvasElement !== 'undefined' && data instanceof HTMLCanvasElement)
197
- || (typeof OffscreenCanvas !== 'undefined' && data instanceof OffscreenCanvas)
198
- ) {
199
- if (!init || typeof init !== 'object') {
200
- throw new TypeError('init must be an object.');
201
- }
202
- if (init.rotation !== undefined && ![0, 90, 180, 270].includes(init.rotation)) {
203
- throw new TypeError('init.rotation, when provided, must be 0, 90, 180, or 270.');
204
- }
205
- if (!Number.isFinite(init.timestamp)) {
206
- throw new TypeError('init.timestamp must be a number.');
207
- }
208
- if (init.duration !== undefined && (!Number.isFinite(init.duration) || init.duration < 0)) {
209
- throw new TypeError('init.duration, when provided, must be a non-negative number.');
210
- }
211
-
212
- if (typeof VideoFrame !== 'undefined') {
213
- return new VideoSample(
214
- new VideoFrame(data, {
215
- timestamp: Math.trunc(init.timestamp! * SECOND_TO_MICROSECOND_FACTOR),
216
- // Drag 0 to undefined
217
- duration: Math.trunc((init.duration ?? 0) * SECOND_TO_MICROSECOND_FACTOR) || undefined,
218
- }),
219
- init,
220
- );
221
- }
222
-
223
- let width = 0;
224
- let height = 0;
225
-
226
- // Determine the dimensions of the thing
227
- if ('naturalWidth' in data) {
228
- width = data.naturalWidth;
229
- height = data.naturalHeight;
230
- } else if ('videoWidth' in data) {
231
- width = data.videoWidth;
232
- height = data.videoHeight;
233
- } else if ('width' in data) {
234
- width = Number(data.width);
235
- height = Number(data.height);
236
- }
237
-
238
- if (!width || !height) {
239
- throw new TypeError('Could not determine dimensions.');
240
- }
241
-
242
- const canvas = new OffscreenCanvas(width, height);
243
- const context = canvas.getContext('2d', {
244
- alpha: isFirefox(), // Firefox has VideoFrame glitches with opaque canvases
245
- willReadFrequently: true,
246
- });
247
- assert(context);
248
-
249
- // Draw it to a canvas
250
- context.drawImage(data, 0, 0);
251
- this._data = canvas;
252
-
253
- this.format = 'RGBX';
254
- this.codedWidth = width;
255
- this.codedHeight = height;
256
- this.rotation = init.rotation ?? 0;
257
- this.timestamp = init.timestamp!;
258
- this.duration = init.duration ?? 0;
259
- this.colorSpace = new VideoColorSpace({
260
- matrix: 'rgb',
261
- primaries: 'bt709',
262
- transfer: 'iec61966-2-1',
263
- fullRange: true,
264
- });
265
- } else {
266
- throw new TypeError('Invalid data type: Must be a BufferSource or CanvasImageSource.');
267
- }
268
- }
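// --- Illustrative usage sketch (editor's annotation, not part of the package diff) ---
// Constructing a VideoSample from raw pixel data via the BufferSource overload documented above.
// The import path is an assumption; adjust it to the package's actual public entry point.
import { VideoSample } from 'mediabunny';

const width = 320;
const height = 240;
const pixels = new Uint8Array(width * height * 4); // 'RGBA' => 4 bytes per pixel

const rawSample = new VideoSample(pixels, {
	format: 'RGBA',
	codedWidth: width,
	codedHeight: height,
	timestamp: 0,     // seconds
	duration: 1 / 30, // seconds
});
// displayWidth/displayHeight equal the coded dimensions here, since rotation defaults to 0.
// Remember to call rawSample.close() once the sample is no longer needed (see close() below).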
269
-
270
- /** Clones this video sample. */
271
- clone() {
272
- if (this._closed) {
273
- throw new Error('VideoSample is closed.');
274
- }
275
-
276
- assert(this._data !== null);
277
-
278
- if (isVideoFrame(this._data)) {
279
- return new VideoSample(this._data.clone(), {
280
- timestamp: this.timestamp,
281
- duration: this.duration,
282
- rotation: this.rotation,
283
- });
284
- } else if (this._data instanceof Uint8Array) {
285
- return new VideoSample(this._data.slice(), {
286
- format: this.format!,
287
- codedWidth: this.codedWidth,
288
- codedHeight: this.codedHeight,
289
- timestamp: this.timestamp,
290
- duration: this.duration,
291
- colorSpace: this.colorSpace,
292
- rotation: this.rotation,
293
- });
294
- } else {
295
- return new VideoSample(this._data, {
296
- format: this.format!,
297
- codedWidth: this.codedWidth,
298
- codedHeight: this.codedHeight,
299
- timestamp: this.timestamp,
300
- duration: this.duration,
301
- colorSpace: this.colorSpace,
302
- rotation: this.rotation,
303
- });
304
- }
305
- }
306
-
307
- /**
308
- * Closes this video sample, releasing held resources. Video samples should be closed as soon as they are not
309
- * needed anymore.
310
- */
311
- close() {
312
- if (this._closed) {
313
- return;
314
- }
315
-
316
- if (isVideoFrame(this._data)) {
317
- this._data.close();
318
- } else {
319
- this._data = null; // GC that shit
320
- }
321
-
322
- this._closed = true;
323
- }
324
-
325
- /** Returns the number of bytes required to hold this video sample's pixel data. */
326
- allocationSize() {
327
- if (this._closed) {
328
- throw new Error('VideoSample is closed.');
329
- }
330
-
331
- assert(this._data !== null);
332
-
333
- if (isVideoFrame(this._data)) {
334
- return this._data.allocationSize();
335
- } else if (this._data instanceof Uint8Array) {
336
- return this._data.byteLength;
337
- } else {
338
- return this.codedWidth * this.codedHeight * 4; // RGBX
339
- }
340
- }
341
-
342
- /** Copies this video sample's pixel data to an ArrayBuffer or ArrayBufferView. */
343
- async copyTo(destination: AllowSharedBufferSource) {
344
- if (!isAllowSharedBufferSource(destination)) {
345
- throw new TypeError('destination must be an ArrayBuffer or an ArrayBuffer view.');
346
- }
347
-
348
- if (this._closed) {
349
- throw new Error('VideoSample is closed.');
350
- }
351
-
352
- assert(this._data !== null);
353
-
354
- if (isVideoFrame(this._data)) {
355
- await this._data.copyTo(destination);
356
- } else if (this._data instanceof Uint8Array) {
357
- const dest = toUint8Array(destination);
358
- dest.set(this._data);
359
- } else {
360
- const canvas = this._data;
361
- const context = canvas.getContext('2d');
362
- assert(context);
363
-
364
- const imageData = context.getImageData(0, 0, this.codedWidth, this.codedHeight);
365
- const dest = toUint8Array(destination);
366
- dest.set(imageData.data);
367
- }
368
- }
369
-
370
- /**
371
- * Converts this video sample to a VideoFrame for use with the WebCodecs API. The VideoFrame returned by this
372
- * method *must* be closed separately from this video sample.
373
- */
374
- toVideoFrame() {
375
- if (this._closed) {
376
- throw new Error('VideoSample is closed.');
377
- }
378
-
379
- assert(this._data !== null);
380
-
381
- if (isVideoFrame(this._data)) {
382
- return new VideoFrame(this._data, {
383
- timestamp: this.microsecondTimestamp,
384
- duration: this.microsecondDuration || undefined, // Drag 0 duration to undefined, glitches some codecs
385
- });
386
- } else if (this._data instanceof Uint8Array) {
387
- return new VideoFrame(this._data, {
388
- format: this.format!,
389
- codedWidth: this.codedWidth,
390
- codedHeight: this.codedHeight,
391
- timestamp: this.microsecondTimestamp,
392
- duration: this.microsecondDuration || undefined,
393
- colorSpace: this.colorSpace,
394
- });
395
- } else {
396
- return new VideoFrame(this._data, {
397
- timestamp: this.microsecondTimestamp,
398
- duration: this.microsecondDuration || undefined,
399
- });
400
- }
401
- }
402
-
403
- /**
404
- * Draws the video sample to a 2D canvas context. Rotation metadata will be taken into account.
405
- *
406
- * @param dx - The x-coordinate in the destination canvas at which to place the top-left corner of the source image.
407
- * @param dy - The y-coordinate in the destination canvas at which to place the top-left corner of the source image.
408
- * @param dWidth - The width in pixels with which to draw the image in the destination canvas.
409
- * @param dHeight - The height in pixels with which to draw the image in the destination canvas.
410
- */
411
- draw(
412
- context: CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D,
413
- dx: number,
414
- dy: number,
415
- dWidth?: number,
416
- dHeight?: number,
417
- ): void;
418
- /**
419
- * Draws the video sample to a 2D canvas context. Rotation metadata will be taken into account.
420
- *
421
- * @param sx - The x-coordinate of the top left corner of the sub-rectangle of the source image to draw into the
422
- * destination context.
423
- * @param sy - The y-coordinate of the top left corner of the sub-rectangle of the source image to draw into the
424
- * destination context.
425
- * @param sWidth - The width of the sub-rectangle of the source image to draw into the destination context.
426
- * @param sHeight - The height of the sub-rectangle of the source image to draw into the destination context.
427
- * @param dx - The x-coordinate in the destination canvas at which to place the top-left corner of the source image.
428
- * @param dy - The y-coordinate in the destination canvas at which to place the top-left corner of the source image.
429
- * @param dWidth - The width in pixels with which to draw the image in the destination canvas.
430
- * @param dHeight - The height in pixels with which to draw the image in the destination canvas.
431
- */
432
- draw(
433
- context: CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D,
434
- sx: number,
435
- sy: number,
436
- sWidth: number,
437
- sHeight: number,
438
- dx: number,
439
- dy: number,
440
- dWidth?: number,
441
- dHeight?: number,
442
- ): void;
443
- draw(
444
- context: CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D,
445
- arg1: number,
446
- arg2: number,
447
- arg3?: number,
448
- arg4?: number,
449
- arg5?: number,
450
- arg6?: number,
451
- arg7?: number,
452
- arg8?: number,
453
- ) {
454
- let sx = 0;
455
- let sy = 0;
456
- let sWidth = this.displayWidth;
457
- let sHeight = this.displayHeight;
458
- let dx = 0;
459
- let dy = 0;
460
- let dWidth = this.displayWidth;
461
- let dHeight = this.displayHeight;
462
-
463
- if (arg5 !== undefined) {
464
- sx = arg1!;
465
- sy = arg2!;
466
- sWidth = arg3!;
467
- sHeight = arg4!;
468
- dx = arg5;
469
- dy = arg6!;
470
-
471
- if (arg7 !== undefined) {
472
- dWidth = arg7;
473
- dHeight = arg8!;
474
- } else {
475
- dWidth = sWidth;
476
- dHeight = sHeight;
477
- }
478
- } else {
479
- dx = arg1;
480
- dy = arg2;
481
-
482
- if (arg3 !== undefined) {
483
- dWidth = arg3;
484
- dHeight = arg4!;
485
- }
486
- }
487
-
488
- if (!(
489
- (typeof CanvasRenderingContext2D !== 'undefined' && context instanceof CanvasRenderingContext2D)
490
- || (
491
- typeof OffscreenCanvasRenderingContext2D !== 'undefined'
492
- && context instanceof OffscreenCanvasRenderingContext2D
493
- )
494
- )) {
495
- throw new TypeError('context must be a CanvasRenderingContext2D or OffscreenCanvasRenderingContext2D.');
496
- }
497
- if (!Number.isFinite(sx)) {
498
- throw new TypeError('sx must be a number.');
499
- }
500
- if (!Number.isFinite(sy)) {
501
- throw new TypeError('sy must be a number.');
502
- }
503
- if (!Number.isFinite(sWidth) || sWidth < 0) {
504
- throw new TypeError('sWidth must be a non-negative number.');
505
- }
506
- if (!Number.isFinite(sHeight) || sHeight < 0) {
507
- throw new TypeError('sHeight must be a non-negative number.');
508
- }
509
- if (!Number.isFinite(dx)) {
510
- throw new TypeError('dx must be a number.');
511
- }
512
- if (!Number.isFinite(dy)) {
513
- throw new TypeError('dy must be a number.');
514
- }
515
- if (!Number.isFinite(dWidth) || dWidth < 0) {
516
- throw new TypeError('dWidth must be a non-negative number.');
517
- }
518
- if (!Number.isFinite(dHeight) || dHeight < 0) {
519
- throw new TypeError('dHeight must be a non-negative number.');
520
- }
521
-
522
- if (this._closed) {
523
- throw new Error('VideoSample is closed.');
524
- }
525
-
526
- ({ sx, sy, sWidth, sHeight } = this._rotateSourceRegion(sx, sy, sWidth, sHeight, this.rotation));
527
-
528
- const source = this.toCanvasImageSource();
529
-
530
- context.save();
531
-
532
- const centerX = dx + dWidth / 2;
533
- const centerY = dy + dHeight / 2;
534
-
535
- context.translate(centerX, centerY);
536
- context.rotate(this.rotation * Math.PI / 180);
537
-
538
- const aspectRatioChange = this.rotation % 180 === 0 ? 1 : dWidth / dHeight;
539
-
540
- // Scale to compensate for aspect ratio changes when rotated
541
- context.scale(1 / aspectRatioChange, aspectRatioChange);
542
-
543
- context.drawImage(
544
- source,
545
- sx,
546
- sy,
547
- sWidth,
548
- sHeight,
549
- -dWidth / 2,
550
- -dHeight / 2,
551
- dWidth,
552
- dHeight,
553
- );
554
-
555
- context.restore();
556
- }
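// --- Illustrative usage sketch (editor's annotation, not part of the package diff) ---
// draw() applies the sample's rotation metadata, so the destination box is given in display space.
// Assumes `rawSample` is an open VideoSample (e.g. from the construction sketch above).
const canvas = new OffscreenCanvas(rawSample.displayWidth, rawSample.displayHeight);
const ctx = canvas.getContext('2d')!;
rawSample.draw(ctx, 0, 0, canvas.width, canvas.height); // (context, dx, dy, dWidth, dHeight) overload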
557
-
558
- /**
559
- * Draws the sample in the middle of the canvas corresponding to the context with the specified fit behavior.
560
- */
561
- drawWithFit(context: CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D, options: {
562
- /**
563
- * Controls the fitting algorithm.
564
- *
565
- * - `'fill'` will stretch the image to fill the entire box, potentially altering aspect ratio.
566
- * - `'contain'` will contain the entire image within the box while preserving aspect ratio. This may lead to
567
- * letterboxing.
568
- * - `'cover'` will scale the image until the entire box is filled, while preserving aspect ratio.
569
- */
570
- fit: 'fill' | 'contain' | 'cover';
571
- /** A way to override rotation. Defaults to the rotation of the sample. */
572
- rotation?: Rotation;
573
- /**
574
- * Specifies the rectangular region of the video sample to crop to. The crop region will automatically be
575
- * clamped to the dimensions of the video sample. Cropping is performed after rotation but before resizing.
576
- */
577
- crop?: CropRectangle;
578
- }) {
579
- if (!(
580
- (typeof CanvasRenderingContext2D !== 'undefined' && context instanceof CanvasRenderingContext2D)
581
- || (
582
- typeof OffscreenCanvasRenderingContext2D !== 'undefined'
583
- && context instanceof OffscreenCanvasRenderingContext2D
584
- )
585
- )) {
586
- throw new TypeError('context must be a CanvasRenderingContext2D or OffscreenCanvasRenderingContext2D.');
587
- }
588
- if (!options || typeof options !== 'object') {
589
- throw new TypeError('options must be an object.');
590
- }
591
- if (!['fill', 'contain', 'cover'].includes(options.fit)) {
592
- throw new TypeError('options.fit must be \'fill\', \'contain\', or \'cover\'.');
593
- }
594
- if (options.rotation !== undefined && ![0, 90, 180, 270].includes(options.rotation)) {
595
- throw new TypeError('options.rotation, when provided, must be 0, 90, 180, or 270.');
596
- }
597
- if (options.crop !== undefined) {
598
- validateCropRectangle(options.crop, 'options.');
599
- }
600
-
601
- const canvasWidth = context.canvas.width;
602
- const canvasHeight = context.canvas.height;
603
- const rotation = options.rotation ?? this.rotation;
604
-
605
- const [rotatedWidth, rotatedHeight] = rotation % 180 === 0
606
- ? [this.codedWidth, this.codedHeight]
607
- : [this.codedHeight, this.codedWidth];
608
-
609
- if (options.crop) {
610
- clampCropRectangle(options.crop, rotatedWidth, rotatedHeight);
611
- }
612
-
613
- // These variables specify where the final sample will be drawn on the canvas
614
- let dx: number;
615
- let dy: number;
616
- let newWidth: number;
617
- let newHeight: number;
618
-
619
- const { sx, sy, sWidth, sHeight } = this._rotateSourceRegion(
620
- options.crop?.left ?? 0,
621
- options.crop?.top ?? 0,
622
- options.crop?.width ?? rotatedWidth,
623
- options.crop?.height ?? rotatedHeight,
624
- rotation,
625
- );
626
-
627
- if (options.fit === 'fill') {
628
- dx = 0;
629
- dy = 0;
630
- newWidth = canvasWidth;
631
- newHeight = canvasHeight;
632
- } else {
633
- const [sampleWidth, sampleHeight] = options.crop
634
- ? [options.crop.width, options.crop.height]
635
- : [rotatedWidth, rotatedHeight];
636
-
637
- const scale = options.fit === 'contain'
638
- ? Math.min(canvasWidth / sampleWidth, canvasHeight / sampleHeight)
639
- : Math.max(canvasWidth / sampleWidth, canvasHeight / sampleHeight);
640
- newWidth = sampleWidth * scale;
641
- newHeight = sampleHeight * scale;
642
- dx = (canvasWidth - newWidth) / 2;
643
- dy = (canvasHeight - newHeight) / 2;
644
- }
645
-
646
- context.save();
647
-
648
- const aspectRatioChange = rotation % 180 === 0 ? 1 : newWidth / newHeight;
649
- context.translate(canvasWidth / 2, canvasHeight / 2);
650
- context.rotate(rotation * Math.PI / 180);
651
- // This aspect ratio compensation is done so that we can draw the sample with the intended dimensions and
652
- // don't need to think about how those dimensions change after the rotation
653
- context.scale(1 / aspectRatioChange, aspectRatioChange);
654
- context.translate(-canvasWidth / 2, -canvasHeight / 2);
655
-
656
- // Important that we don't use .draw() here since that would take rotation into account, but we wanna handle it
657
- // ourselves here
658
- context.drawImage(this.toCanvasImageSource(), sx, sy, sWidth, sHeight, dx, dy, newWidth, newHeight);
659
-
660
- context.restore();
661
- }
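// --- Illustrative usage sketch (editor's annotation, not part of the package diff) ---
// Letterboxing a sample into a fixed-size canvas with fit: 'contain'. The optional crop is
// applied after rotation but before resizing, and is clamped to the sample's dimensions.
const thumb = new OffscreenCanvas(1280, 720);
const thumbCtx = thumb.getContext('2d')!;
rawSample.drawWithFit(thumbCtx, {
	fit: 'contain',
	crop: { left: 0, top: 0, width: rawSample.displayWidth, height: rawSample.displayHeight },
});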
662
-
663
- /** @internal */
664
- _rotateSourceRegion(sx: number, sy: number, sWidth: number, sHeight: number, rotation: number) {
665
- // The provided sx,sy,sWidth,sHeight refer to the final rotated image, but that's not actually how the image is
666
- // stored. Therefore, we must map these back onto the original, pre-rotation image.
667
- if (rotation === 90) {
668
- [sx, sy, sWidth, sHeight] = [
669
- sy,
670
- this.codedHeight - sx - sWidth,
671
- sHeight,
672
- sWidth,
673
- ];
674
- } else if (rotation === 180) {
675
- [sx, sy] = [
676
- this.codedWidth - sx - sWidth,
677
- this.codedHeight - sy - sHeight,
678
- ];
679
- } else if (rotation === 270) {
680
- [sx, sy, sWidth, sHeight] = [
681
- this.codedWidth - sy - sHeight,
682
- sx,
683
- sHeight,
684
- sWidth,
685
- ];
686
- }
687
-
688
- return { sx, sy, sWidth, sHeight };
689
- }
690
-
691
- /**
692
- * Converts this video sample to a
693
- * [`CanvasImageSource`](https://udn.realityripple.com/docs/Web/API/CanvasImageSource) for drawing to a canvas.
694
- *
695
- * You must use the value returned by this method immediately, as any VideoFrame created internally will
696
- * automatically be closed in the next microtask.
697
- */
698
- toCanvasImageSource() {
699
- if (this._closed) {
700
- throw new Error('VideoSample is closed.');
701
- }
702
-
703
- assert(this._data !== null);
704
-
705
- if (this._data instanceof Uint8Array) {
706
- // Requires VideoFrame to be defined
707
- const videoFrame = this.toVideoFrame();
708
- queueMicrotask(() => videoFrame.close()); // Let's automatically close the frame in the next microtask
709
-
710
- return videoFrame;
711
- } else {
712
- return this._data;
713
- }
714
- }
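// --- Illustrative usage sketch (editor's annotation, not part of the package diff) ---
// The value returned by toCanvasImageSource() must be consumed synchronously: any VideoFrame
// created internally is closed in the next microtask, as noted above. Reuses `thumbCtx` from
// the previous sketch.
const imageSource = rawSample.toCanvasImageSource();
thumbCtx.drawImage(imageSource, 0, 0); // draw immediately, before the microtask runs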
715
-
716
- /** Sets the rotation metadata of this video sample. */
717
- setRotation(newRotation: Rotation) {
718
- if (![0, 90, 180, 270].includes(newRotation)) {
719
- throw new TypeError('newRotation must be 0, 90, 180, or 270.');
720
- }
721
-
722
- // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion
723
- (this.rotation as Rotation) = newRotation;
724
- }
725
-
726
- /** Sets the presentation timestamp of this video sample, in seconds. */
727
- setTimestamp(newTimestamp: number) {
728
- if (!Number.isFinite(newTimestamp)) {
729
- throw new TypeError('newTimestamp must be a number.');
730
- }
731
-
732
- // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion
733
- (this.timestamp as number) = newTimestamp;
734
- }
735
-
736
- /** Sets the duration of this video sample, in seconds. */
737
- setDuration(newDuration: number) {
738
- if (!Number.isFinite(newDuration) || newDuration < 0) {
739
- throw new TypeError('newDuration must be a non-negative number.');
740
- }
741
-
742
- // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion
743
- (this.duration as number) = newDuration;
744
- }
745
-
746
- /** Calls `.close()`. */
747
- [Symbol.dispose]() {
748
- this.close();
749
- }
750
- }
751
-
752
- const isVideoFrame = (x: unknown): x is VideoFrame => {
753
- return typeof VideoFrame !== 'undefined' && x instanceof VideoFrame;
754
- };
755
-
756
- /**
757
- * Specifies the rectangular cropping region.
758
- * @group Miscellaneous
759
- * @public
760
- */
761
- export type CropRectangle = {
762
- /** The distance in pixels from the left edge of the source frame to the left edge of the crop rectangle. */
763
- left: number;
764
- /** The distance in pixels from the top edge of the source frame to the top edge of the crop rectangle. */
765
- top: number;
766
- /** The width in pixels of the crop rectangle. */
767
- width: number;
768
- /** The height in pixels of the crop rectangle. */
769
- height: number;
770
- };
771
-
772
- export const clampCropRectangle = (crop: CropRectangle, outerWidth: number, outerHeight: number) => {
773
- crop.left = Math.min(crop.left, outerWidth);
774
- crop.top = Math.min(crop.top, outerHeight);
775
- crop.width = Math.min(crop.width, outerWidth - crop.left);
776
- crop.height = Math.min(crop.height, outerHeight - crop.top);
777
-
778
- assert(crop.width >= 0);
779
- assert(crop.height >= 0);
780
- };
781
-
782
- export const validateCropRectangle = (crop: CropRectangle, prefix: string) => {
783
- if (!crop || typeof crop !== 'object') {
784
- throw new TypeError(prefix + 'crop, when provided, must be an object.');
785
- }
786
- if (!Number.isInteger(crop.left) || crop.left < 0) {
787
- throw new TypeError(prefix + 'crop.left must be a non-negative integer.');
788
- }
789
- if (!Number.isInteger(crop.top) || crop.top < 0) {
790
- throw new TypeError(prefix + 'crop.top must be a non-negative integer.');
791
- }
792
- if (!Number.isInteger(crop.width) || crop.width < 0) {
793
- throw new TypeError(prefix + 'crop.width must be a non-negative integer.');
794
- }
795
- if (!Number.isInteger(crop.height) || crop.height < 0) {
796
- throw new TypeError(prefix + 'crop.height must be a non-negative integer.');
797
- }
798
- };
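// --- Illustrative sketch (editor's annotation, not part of the package diff) ---
// How clampCropRectangle constrains an oversized crop region to the outer dimensions. Note that
// these helpers are module-level exports of sample.ts and may not be re-exported by the package.
const crop: CropRectangle = { left: 100, top: 50, width: 10_000, height: 10_000 };
clampCropRectangle(crop, 1920, 1080);
// crop is now { left: 100, top: 50, width: 1820, height: 1030 }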
799
-
800
- const AUDIO_SAMPLE_FORMATS = new Set(
801
- ['f32', 'f32-planar', 's16', 's16-planar', 's32', 's32-planar', 'u8', 'u8-planar'],
802
- );
803
-
804
- /**
805
- * Metadata used for AudioSample initialization.
806
- * @group Samples
807
- * @public
808
- */
809
- export type AudioSampleInit = {
810
- /** The audio data for this sample. */
811
- data: AllowSharedBufferSource;
812
- /**
813
- * The audio sample format. [See sample formats](https://developer.mozilla.org/en-US/docs/Web/API/AudioData/format)
814
- */
815
- format: AudioSampleFormat;
816
- /** The number of audio channels. */
817
- numberOfChannels: number;
818
- /** The audio sample rate in hertz. */
819
- sampleRate: number;
820
- /** The presentation timestamp of the sample in seconds. */
821
- timestamp: number;
822
- };
823
-
824
- /**
825
- * Options used for copying audio sample data.
826
- * @group Samples
827
- * @public
828
- */
829
- export type AudioSampleCopyToOptions = {
830
- /**
831
- * The index identifying the plane to copy from. This must be 0 if using a non-planar (interleaved) output format.
832
- */
833
- planeIndex: number;
834
- /**
835
- * The output format for the destination data. Defaults to the AudioSample's format.
836
- * [See sample formats](https://developer.mozilla.org/en-US/docs/Web/API/AudioData/format)
837
- */
838
- format?: AudioSampleFormat;
839
- /** An offset into the source plane data indicating which frame to begin copying from. Defaults to 0. */
840
- frameOffset?: number;
841
- /**
842
- * The number of frames to copy. If not provided, the copy will include all frames in the plane beginning
843
- * with frameOffset.
844
- */
845
- frameCount?: number;
846
- };
847
-
848
- /**
849
- * Represents a raw, unencoded audio sample. Mainly used as an expressive wrapper around WebCodecs API's
850
- * [`AudioData`](https://developer.mozilla.org/en-US/docs/Web/API/AudioData), but can also be used standalone.
851
- * @group Samples
852
- * @public
853
- */
854
- export class AudioSample implements Disposable {
855
- /** @internal */
856
- _data: AudioData | Uint8Array;
857
- /** @internal */
858
- _closed: boolean = false;
859
-
860
- /**
861
- * The audio sample format.
862
- * [See sample formats](https://developer.mozilla.org/en-US/docs/Web/API/AudioData/format)
863
- */
864
- readonly format: AudioSampleFormat;
865
- /** The audio sample rate in hertz. */
866
- readonly sampleRate: number;
867
- /**
868
- * The number of audio frames in the sample, per channel. In other words, the length of this audio sample in frames.
869
- */
870
- readonly numberOfFrames: number;
871
- /** The number of audio channels. */
872
- readonly numberOfChannels: number;
873
- /** The duration of the sample in seconds. */
874
- readonly duration: number;
875
- /**
876
- * The presentation timestamp of the sample in seconds. May be negative. Samples with negative end timestamps should
877
- * not be presented.
878
- */
879
- readonly timestamp: number;
880
-
881
- /** The presentation timestamp of the sample in microseconds. */
882
- get microsecondTimestamp() {
883
- return Math.trunc(SECOND_TO_MICROSECOND_FACTOR * this.timestamp);
884
- }
885
-
886
- /** The duration of the sample in microseconds. */
887
- get microsecondDuration() {
888
- return Math.trunc(SECOND_TO_MICROSECOND_FACTOR * this.duration);
889
- }
890
-
891
- /**
892
- * Creates a new {@link AudioSample}, either from an existing
893
- * [`AudioData`](https://developer.mozilla.org/en-US/docs/Web/API/AudioData) or from raw bytes specified in
894
- * {@link AudioSampleInit}.
895
- */
896
- constructor(init: AudioData | AudioSampleInit) {
897
- if (isAudioData(init)) {
898
- if (init.format === null) {
899
- throw new TypeError('AudioData with null format is not supported.');
900
- }
901
-
902
- this._data = init;
903
-
904
- this.format = init.format;
905
- this.sampleRate = init.sampleRate;
906
- this.numberOfFrames = init.numberOfFrames;
907
- this.numberOfChannels = init.numberOfChannels;
908
- this.timestamp = init.timestamp / 1e6;
909
- this.duration = init.numberOfFrames / init.sampleRate;
910
- } else {
911
- if (!init || typeof init !== 'object') {
912
- throw new TypeError('Invalid AudioDataInit: must be an object.');
913
- }
914
-
915
- if (!AUDIO_SAMPLE_FORMATS.has(init.format)) {
916
- throw new TypeError('Invalid AudioDataInit: invalid format.');
917
- }
918
- if (!Number.isFinite(init.sampleRate) || init.sampleRate <= 0) {
919
- throw new TypeError('Invalid AudioDataInit: sampleRate must be > 0.');
920
- }
921
- if (!Number.isInteger(init.numberOfChannels) || init.numberOfChannels === 0) {
922
- throw new TypeError('Invalid AudioDataInit: numberOfChannels must be an integer > 0.');
923
- }
924
- if (!Number.isFinite(init?.timestamp)) {
925
- throw new TypeError('init.timestamp must be a number.');
926
- }
927
-
928
- const numberOfFrames
929
- = init.data.byteLength / (getBytesPerSample(init.format) * init.numberOfChannels);
930
- if (!Number.isInteger(numberOfFrames)) {
931
- throw new TypeError('Invalid AudioDataInit: data size is not a multiple of frame size.');
932
- }
933
-
934
- this.format = init.format;
935
- this.sampleRate = init.sampleRate;
936
- this.numberOfFrames = numberOfFrames;
937
- this.numberOfChannels = init.numberOfChannels;
938
- this.timestamp = init.timestamp;
939
- this.duration = numberOfFrames / init.sampleRate;
940
-
941
- let dataBuffer: Uint8Array;
942
- if (init.data instanceof ArrayBuffer) {
943
- dataBuffer = new Uint8Array(init.data);
944
- } else if (ArrayBuffer.isView(init.data)) {
945
- dataBuffer = new Uint8Array(init.data.buffer, init.data.byteOffset, init.data.byteLength);
946
- } else {
947
- throw new TypeError('Invalid AudioDataInit: data is not a BufferSource.');
948
- }
949
-
950
- const expectedSize
951
- = this.numberOfFrames * this.numberOfChannels * getBytesPerSample(this.format);
952
- if (dataBuffer.byteLength < expectedSize) {
953
- throw new TypeError('Invalid AudioDataInit: insufficient data size.');
954
- }
955
-
956
- this._data = dataBuffer;
957
- }
958
- }
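// --- Illustrative usage sketch (editor's annotation, not part of the package diff) ---
// Creating a one-second stereo AudioSample from interleaved f32 PCM. numberOfFrames is derived
// from the data size as byteLength / (bytesPerSample * numberOfChannels), as computed above.
const sampleRate = 48_000;
const channels = 2;
const pcm = new Float32Array(sampleRate * channels); // 48 000 interleaved frames of silence

const audioSample = new AudioSample({
	data: pcm,
	format: 'f32',
	numberOfChannels: channels,
	sampleRate,
	timestamp: 0, // seconds
});
// audioSample.numberOfFrames === 48000, audioSample.duration === 1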
959
-
960
- /** Returns the number of bytes required to hold the audio sample's data as specified by the given options. */
961
- allocationSize(options: AudioSampleCopyToOptions) {
962
- if (!options || typeof options !== 'object') {
963
- throw new TypeError('options must be an object.');
964
- }
965
- if (!Number.isInteger(options.planeIndex) || options.planeIndex < 0) {
966
- throw new TypeError('planeIndex must be a non-negative integer.');
967
- }
968
-
969
- if (options.format !== undefined && !AUDIO_SAMPLE_FORMATS.has(options.format)) {
970
- throw new TypeError('Invalid format.');
971
- }
972
- if (options.frameOffset !== undefined && (!Number.isInteger(options.frameOffset) || options.frameOffset < 0)) {
973
- throw new TypeError('frameOffset must be a non-negative integer.');
974
- }
975
- if (options.frameCount !== undefined && (!Number.isInteger(options.frameCount) || options.frameCount < 0)) {
976
- throw new TypeError('frameCount must be a non-negative integer.');
977
- }
978
-
979
- if (this._closed) {
980
- throw new Error('AudioSample is closed.');
981
- }
982
-
983
- const destFormat = options.format ?? this.format;
984
-
985
- const frameOffset = options.frameOffset ?? 0;
986
- if (frameOffset >= this.numberOfFrames) {
987
- throw new RangeError('frameOffset out of range');
988
- }
989
-
990
- const copyFrameCount
991
- = options.frameCount !== undefined ? options.frameCount : (this.numberOfFrames - frameOffset);
992
- if (copyFrameCount > (this.numberOfFrames - frameOffset)) {
993
- throw new RangeError('frameCount out of range');
994
- }
995
-
996
- const bytesPerSample = getBytesPerSample(destFormat);
997
- const isPlanar = formatIsPlanar(destFormat);
998
- if (isPlanar && options.planeIndex >= this.numberOfChannels) {
999
- throw new RangeError('planeIndex out of range');
1000
- }
1001
- if (!isPlanar && options.planeIndex !== 0) {
1002
- throw new RangeError('planeIndex out of range');
1003
- }
1004
-
1005
- const elementCount = isPlanar ? copyFrameCount : copyFrameCount * this.numberOfChannels;
1006
- return elementCount * bytesPerSample;
1007
- }
1008
-
1009
- /** Copies the audio sample's data to an ArrayBuffer or ArrayBufferView as specified by the given options. */
1010
- copyTo(destination: AllowSharedBufferSource, options: AudioSampleCopyToOptions) {
1011
- if (!isAllowSharedBufferSource(destination)) {
1012
- throw new TypeError('destination must be an ArrayBuffer or an ArrayBuffer view.');
1013
- }
1014
- if (!options || typeof options !== 'object') {
1015
- throw new TypeError('options must be an object.');
1016
- }
1017
- if (!Number.isInteger(options.planeIndex) || options.planeIndex < 0) {
1018
- throw new TypeError('planeIndex must be a non-negative integer.');
1019
- }
1020
-
1021
- if (options.format !== undefined && !AUDIO_SAMPLE_FORMATS.has(options.format)) {
1022
- throw new TypeError('Invalid format.');
1023
- }
1024
- if (options.frameOffset !== undefined && (!Number.isInteger(options.frameOffset) || options.frameOffset < 0)) {
1025
- throw new TypeError('frameOffset must be a non-negative integer.');
1026
- }
1027
- if (options.frameCount !== undefined && (!Number.isInteger(options.frameCount) || options.frameCount < 0)) {
1028
- throw new TypeError('frameCount must be a non-negative integer.');
1029
- }
1030
-
1031
- if (this._closed) {
1032
- throw new Error('AudioSample is closed.');
1033
- }
1034
-
1035
- const { planeIndex, format, frameCount: optFrameCount, frameOffset: optFrameOffset } = options;
1036
-
1037
- const destFormat = format ?? this.format;
1038
- if (!destFormat) throw new Error('Destination format not determined');
1039
-
1040
- const numFrames = this.numberOfFrames;
1041
- const numChannels = this.numberOfChannels;
1042
- const frameOffset = optFrameOffset ?? 0;
1043
- if (frameOffset >= numFrames) {
1044
- throw new RangeError('frameOffset out of range');
1045
- }
1046
-
1047
- const copyFrameCount = optFrameCount !== undefined ? optFrameCount : (numFrames - frameOffset);
1048
- if (copyFrameCount > (numFrames - frameOffset)) {
1049
- throw new RangeError('frameCount out of range');
1050
- }
1051
-
1052
- const destBytesPerSample = getBytesPerSample(destFormat);
1053
- const destIsPlanar = formatIsPlanar(destFormat);
1054
- if (destIsPlanar && planeIndex >= numChannels) {
1055
- throw new RangeError('planeIndex out of range');
1056
- }
1057
- if (!destIsPlanar && planeIndex !== 0) {
1058
- throw new RangeError('planeIndex out of range');
1059
- }
1060
-
1061
- const destElementCount = destIsPlanar ? copyFrameCount : copyFrameCount * numChannels;
1062
- const requiredSize = destElementCount * destBytesPerSample;
1063
- if (destination.byteLength < requiredSize) {
1064
- throw new RangeError('Destination buffer is too small');
1065
- }
1066
-
1067
- const destView = toDataView(destination);
1068
- const writeFn = getWriteFunction(destFormat);
1069
-
1070
- if (isAudioData(this._data)) {
1071
- if (destIsPlanar) {
1072
- if (destFormat === 'f32-planar') {
1073
- // Simple, since the browser must support f32-planar, we can just delegate here
1074
- this._data.copyTo(destination, {
1075
- planeIndex,
1076
- frameOffset,
1077
- frameCount: copyFrameCount,
1078
- format: 'f32-planar',
1079
- });
1080
- } else {
1081
- // Allocate temporary buffer for f32-planar data
1082
- const tempBuffer = new ArrayBuffer(copyFrameCount * 4);
1083
- const tempArray = new Float32Array(tempBuffer);
1084
- this._data.copyTo(tempArray, {
1085
- planeIndex,
1086
- frameOffset,
1087
- frameCount: copyFrameCount,
1088
- format: 'f32-planar',
1089
- });
1090
-
1091
- // Convert each f32 sample to destination format
1092
- const tempView = new DataView(tempBuffer);
1093
- for (let i = 0; i < copyFrameCount; i++) {
1094
- const destOffset = i * destBytesPerSample;
1095
- const sample = tempView.getFloat32(i * 4, true);
1096
- writeFn(destView, destOffset, sample);
1097
- }
1098
- }
1099
- } else {
1100
- // Destination is interleaved.
1101
- // Allocate a temporary Float32Array to hold one channel's worth of data.
1102
- const numCh = numChannels;
1103
- const temp = new Float32Array(copyFrameCount);
1104
- for (let ch = 0; ch < numCh; ch++) {
1105
- this._data.copyTo(temp, {
1106
- planeIndex: ch,
1107
- frameOffset,
1108
- frameCount: copyFrameCount,
1109
- format: 'f32-planar',
1110
- });
1111
- for (let i = 0; i < copyFrameCount; i++) {
1112
- const destIndex = i * numCh + ch;
1113
- const destOffset = destIndex * destBytesPerSample;
1114
- writeFn(destView, destOffset, temp[i]!);
1115
- }
1116
- }
1117
- }
1118
- } else {
1119
- // Branch for Uint8Array data (non-AudioData)
1120
- const uint8Data = this._data;
1121
- const srcView = new DataView(uint8Data.buffer, uint8Data.byteOffset, uint8Data.byteLength);
1122
-
1123
- const srcFormat = this.format;
1124
- const readFn = getReadFunction(srcFormat);
1125
- const srcBytesPerSample = getBytesPerSample(srcFormat);
1126
- const srcIsPlanar = formatIsPlanar(srcFormat);
1127
-
1128
- for (let i = 0; i < copyFrameCount; i++) {
1129
- if (destIsPlanar) {
1130
- const destOffset = i * destBytesPerSample;
1131
- let srcOffset: number;
1132
- if (srcIsPlanar) {
1133
- srcOffset = (planeIndex * numFrames + (i + frameOffset)) * srcBytesPerSample;
1134
- } else {
1135
- srcOffset = (((i + frameOffset) * numChannels) + planeIndex) * srcBytesPerSample;
1136
- }
1137
-
1138
- const normalized = readFn(srcView, srcOffset);
1139
- writeFn(destView, destOffset, normalized);
1140
- } else {
1141
- for (let ch = 0; ch < numChannels; ch++) {
1142
- const destIndex = i * numChannels + ch;
1143
- const destOffset = destIndex * destBytesPerSample;
1144
- let srcOffset: number;
1145
- if (srcIsPlanar) {
1146
- srcOffset = (ch * numFrames + (i + frameOffset)) * srcBytesPerSample;
1147
- } else {
1148
- srcOffset = (((i + frameOffset) * numChannels) + ch) * srcBytesPerSample;
1149
- }
1150
-
1151
- const normalized = readFn(srcView, srcOffset);
1152
- writeFn(destView, destOffset, normalized);
1153
- }
1154
- }
1155
- }
1156
- }
1157
- }
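// --- Illustrative usage sketch (editor's annotation, not part of the package diff) ---
// Reading one channel back out as planar f32 data; allocationSize() reports the byte count
// required for the requested plane and format. Assumes `audioSample` from the sketch above.
const planeBytes = audioSample.allocationSize({ planeIndex: 0, format: 'f32-planar' });
const channelData = new Float32Array(planeBytes / Float32Array.BYTES_PER_ELEMENT);
audioSample.copyTo(channelData, { planeIndex: 0, format: 'f32-planar' });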
1158
-
1159
-     /** Clones this audio sample. */
-     clone(): AudioSample {
-         if (this._closed) {
-             throw new Error('AudioSample is closed.');
-         }
-
-         if (isAudioData(this._data)) {
-             const sample = new AudioSample(this._data.clone());
-             sample.setTimestamp(this.timestamp); // Make sure the timestamp is precise (beyond microsecond accuracy)
-
-             return sample;
-         } else {
-             return new AudioSample({
-                 format: this.format,
-                 sampleRate: this.sampleRate,
-                 numberOfFrames: this.numberOfFrames,
-                 numberOfChannels: this.numberOfChannels,
-                 timestamp: this.timestamp,
-                 data: this._data,
-             });
-         }
-     }
-
-     /**
-      * Closes this audio sample, releasing held resources. Audio samples should be closed as soon as they are not
-      * needed anymore.
-      */
-     close(): void {
-         if (this._closed) {
-             return;
-         }
-
-         if (isAudioData(this._data)) {
-             this._data.close();
-         } else {
-             this._data = new Uint8Array(0);
-         }
-
-         this._closed = true;
-     }
-
-     /**
-      * Converts this audio sample to an AudioData for use with the WebCodecs API. The AudioData returned by this
-      * method *must* be closed separately from this audio sample.
-      */
-     toAudioData() {
-         if (this._closed) {
-             throw new Error('AudioSample is closed.');
-         }
-
-         if (isAudioData(this._data)) {
-             if (this._data.timestamp === this.microsecondTimestamp) {
-                 // Timestamp matches, let's just return the data (but cloned)
-                 return this._data.clone();
-             } else {
-                 // It's impossible to simply change an AudioData's timestamp, so we'll need to create a new one
-                 if (formatIsPlanar(this.format)) {
-                     const size = this.allocationSize({ planeIndex: 0, format: this.format });
-                     const data = new ArrayBuffer(size * this.numberOfChannels);
-
-                     // We have to read out each plane individually
-                     for (let i = 0; i < this.numberOfChannels; i++) {
-                         this.copyTo(new Uint8Array(data, i * size, size), { planeIndex: i, format: this.format });
-                     }
-
-                     return new AudioData({
-                         format: this.format,
-                         sampleRate: this.sampleRate,
-                         numberOfFrames: this.numberOfFrames,
-                         numberOfChannels: this.numberOfChannels,
-                         timestamp: this.microsecondTimestamp,
-                         data,
-                     });
-                 } else {
-                     const data = new ArrayBuffer(this.allocationSize({ planeIndex: 0, format: this.format }));
-                     this.copyTo(data, { planeIndex: 0, format: this.format });
-
-                     return new AudioData({
-                         format: this.format,
-                         sampleRate: this.sampleRate,
-                         numberOfFrames: this.numberOfFrames,
-                         numberOfChannels: this.numberOfChannels,
-                         timestamp: this.microsecondTimestamp,
-                         data,
-                     });
-                 }
-             }
-         } else {
-             return new AudioData({
-                 format: this.format,
-                 sampleRate: this.sampleRate,
-                 numberOfFrames: this.numberOfFrames,
-                 numberOfChannels: this.numberOfChannels,
-                 timestamp: this.microsecondTimestamp,
-                 data: this._data,
-             });
-         }
-     }
-
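As the doc comment above stresses, the returned AudioData must be closed separately from the sample. A minimal sketch of feeding it to a WebCodecs AudioEncoder, assuming `sample` is an open AudioSample and `encoder` is an already-configured AudioEncoder (both hypothetical):

    const audioData = sample.toAudioData();
    encoder.encode(audioData);
    audioData.close(); // close the AudioData independently of the AudioSample
    sample.close();    // release the sample once it is no longer needed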
-     /** Convert this audio sample to an AudioBuffer for use with the Web Audio API. */
-     toAudioBuffer() {
-         if (this._closed) {
-             throw new Error('AudioSample is closed.');
-         }
-
-         const audioBuffer = new AudioBuffer({
-             numberOfChannels: this.numberOfChannels,
-             length: this.numberOfFrames,
-             sampleRate: this.sampleRate,
-         });
-
-         const dataBytes = new Float32Array(this.allocationSize({ planeIndex: 0, format: 'f32-planar' }) / 4);
-
-         for (let i = 0; i < this.numberOfChannels; i++) {
-             this.copyTo(dataBytes, { planeIndex: i, format: 'f32-planar' });
-             audioBuffer.copyToChannel(dataBytes, i);
-         }
-
-         return audioBuffer;
-     }
-
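A minimal sketch of playing the converted buffer through the Web Audio API, assuming the code runs where an AudioContext can be constructed and `sample` is an open AudioSample (hypothetical):

    const ctx = new AudioContext();
    const source = ctx.createBufferSource();
    source.buffer = sample.toAudioBuffer(); // planar f32 copy of the sample's data
    source.connect(ctx.destination);
    source.start();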
-     /** Sets the presentation timestamp of this audio sample, in seconds. */
-     setTimestamp(newTimestamp: number) {
-         if (!Number.isFinite(newTimestamp)) {
-             throw new TypeError('newTimestamp must be a number.');
-         }
-
-         // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion
-         (this.timestamp as number) = newTimestamp;
-     }
-
-     /** Calls `.close()`. */
-     [Symbol.dispose]() {
-         this.close();
-     }
-
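Because the class implements [Symbol.dispose], it also works with explicit resource management. A sketch, assuming a TypeScript 5.2+ toolchain with `using` support and reusing the constructor fields visible in clone() above (values chosen for illustration):

    {
        using sample = new AudioSample({
            format: 'f32',
            sampleRate: 48000,
            numberOfFrames: 480,
            numberOfChannels: 2,
            timestamp: 0,
            data: new Float32Array(480 * 2), // 10 ms of interleaved stereo silence
        });
        // ...use the sample...
    } // sample.close() runs automatically here via [Symbol.dispose]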
-     /** @internal */
-     static* _fromAudioBuffer(audioBuffer: AudioBuffer, timestamp: number) {
-         if (!(audioBuffer instanceof AudioBuffer)) {
-             throw new TypeError('audioBuffer must be an AudioBuffer.');
-         }
-
-         const MAX_FLOAT_COUNT = 48000 * 5; // 5 seconds of mono 48 kHz audio per sample
-
-         const numberOfChannels = audioBuffer.numberOfChannels;
-         const sampleRate = audioBuffer.sampleRate;
-         const totalFrames = audioBuffer.length;
-         const maxFramesPerChunk = Math.floor(MAX_FLOAT_COUNT / numberOfChannels);
-
-         let currentRelativeFrame = 0;
-         let remainingFrames = totalFrames;
-
-         // Create AudioSamples in a chunked fashion so we don't create huge Float32Arrays
-         while (remainingFrames > 0) {
-             const framesToCopy = Math.min(maxFramesPerChunk, remainingFrames);
-             const chunkData = new Float32Array(numberOfChannels * framesToCopy);
-
-             for (let channel = 0; channel < numberOfChannels; channel++) {
-                 audioBuffer.copyFromChannel(
-                     chunkData.subarray(channel * framesToCopy, (channel + 1) * framesToCopy),
-                     channel,
-                     currentRelativeFrame,
-                 );
-             }
-
-             yield new AudioSample({
-                 format: 'f32-planar',
-                 sampleRate,
-                 numberOfFrames: framesToCopy,
-                 numberOfChannels,
-                 timestamp: timestamp + currentRelativeFrame / sampleRate,
-                 data: chunkData,
-             });
-
-             currentRelativeFrame += framesToCopy;
-             remainingFrames -= framesToCopy;
-         }
-     }
-
-     /**
-      * Creates AudioSamples from an AudioBuffer, starting at the given timestamp in seconds. Typically creates exactly
-      * one sample, but may create multiple if the AudioBuffer is exceedingly large.
-      */
-     static fromAudioBuffer(audioBuffer: AudioBuffer, timestamp: number) {
-         if (!(audioBuffer instanceof AudioBuffer)) {
-             throw new TypeError('audioBuffer must be an AudioBuffer.');
-         }
-
-         const MAX_FLOAT_COUNT = 48000 * 5; // 5 seconds of mono 48 kHz audio per sample
-
-         const numberOfChannels = audioBuffer.numberOfChannels;
-         const sampleRate = audioBuffer.sampleRate;
-         const totalFrames = audioBuffer.length;
-         const maxFramesPerChunk = Math.floor(MAX_FLOAT_COUNT / numberOfChannels);
-
-         let currentRelativeFrame = 0;
-         let remainingFrames = totalFrames;
-
-         const result: AudioSample[] = [];
-
-         // Create AudioSamples in a chunked fashion so we don't create huge Float32Arrays
-         while (remainingFrames > 0) {
-             const framesToCopy = Math.min(maxFramesPerChunk, remainingFrames);
-             const chunkData = new Float32Array(numberOfChannels * framesToCopy);
-
-             for (let channel = 0; channel < numberOfChannels; channel++) {
-                 audioBuffer.copyFromChannel(
-                     chunkData.subarray(channel * framesToCopy, (channel + 1) * framesToCopy),
-                     channel,
-                     currentRelativeFrame,
-                 );
-             }
-
-             const audioSample = new AudioSample({
-                 format: 'f32-planar',
-                 sampleRate,
-                 numberOfFrames: framesToCopy,
-                 numberOfChannels,
-                 timestamp: timestamp + currentRelativeFrame / sampleRate,
-                 data: chunkData,
-             });
-
-             result.push(audioSample);
-
-             currentRelativeFrame += framesToCopy;
-             remainingFrames -= framesToCopy;
-         }
-
-         return result;
-     }
- }
-
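A sketch of fromAudioBuffer in context, assuming `offlineCtx` is an OfflineAudioContext that was set up elsewhere (hypothetical):

    const audioBuffer = await offlineCtx.startRendering();
    const samples = AudioSample.fromAudioBuffer(audioBuffer, 0);
    for (const sample of samples) {
        // ...hand each chunk to an encoder or further conversion step...
        sample.close();
    }

For typical buffer lengths this yields a single sample; only buffers holding roughly more than 48000 * 5 floats in total (frames times channels) are split into multiple samples, per the chunking loop above.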
- const getBytesPerSample = (format: AudioSampleFormat): number => {
-     switch (format) {
-         case 'u8':
-         case 'u8-planar':
-             return 1;
-         case 's16':
-         case 's16-planar':
-             return 2;
-         case 's32':
-         case 's32-planar':
-             return 4;
-         case 'f32':
-         case 'f32-planar':
-             return 4;
-         default:
-             throw new Error('Unknown AudioSampleFormat');
-     }
- };
-
- const formatIsPlanar = (format: AudioSampleFormat): boolean => {
-     switch (format) {
-         case 'u8-planar':
-         case 's16-planar':
-         case 's32-planar':
-         case 'f32-planar':
-             return true;
-         default:
-             return false;
-     }
- };
-
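Together these two helpers drive the requiredSize check in copyTo above: an interleaved copy needs numberOfFrames * numberOfChannels * bytesPerSample bytes, while a planar copy needs numberOfFrames * bytesPerSample per plane. A quick worked example (the helpers are module-private, so this is for illustration only):

    const frames = 1024;
    const channels = 2;
    const interleavedBytes = frames * channels * getBytesPerSample('s16');  // 1024 * 2 * 2 = 4096
    const bytesPerPlane = frames * getBytesPerSample('s16-planar');         // 1024 * 2 = 2048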
- const getReadFunction = (format: AudioSampleFormat): (view: DataView, offset: number) => number => {
-     switch (format) {
-         case 'u8':
-         case 'u8-planar':
-             return (view, offset) => (view.getUint8(offset) - 128) / 128;
-         case 's16':
-         case 's16-planar':
-             return (view, offset) => view.getInt16(offset, true) / 32768;
-         case 's32':
-         case 's32-planar':
-             return (view, offset) => view.getInt32(offset, true) / 2147483648;
-         case 'f32':
-         case 'f32-planar':
-             return (view, offset) => view.getFloat32(offset, true);
-     }
- };
-
- const getWriteFunction = (format: AudioSampleFormat): (view: DataView, offset: number, value: number) => void => {
-     switch (format) {
-         case 'u8':
-         case 'u8-planar':
-             return (view, offset, value) =>
-                 view.setUint8(offset, clamp((value + 1) * 127.5, 0, 255));
-         case 's16':
-         case 's16-planar':
-             return (view, offset, value) =>
-                 view.setInt16(offset, clamp(Math.round(value * 32767), -32768, 32767), true);
-         case 's32':
-         case 's32-planar':
-             return (view, offset, value) =>
-                 view.setInt32(offset, clamp(Math.round(value * 2147483647), -2147483648, 2147483647), true);
-         case 'f32':
-         case 'f32-planar':
-             return (view, offset, value) => view.setFloat32(offset, value, true);
-     }
- };
-
- const isAudioData = (x: unknown): x is AudioData => {
-     return typeof AudioData !== 'undefined' && x instanceof AudioData;
- };
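These read and write helpers define the normalization convention: integer samples map to floats by dividing by the type's magnitude on read (128 for u8 after recentering, 32768 for s16, 2147483648 for s32), and floats are written back with rounding and clamping. A round-trip sketch using the module-private helpers above, for illustration only:

    const view = new DataView(new ArrayBuffer(2));
    getWriteFunction('s16')(view, 0, 0.5);            // stores Math.round(0.5 * 32767) = 16384
    const restored = getReadFunction('s16')(view, 0); // 16384 / 32768 = 0.5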