webcodecs-utils 0.2.5 → 0.2.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +142 -0
- package/dist/audio/extract-channels.d.ts +45 -0
- package/dist/audio/extract-channels.d.ts.map +1 -0
- package/dist/audio/extract-channels.js +25 -0
- package/dist/audio/get-sample-rate.d.ts +19 -0
- package/dist/audio/get-sample-rate.d.ts.map +1 -0
- package/dist/audio/get-sample-rate.js +14 -0
- package/dist/audio/mp3.d.ts +162 -0
- package/dist/audio/mp3.d.ts.map +1 -0
- package/dist/audio/mp3.js +173 -0
- package/dist/demux/example-muxer.d.ts +74 -0
- package/dist/demux/example-muxer.d.ts.map +1 -0
- package/dist/demux/example-muxer.js +40 -0
- package/dist/demux/get-chunks.d.ts +81 -0
- package/dist/demux/get-chunks.d.ts.map +1 -0
- package/dist/demux/get-chunks.js +88 -0
- package/dist/demux/mp4-demuxer.d.ts +84 -0
- package/dist/demux/mp4-demuxer.d.ts.map +1 -0
- package/dist/demux/mp4-demuxer.js +215 -0
- package/dist/demux/simple-demuxer.d.ts +49 -0
- package/dist/demux/simple-demuxer.d.ts.map +1 -0
- package/dist/demux/simple-demuxer.js +87 -0
- package/dist/in-memory-storage.d.ts +30 -0
- package/dist/in-memory-storage.d.ts.map +1 -0
- package/dist/in-memory-storage.js +54 -0
- package/dist/index.cjs +5 -298
- package/dist/index.d.ts +17 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +36 -13877
- package/dist/mux/simple-muxer.d.ts +47 -0
- package/dist/mux/simple-muxer.d.ts.map +1 -0
- package/dist/mux/simple-muxer.js +83 -0
- package/dist/polyfills/media-stream-track-processor.d.ts +5 -0
- package/dist/polyfills/media-stream-track-processor.d.ts.map +1 -0
- package/dist/polyfills/media-stream-track-processor.js +109 -0
- package/dist/streams/video-decode-stream.d.ts +11 -0
- package/dist/streams/video-decode-stream.d.ts.map +1 -0
- package/dist/streams/video-decode-stream.js +39 -0
- package/dist/streams/video-encode-stream.d.ts +15 -0
- package/dist/streams/video-encode-stream.d.ts.map +1 -0
- package/dist/streams/video-encode-stream.js +40 -0
- package/dist/streams/video-process-stream.d.ts +22 -0
- package/dist/streams/video-process-stream.d.ts.map +1 -0
- package/dist/streams/video-process-stream.js +21 -0
- package/dist/video/get-bitrate.d.ts +35 -0
- package/dist/video/get-bitrate.d.ts.map +1 -0
- package/dist/video/get-bitrate.js +12 -0
- package/dist/video/get-codec-string.d.ts +46 -0
- package/dist/video/get-codec-string.d.ts.map +1 -0
- package/dist/video/get-codec-string.js +195 -0
- package/dist/video/gpu-renderer.d.ts +108 -0
- package/dist/video/gpu-renderer.d.ts.map +1 -0
- package/dist/video/gpu-renderer.js +266 -0
- package/package.json +1 -1
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
/**
 * Simple wrapper around MediaBunny's Output for easier muxing
 * Provides WritableStream interface for use in pipelines
 *
 * **⚠️ Demo/Learning Only**: For production use, use MediaBunny directly.
 *
 * @example
 * ```typescript
 * const muxer = new SimpleMuxer({ video: 'avc', audio: 'aac' });
 *
 * await videoStream
 *   .pipeThrough(new VideoDecodeStream(config))
 *   .pipeThrough(new VideoEncodeStream(config))
 *   .pipeTo(muxer.videoSink());
 *
 * const blob = await muxer.finalize();
 * ```
 */
export declare class SimpleMuxer {
    private output;
    private videoSource?;
    private audioSource?;
    private started;
    /**
     * @param config.video - optional video codec for the output's video track
     * @param config.audio - optional audio codec for the output's audio track
     */
    constructor(config: {
        video?: 'avc' | 'hevc' | 'vp8' | 'vp9' | 'av1';
        audio?: 'aac' | 'opus' | 'mp3' | 'vorbis' | 'flac';
    });
    /**
     * Get a WritableStream for video chunks with metadata.
     * Use with .pipeTo() in a pipeline. Each written item carries the
     * EncodedVideoChunk plus the optional metadata from VideoEncoder's
     * output callback.
     * @throws if no video track was configured in the constructor.
     */
    videoSink(): WritableStream<{
        chunk: EncodedVideoChunk;
        meta?: EncodedVideoChunkMetadata;
    }>;
    /**
     * Get a WritableStream for audio chunks.
     * Use with .pipeTo() in a pipeline.
     * @throws if no audio track was configured in the constructor.
     */
    audioSink(): WritableStream<EncodedAudioChunk>;
    /**
     * Finalize the muxer and return the output as a Blob (type "video/mp4").
     * Call this after all sink streams have finished writing.
     * @throws if no data was written to the muxer.
     */
    finalize(): Promise<Blob>;
}
//# sourceMappingURL=simple-muxer.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"simple-muxer.d.ts","sourceRoot":"","sources":["../../src/mux/simple-muxer.ts"],"names":[],"mappings":"AAuBA;;;;;;;;;;;;;;;;;GAiBG;AACH,qBAAa,WAAW;IACtB,OAAO,CAAC,MAAM,CAAS;IACvB,OAAO,CAAC,WAAW,CAAC,CAA2B;IAC/C,OAAO,CAAC,WAAW,CAAC,CAA2B;IAC/C,OAAO,CAAC,OAAO,CAAS;gBAEZ,MAAM,EAAE;QAClB,KAAK,CAAC,EAAE,KAAK,GAAG,MAAM,GAAG,KAAK,GAAG,KAAK,GAAG,KAAK,CAAC;QAC/C,KAAK,CAAC,EAAE,KAAK,GAAG,MAAM,GAAG,KAAK,GAAG,QAAQ,GAAG,MAAM,CAAC;KACpD;IAmBD;;;OAGG;IACH,SAAS,IAAI,cAAc,CAAC;QAAE,KAAK,EAAE,iBAAiB,CAAC;QAAC,IAAI,CAAC,EAAE,yBAAyB,CAAA;KAAE,CAAC;IA8B3F;;;OAGG;IACH,SAAS,IAAI,cAAc,CAAC,iBAAiB,CAAC;IA8B9C;;;OAGG;IACG,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;CAQhC"}
|
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
import {
  BufferTarget,
  Output,
  Mp4OutputFormat,
  EncodedVideoPacketSource,
  EncodedAudioPacketSource,
  EncodedPacket
} from "mediabunny";

// One-shot flag so the demo warning is logged at most once per session.
let demoWarningShown = false;

// Warn that SimpleMuxer is demo-quality; logged once.
function warnDemoOnly() {
  if (!demoWarningShown) {
    console.warn(
      "⚠️ Demo/Learning Function: SimpleMuxer is intended for demos and learning purposes only. For production use, please use MediaBunny directly (https://mediabunny.dev/) for more features and better control."
    );
    demoWarningShown = true;
  }
}

/**
 * Simple wrapper around MediaBunny's Output for easier muxing.
 * Provides WritableStream sinks (videoSink() / audioSink()) so encoded
 * chunks can be piped straight into an in-memory MP4, then retrieved as
 * a Blob via finalize().
 *
 * **⚠️ Demo/Learning Only**: for production use MediaBunny directly.
 */
class SimpleMuxer {
  /**
   * @param config.video - optional video codec for the MP4 video track
   * @param config.audio - optional audio codec for the MP4 audio track
   */
  constructor(config) {
    this.started = false;
    // Memoized promise for output.start(); see _ensureStarted().
    this.startPromise = null;
    warnDemoOnly();
    const target = new BufferTarget();
    this.output = new Output({
      format: new Mp4OutputFormat(),
      target
    });
    if (config.video) {
      this.videoSource = new EncodedVideoPacketSource(config.video);
      this.output.addVideoTrack(this.videoSource);
    }
    if (config.audio) {
      this.audioSource = new EncodedAudioPacketSource(config.audio);
      this.output.addAudioTrack(this.audioSource);
    }
  }
  /**
   * Start the underlying Output exactly once, even when the video and audio
   * sinks begin concurrently. The in-flight promise is memoized so a second
   * caller awaits the same start() instead of triggering a second one — the
   * previous plain-boolean check could race when both sinks started at once
   * and call output.start() twice.
   */
  _ensureStarted() {
    if (!this.startPromise) {
      this.startPromise = this.output.start().then(() => {
        this.started = true;
      });
    }
    return this.startPromise;
  }
  /**
   * Get a WritableStream for video chunks with metadata.
   * Use with .pipeTo() in a pipeline.
   * @throws if no video track was configured.
   */
  videoSink() {
    if (!this.videoSource) {
      throw new Error("SimpleMuxer: No video track configured");
    }
    const source = this.videoSource;
    return new WritableStream({
      start: async () => {
        await this._ensureStarted();
      },
      write: ({ chunk, meta }) => {
        // Convert the WebCodecs chunk into a MediaBunny packet and append it.
        const packet = EncodedPacket.fromEncodedChunk(chunk);
        source.add(packet, meta);
      },
      close: () => {
        console.log("Video sink closed");
      },
      abort: (reason) => {
        console.error("Video sink aborted:", reason);
      }
    });
  }
  /**
   * Get a WritableStream for audio chunks.
   * Use with .pipeTo() in a pipeline.
   * @throws if no audio track was configured.
   */
  audioSink() {
    if (!this.audioSource) {
      throw new Error("SimpleMuxer: No audio track configured");
    }
    const source = this.audioSource;
    return new WritableStream({
      start: async () => {
        await this._ensureStarted();
      },
      write: (chunk) => {
        const packet = EncodedPacket.fromEncodedChunk(chunk);
        source.add(packet);
      },
      close: () => {
        console.log("Audio sink closed");
      },
      abort: (reason) => {
        console.error("Audio sink aborted:", reason);
      }
    });
  }
  /**
   * Finalize the muxer and return the MP4 as a Blob.
   * Call this after all sink streams have finished writing.
   * @throws if no data was ever written to the muxer.
   */
  async finalize() {
    await this.output.finalize();
    const buffer = this.output.target.buffer;
    if (!buffer) {
      throw new Error("SimpleMuxer: No data was written to the muxer");
    }
    return new Blob([buffer], { type: "video/mp4" });
  }
}

export { SimpleMuxer };
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"media-stream-track-processor.d.ts","sourceRoot":"","sources":["../../src/polyfills/media-stream-track-processor.ts"],"names":[],"mappings":"AAoLA,eAAO,MAAM,yBAAyB;;;CAAsE,CAAC"}
|
|
@@ -0,0 +1,109 @@
|
|
|
1
|
+
// Source for an AudioWorkletProcessor, inlined as a string so it can be
// loaded from a Blob URL without shipping a separate worklet file.
// NOTE: this is runtime code executed inside the AudioWorklet scope;
// `sampleRate` and `registerProcessor` are worklet globals there.
const b = `
class AudioCaptureProcessor extends AudioWorkletProcessor {
  sampleCount = 0;

  process(inputs) {
    if (inputs.length === 0 || inputs[0].length === 0) {
      return true;
    }

    const channels = inputs[0];
    const timestamp = (this.sampleCount / sampleRate) * 1_000_000; // Convert to microseconds

    this.port.postMessage({
      timestamp,
      channels: channels.map(channel => channel.slice()),
    });

    this.sampleCount += channels[0].length;
    return true;
  }
}

registerProcessor('audio-capture-processor', AudioCaptureProcessor);
`;
// Polyfill for MediaStreamTrackProcessor (minified name: f).
// Exposes `readable`, a ReadableStream of VideoFrame (video tracks) or
// AudioData (audio tracks) objects captured from the given track.
class f {
  // a = the MediaStreamTrack to capture from.
  constructor({ track: a }) {
    const o = a.getSettings();
    if (!o)
      throw new Error("track has no settings");
    if (a.kind === "video")
      this.readable = this.createVideoStream(a, o);
    else if (a.kind === "audio")
      this.readable = this.createAudioStream(a, o);
    else
      throw new Error(`Unsupported track kind: ${a.kind}`);
  }
  // Captures video by playing the track in a detached <video> element and
  // snapshotting it into VideoFrames at roughly the track's frame rate.
  // Locals: a = track, o = track settings, e = <video> element,
  // t = last capture time (ms), n = cached frame duration (µs),
  // l = target frame rate (defaults to 30 when settings omit it).
  createVideoStream(a, o) {
    let e, t, n;
    const l = o.frameRate ?? 30;
    return new ReadableStream({
      async start() {
        e = document.createElement("video"), e.srcObject = new MediaStream([a]), await Promise.all([
          e.play(),
          new Promise((s) => {
            e.onloadedmetadata = s;
          })
        ]), t = performance.now();
      },
      async pull(s) {
        for (; ; ) {
          const i = performance.now();
          // Spin on requestAnimationFrame until one frame interval has
          // elapsed since the previous capture.
          if (i - t < 1e3 / l) {
            await new Promise((u) => requestAnimationFrame(u));
            continue;
          }
          // NOTE(review): `n` is set once from the first observed interval and
          // reused for every later frame, so all frames report the same
          // duration — confirm this freezing is intentional.
          // Timestamps are performance.now() in microseconds, so they do not
          // start at zero.
          const c = n ?? Math.round((i - t) * 1e3);
          n = c, t = i, s.enqueue(new VideoFrame(e, {
            timestamp: t * 1e3,
            duration: c
          }));
          break;
        }
      }
    });
  }
  // Captures audio via an AudioWorklet that posts raw per-channel samples
  // back to the main thread, where they are re-packed into AudioData.
  // Locals: e = AudioContext, t = AudioWorkletNode, n = worklet Blob URL.
  createAudioStream(a, o) {
    let e, t, n;
    return new ReadableStream({
      async start(l) {
        e = new AudioContext({
          sampleRate: o.sampleRate || 48e3
        });
        const s = new MediaStreamAudioSourceNode(e, {
          mediaStream: new MediaStream([a])
        }), i = new Blob([b], { type: "application/javascript" });
        n = URL.createObjectURL(i), await e.audioWorklet.addModule(n), t = new AudioWorkletNode(e, "audio-capture-processor", {
          numberOfInputs: 1,
          numberOfOutputs: 0,
          channelCount: o.channelCount || 2
        }), s.connect(t), t.port.onmessage = (c) => {
          // Re-pack the per-channel Float32Arrays into one contiguous buffer,
          // channel 0 first then channel 1, ..., as required by the
          // "f32-planar" AudioData format.
          // u = timestamp (µs), d = channel arrays, m = frames per channel,
          // p = channel count, h = packed planar buffer.
          const { timestamp: u, channels: w } = c.data, d = w, m = d[0].length, p = d.length, S = m * p, h = new Float32Array(S);
          for (let r = 0; r < p; r++)
            h.set(d[r], r * m);
          try {
            const r = new AudioData({
              format: "f32-planar",
              sampleRate: e.sampleRate,
              numberOfFrames: m,
              numberOfChannels: p,
              timestamp: u,
              data: h
            });
            l.enqueue(r);
          } catch (r) {
            console.error("Failed to create AudioData:", r);
          }
        };
      },
      cancel() {
        // Tear down the audio graph and release the worklet Blob URL.
        t && (t.disconnect(), t.port.onmessage = null), e && e.close(), n && URL.revokeObjectURL(n);
      }
    });
  }
}
// Install the polyfill only when the platform lacks a native implementation,
// then export whichever implementation is active.
self.MediaStreamTrackProcessor || (self.MediaStreamTrackProcessor = f);
const g = self.MediaStreamTrackProcessor || f;
export {
  g as MediaStreamTrackProcessor
};
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
/**
 * TransformStream that decodes EncodedVideoChunks into VideoFrames.
 * Handles backpressure from both the decoder's internal queue and the
 * downstream readable side.
 */
export declare class VideoDecodeStream extends TransformStream<EncodedVideoChunk, VideoFrame> {
    /**
     * @param config - passed directly to VideoDecoder.configure()
     * @param options - backpressure tuning knobs
     */
    constructor(config: VideoDecoderConfig, options?: {
        /** Readable-side high water mark, in frames (default 10). */
        highWaterMark?: number;
        /** Max chunks allowed in the decoder's internal queue (default 20). */
        maxDecodeQueueSize?: number;
    });
}
//# sourceMappingURL=video-decode-stream.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"video-decode-stream.d.ts","sourceRoot":"","sources":["../../src/streams/video-decode-stream.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,qBAAa,iBAAkB,SAAQ,eAAe,CAAC,iBAAiB,EAAE,UAAU,CAAC;gBAEjF,MAAM,EAAE,kBAAkB,EAC1B,OAAO,CAAC,EAAE;QACR,aAAa,CAAC,EAAE,MAAM,CAAC;QACvB,kBAAkB,CAAC,EAAE,MAAM,CAAC;KAC7B;CAkDJ"}
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
/**
 * TransformStream that decodes EncodedVideoChunks into VideoFrames.
 * Applies backpressure in two directions: it waits while the decoder's
 * internal queue is full, and while the downstream readable side is over
 * its high water mark.
 */
class VideoDecodeStream extends TransformStream {
  /**
   * @param config - passed directly to VideoDecoder.configure()
   * @param options.highWaterMark - readable-side high water mark (default 10)
   * @param options.maxDecodeQueueSize - max pending chunks inside the decoder (default 20)
   */
  constructor(config, options) {
    const highWaterMark = options?.highWaterMark ?? 10;
    const maxQueueSize = options?.maxDecodeQueueSize ?? 20;
    let decoder;
    super(
      {
        start(controller) {
          decoder = new VideoDecoder({
            output(frame) {
              controller.enqueue(frame);
            },
            error(err) {
              console.error("VideoDecoder error:", err);
              controller.error(err);
            }
          });
          decoder.configure(config);
        },
        async transform(chunk, controller) {
          // Throttle while the decoder has too many chunks queued.
          while (decoder.decodeQueueSize >= maxQueueSize) {
            await new Promise((resolve) => setTimeout(resolve, 10));
          }
          // Throttle while downstream is over its high water mark.
          while (controller.desiredSize !== null && controller.desiredSize < 0) {
            await new Promise((resolve) => setTimeout(resolve, 10));
          }
          decoder.decode(chunk);
        },
        async flush() {
          // Drain any remaining frames, then release the decoder.
          await decoder.flush();
          try {
            decoder.close();
          } catch (err) {
            console.error("Error closing decoder:", err);
          }
        }
      },
      { highWaterMark }
    );
  }
}

export { VideoDecodeStream };
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
/**
 * TransformStream that encodes VideoFrames into EncodedVideoChunks with
 * metadata. Each output item is a `{ chunk, meta }` pair as produced by
 * VideoEncoder's output callback. Handles backpressure from both the
 * encoder's internal queue and the downstream readable side.
 */
export declare class VideoEncodeStream extends TransformStream<VideoFrame, {
    chunk: EncodedVideoChunk;
    meta?: EncodedVideoChunkMetadata;
}> {
    /**
     * @param config - passed directly to VideoEncoder.configure()
     * @param options - backpressure and key-frame tuning knobs
     */
    constructor(config: VideoEncoderConfig, options?: {
        /** Readable-side high water mark (default 10). */
        highWaterMark?: number;
        /** Max frames allowed in the encoder's internal queue (default 20). */
        maxEncodeQueueSize?: number;
        /** Request a key frame every N frames, frame 0 included (default 60). */
        keyFrameInterval?: number;
    });
}
//# sourceMappingURL=video-encode-stream.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"video-encode-stream.d.ts","sourceRoot":"","sources":["../../src/streams/video-encode-stream.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,qBAAa,iBAAkB,SAAQ,eAAe,CACpD,UAAU,EACV;IAAE,KAAK,EAAE,iBAAiB,CAAC;IAAC,IAAI,CAAC,EAAE,yBAAyB,CAAA;CAAE,CAC/D;gBAEG,MAAM,EAAE,kBAAkB,EAC1B,OAAO,CAAC,EAAE;QACR,aAAa,CAAC,EAAE,MAAM,CAAC;QACvB,kBAAkB,CAAC,EAAE,MAAM,CAAC;QAC5B,gBAAgB,CAAC,EAAE,MAAM,CAAC;KAC3B;CA0DJ"}
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
/**
 * TransformStream that encodes VideoFrames into `{ chunk, meta }` pairs.
 * Applies backpressure in two directions: it waits while the encoder's
 * internal queue is full, and while the downstream readable side is over
 * its high water mark. Input frames are closed after being submitted.
 */
class VideoEncodeStream extends TransformStream {
  /**
   * @param config - passed directly to VideoEncoder.configure()
   * @param options.highWaterMark - readable-side high water mark (default 10)
   * @param options.maxEncodeQueueSize - max pending frames inside the encoder (default 20)
   * @param options.keyFrameInterval - request a key frame every N frames (default 60)
   */
  constructor(config, options) {
    const highWaterMark = options?.highWaterMark ?? 10;
    const maxQueueSize = options?.maxEncodeQueueSize ?? 20;
    const keyFrameInterval = options?.keyFrameInterval ?? 60;
    let encoder;
    let frameIndex = 0;
    super(
      {
        start(controller) {
          encoder = new VideoEncoder({
            output(chunk, meta) {
              controller.enqueue({ chunk, meta });
            },
            error(err) {
              console.error("VideoEncoder error:", err);
              controller.error(err);
            }
          });
          encoder.configure(config);
        },
        async transform(frame, controller) {
          // Throttle while the encoder has too many frames queued.
          while (encoder.encodeQueueSize >= maxQueueSize) {
            await new Promise((resolve) => setTimeout(resolve, 10));
          }
          // Throttle while downstream is over its high water mark.
          while (controller.desiredSize !== null && controller.desiredSize < 0) {
            await new Promise((resolve) => setTimeout(resolve, 10));
          }
          // Force a key frame every keyFrameInterval frames (frame 0 included).
          const wantKeyFrame = frameIndex % keyFrameInterval === 0;
          encoder.encode(frame, { keyFrame: wantKeyFrame });
          frameIndex += 1;
          // Release our reference to the frame once it has been submitted.
          frame.close();
        },
        async flush() {
          // Drain any remaining output, then release the encoder.
          await encoder.flush();
          try {
            encoder.close();
          } catch (err) {
            console.error("Error closing encoder:", err);
          }
        }
      },
      { highWaterMark }
    );
  }
}

export { VideoEncodeStream };
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
/**
 * TransformStream that applies a custom processing function to each VideoFrame.
 * The transform function receives a frame and should return a processed frame
 * (or the same frame unchanged); when a new frame is returned, the input frame
 * is closed by the stream.
 *
 * @example
 * ```typescript
 * const upscaler = new VideoProcessStream(async (frame) => {
 *   await websr.render(frame);
 *   const upscaledFrame = new VideoFrame(canvas, {
 *     timestamp: frame.timestamp,
 *     duration: frame.duration
 *   });
 *   return upscaledFrame;
 * });
 * ```
 */
export declare class VideoProcessStream extends TransformStream<VideoFrame, VideoFrame> {
    /**
     * @param transformFn - maps an input frame to an output frame (sync or async)
     * @param options - stream tuning knobs
     */
    constructor(transformFn: (frame: VideoFrame) => Promise<VideoFrame> | VideoFrame, options?: {
        /** Readable-side high water mark (default 5). */
        highWaterMark?: number;
    });
}
//# sourceMappingURL=video-process-stream.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"video-process-stream.d.ts","sourceRoot":"","sources":["../../src/streams/video-process-stream.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;GAeG;AACH,qBAAa,kBAAmB,SAAQ,eAAe,CAAC,UAAU,EAAE,UAAU,CAAC;gBAE3E,WAAW,EAAE,CAAC,KAAK,EAAE,UAAU,KAAK,OAAO,CAAC,UAAU,CAAC,GAAG,UAAU,EACpE,OAAO,CAAC,EAAE;QACR,aAAa,CAAC,EAAE,MAAM,CAAC;KACxB;CA4BJ"}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
class h extends TransformStream {
|
|
2
|
+
constructor(a, s) {
|
|
3
|
+
const o = (s == null ? void 0 : s.highWaterMark) ?? 5;
|
|
4
|
+
super(
|
|
5
|
+
{
|
|
6
|
+
async transform(e, c) {
|
|
7
|
+
try {
|
|
8
|
+
const r = await a(e);
|
|
9
|
+
c.enqueue(r), r !== e && e.close();
|
|
10
|
+
} catch (r) {
|
|
11
|
+
console.error("VideoProcessStream error:", r), e.close(), c.error(r);
|
|
12
|
+
}
|
|
13
|
+
}
|
|
14
|
+
},
|
|
15
|
+
{ highWaterMark: o }
|
|
16
|
+
);
|
|
17
|
+
}
|
|
18
|
+
}
|
|
19
|
+
export {
|
|
20
|
+
h as VideoProcessStream
|
|
21
|
+
};
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
/**
 * Calculate optimal bitrate for video encoding based on resolution, framerate, and quality.
 *
 * This function uses a simple heuristic formula: `pixels * fps * quality_factor`
 * (so the quality factor is effectively bits per pixel per frame).
 *
 * @param width - Video width in pixels
 * @param height - Video height in pixels
 * @param fps - Target framerate (default: 30)
 * @param quality - Quality preset: 'low' (0.05), 'good' (0.08), 'high' (0.10), 'very-high' (0.15)
 * @returns Bitrate in bits per second (bps)
 *
 * @example
 * ```typescript
 * // 1080p at 30fps, good quality
 * const bitrate = getBitrate(1920, 1080, 30, 'good');
 * // Returns: ~4.9 Mbps
 *
 * encoder.configure({
 *   codec: 'avc1.42003e',
 *   width: 1920,
 *   height: 1080,
 *   bitrate,
 *   framerate: 30
 * });
 * ```
 *
 * @example
 * ```typescript
 * // 4K at 60fps, high quality
 * const bitrate = getBitrate(3840, 2160, 60, 'high');
 * // Returns: ~49.7 Mbps
 * ```
 */
export declare function getBitrate(width: number, height: number, fps?: number, quality?: 'low' | 'good' | 'high' | 'very-high'): number;
//# sourceMappingURL=get-bitrate.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"get-bitrate.d.ts","sourceRoot":"","sources":["../../src/video/get-bitrate.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAgCG;AACH,wBAAgB,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,GAAG,GAAE,MAAW,EAAE,OAAO,GAAE,KAAK,GAAG,MAAM,GAAG,MAAM,GAAG,WAAoB,UAUlI"}
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
/**
 * Generate proper codec strings with profiles and levels for video encoding.
 *
 * This function automatically determines the appropriate profile and level based on
 * video resolution and bitrate. The codec strings follow the format required by
 * VideoEncoder.configure() and are essential for proper encoding. The returned
 * string can be passed directly as the `codec` field of an encoder config.
 *
 * Supported codecs:
 * - **AVC (H.264)**: High Profile, levels 1-6.2
 * - **HEVC (H.265)**: Main Profile, levels 1-6.2 (Low/High Tier)
 * - **VP8**: Simple codec string 'vp8'
 * - **VP9**: Profile 0, 8-bit, levels 1-6.2
 * - **AV1**: Main Profile, levels 2.0-6.3 (Main/High Tier)
 *
 * @param codec - Codec type: 'avc', 'hevc', 'vp8', 'vp9', or 'av1'
 * @param width - Video width in pixels
 * @param height - Video height in pixels
 * @param bitrate - Target bitrate in bits per second
 * @returns Properly formatted codec string (e.g. 'avc1.640028' for H.264)
 *
 * @example
 * ```typescript
 * // H.264 for 1080p
 * const codecString = getCodecString('avc', 1920, 1080, 5000000);
 * // Returns: 'avc1.640028' (High Profile, Level 4.0)
 *
 * encoder.configure({
 *   codec: codecString,
 *   width: 1920,
 *   height: 1080,
 *   bitrate: 5000000
 * });
 * ```
 *
 * @example
 * ```typescript
 * // VP9 for 4K
 * const codecString = getCodecString('vp9', 3840, 2160, 20000000);
 * // Returns: 'vp09.00.50.08' (Profile 0, Level 5.0, 8-bit)
 * ```
 *
 * @remarks
 * Adapted from MediaBunny's codec.ts: https://github.com/Vanilagy/mediabunny/blob/main/src/codec.ts
 */
export declare function getCodecString(codec: 'avc' | 'hevc' | 'vp8' | 'vp9' | 'av1', width: number, height: number, bitrate: number): string;
//# sourceMappingURL=get-codec-string.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"get-codec-string.d.ts","sourceRoot":"","sources":["../../src/video/get-codec-string.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA2CG;AACH,wBAAgB,cAAc,CAAE,KAAK,EAAE,KAAK,GAAG,MAAM,GAAG,KAAK,GAAG,KAAK,GAAG,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,UAmK5H"}
|