@meframe/core 0.2.4 → 0.2.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/Meframe.d.ts +7 -1
- package/dist/Meframe.d.ts.map +1 -1
- package/dist/Meframe.js +7 -27
- package/dist/Meframe.js.map +1 -1
- package/dist/controllers/ExportController.d.ts +7 -1
- package/dist/controllers/ExportController.d.ts.map +1 -1
- package/dist/controllers/ExportController.js.map +1 -1
- package/dist/model/types.d.ts +7 -0
- package/dist/model/types.d.ts.map +1 -1
- package/dist/model/types.js.map +1 -1
- package/dist/orchestrator/AudioExportSession.d.ts +3 -1
- package/dist/orchestrator/AudioExportSession.d.ts.map +1 -1
- package/dist/orchestrator/AudioExportSession.js +28 -20
- package/dist/orchestrator/AudioExportSession.js.map +1 -1
- package/dist/orchestrator/AudioWindowPreparer.d.ts.map +1 -1
- package/dist/orchestrator/AudioWindowPreparer.js +23 -3
- package/dist/orchestrator/AudioWindowPreparer.js.map +1 -1
- package/dist/orchestrator/ExportScheduler.d.ts +5 -1
- package/dist/orchestrator/ExportScheduler.d.ts.map +1 -1
- package/dist/orchestrator/ExportScheduler.js +20 -2
- package/dist/orchestrator/ExportScheduler.js.map +1 -1
- package/dist/orchestrator/OnDemandVideoSession.js +2 -2
- package/dist/orchestrator/OnDemandVideoSession.js.map +1 -1
- package/dist/orchestrator/Orchestrator.d.ts +1 -1
- package/dist/orchestrator/Orchestrator.d.ts.map +1 -1
- package/dist/orchestrator/Orchestrator.js.map +1 -1
- package/dist/orchestrator/types.d.ts +1 -1
- package/dist/orchestrator/types.d.ts.map +1 -1
- package/dist/stages/compose/FrameRateConverter.d.ts +6 -2
- package/dist/stages/compose/FrameRateConverter.d.ts.map +1 -1
- package/dist/stages/compose/OfflineAudioMixer.d.ts.map +1 -1
- package/dist/stages/compose/OfflineAudioMixer.js +55 -28
- package/dist/stages/compose/OfflineAudioMixer.js.map +1 -1
- package/dist/stages/mux/MP4Muxer.d.ts +11 -2
- package/dist/stages/mux/MP4Muxer.d.ts.map +1 -1
- package/dist/stages/mux/MP4Muxer.js +32 -6
- package/dist/stages/mux/MP4Muxer.js.map +1 -1
- package/dist/stages/mux/MuxManager.d.ts +5 -1
- package/dist/stages/mux/MuxManager.d.ts.map +1 -1
- package/dist/stages/mux/MuxManager.js +15 -7
- package/dist/stages/mux/MuxManager.js.map +1 -1
- package/dist/types.d.ts +8 -0
- package/dist/types.d.ts.map +1 -1
- package/dist/utils/loop-utils.d.ts +16 -0
- package/dist/utils/loop-utils.d.ts.map +1 -0
- package/dist/utils/loop-utils.js +44 -0
- package/dist/utils/loop-utils.js.map +1 -0
- package/dist/workers/stages/compose/{video-compose.worker.CA2_Kpg-.js → video-compose.worker.KMZjuJuY.js} +47 -4
- package/dist/workers/stages/compose/video-compose.worker.KMZjuJuY.js.map +1 -0
- package/dist/workers/worker-manifest.json +1 -1
- package/package.json +1 -1
- package/dist/workers/stages/compose/video-compose.worker.CA2_Kpg-.js.map +0 -1
package/dist/stages/compose/OfflineAudioMixer.js
CHANGED
@@ -1,4 +1,5 @@
-import { hasAudioConfig } from "../../model/types.js";
+import { hasResourceId, hasAudioConfig } from "../../model/types.js";
+import { buildLoopedResourceSegments } from "../../utils/loop-utils.js";
 class OfflineAudioMixer {
   constructor(cacheManager, getModel) {
     this.cacheManager = cacheManager;
@@ -13,6 +14,14 @@ class OfflineAudioMixer {
       Math.ceil(Math.max(0, durationUs) / 1e6 * this.sampleRate)
     );
     const ctx = new OfflineAudioContext(this.numberOfChannels, frameCount, this.sampleRate);
+    const silent = ctx.createBuffer(1, frameCount, this.sampleRate);
+    const silentSource = ctx.createBufferSource();
+    silentSource.buffer = silent;
+    const silentGain = ctx.createGain();
+    silentGain.gain.value = 0;
+    silentSource.connect(silentGain);
+    silentGain.connect(ctx.destination);
+    silentSource.start(0);
     const clips = this.getClipsInWindow(windowStartUs, windowEndUs);
     for (const clip of clips) {
       const clipIntersectStartUs = Math.max(windowStartUs, clip.startUs);
@@ -21,36 +30,54 @@
       const clipRelativeEndUs = clipIntersectEndUs - clip.startUs;
       const clipModel = this.getModel()?.findClip(clip.clipId);
       const trimStartUs = clipModel?.trimStartUs ?? 0;
-      const
-      const
-
-
-
-
-
-
-
+      const loop = clipModel?.trackKind === "audio" && clipModel.loop === true;
+      const resourceDurationUs = clipModel && hasResourceId(clipModel) ? this.cacheManager.audioSampleCache.get(clipModel.resourceId)?.durationUs ?? 0 : 0;
+      let segments = buildLoopedResourceSegments({
+        clipRelativeStartUs,
+        clipRelativeEndUs,
+        trimStartUs,
+        resourceDurationUs,
+        loop
+      });
+      if (segments.length === 0 && clipRelativeEndUs > clipRelativeStartUs) {
+        segments = buildLoopedResourceSegments({
+          clipRelativeStartUs,
+          clipRelativeEndUs,
+          trimStartUs,
+          resourceDurationUs,
+          loop: false
+        });
       }
-      const
-
-
-
-
-
-
-
-
+      for (const seg of segments) {
+        const pcmData = this.cacheManager.getClipPCMWithMetadata(
+          clip.clipId,
+          seg.resourceStartUs,
+          seg.resourceEndUs
+        );
+        if (!pcmData || pcmData.planes.length === 0) {
+          continue;
+        }
+        const intersectFrames = pcmData.planes[0]?.length ?? 0;
+        if (intersectFrames === 0) {
+          continue;
+        }
+        const buffer = ctx.createBuffer(pcmData.planes.length, intersectFrames, pcmData.sampleRate);
+        for (let channel = 0; channel < pcmData.planes.length; channel++) {
+          const plane = pcmData.planes[channel];
+          if (plane) {
+            buffer.copyToChannel(new Float32Array(plane), channel);
+          }
         }
+        const source = ctx.createBufferSource();
+        source.buffer = buffer;
+        const gainNode = ctx.createGain();
+        gainNode.gain.value = clip.volume;
+        source.connect(gainNode);
+        gainNode.connect(ctx.destination);
+        const segmentStartUs = clip.startUs + seg.clipRelativeStartUs;
+        const startTime = (segmentStartUs - windowStartUs) / 1e6;
+        source.start(startTime);
       }
-      const source = ctx.createBufferSource();
-      source.buffer = buffer;
-      const gainNode = ctx.createGain();
-      gainNode.gain.value = clip.volume;
-      source.connect(gainNode);
-      gainNode.connect(ctx.destination);
-      const relativeStartUs = clipIntersectStartUs - windowStartUs;
-      const startTime = relativeStartUs / 1e6;
-      source.start(startTime);
     }
     const mixedBuffer = await ctx.startRendering();
     return mixedBuffer;
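Note: the block added above schedules a silent, zero-gain source spanning the whole window so the OfflineAudioContext always renders the full requested length, even when no clip reaches the tail of the window. A minimal standalone sketch of the same pattern (the function name and defaults are illustrative, not part of the package):

    // Keep an OfflineAudioContext "busy" for its entire length by scheduling an
    // inaudible source, so trailing silence is not truncated from the render.
    async function renderFixedLengthWindow(durationUs: number, sampleRate = 48_000): Promise<AudioBuffer> {
      const frameCount = Math.max(1, Math.ceil((Math.max(0, durationUs) / 1_000_000) * sampleRate));
      const ctx = new OfflineAudioContext(2, frameCount, sampleRate);
      const silent = ctx.createBuffer(1, frameCount, sampleRate); // all zeros
      const keepAlive = ctx.createBufferSource();
      keepAlive.buffer = silent;
      const mute = ctx.createGain();
      mute.gain.value = 0; // contributes nothing audible
      keepAlive.connect(mute);
      mute.connect(ctx.destination);
      keepAlive.start(0);
      // ...schedule the real clip sources here...
      return ctx.startRendering(); // always frameCount frames long
    }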
package/dist/stages/compose/OfflineAudioMixer.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"OfflineAudioMixer.js","sources":["../../../src/stages/compose/OfflineAudioMixer.ts"],"sourcesContent":["import type { TimeUs } from '../../model/types';\nimport { hasAudioConfig } from '../../model/types';\nimport type { CompositionModel } from '../../model';\nimport type { CacheManager } from '../../cache/CacheManager';\n\ninterface MixClipInfo {\n clipId: string;\n startUs: TimeUs;\n durationUs: TimeUs;\n volume: number;\n}\n\nexport class OfflineAudioMixer {\n private sampleRate = 48_000;\n private numberOfChannels = 2;\n\n constructor(\n private cacheManager: CacheManager,\n private getModel: () => CompositionModel | null\n ) {}\n\n async mix(windowStartUs: TimeUs, windowEndUs: TimeUs): Promise<AudioBuffer> {\n const durationUs = windowEndUs - windowStartUs;\n // Guard against invalid/empty ranges (can happen near timeline end or after clamping).\n // OfflineAudioContext requires length >= 1.\n const frameCount = Math.max(\n 1,\n Math.ceil((Math.max(0, durationUs) / 1_000_000) * this.sampleRate)\n );\n\n const ctx = new OfflineAudioContext(this.numberOfChannels, frameCount, this.sampleRate);\n\n const clips = this.getClipsInWindow(windowStartUs, windowEndUs);\n\n for (const clip of clips) {\n // Calculate clip-relative time range\n const clipIntersectStartUs = Math.max(windowStartUs, clip.startUs);\n const clipIntersectEndUs = Math.min(windowEndUs, clip.startUs + clip.durationUs);\n const clipRelativeStartUs = clipIntersectStartUs - clip.startUs;\n const clipRelativeEndUs = clipIntersectEndUs - clip.startUs;\n\n // Convert to resource time (aligned with video architecture)\n const clipModel = this.getModel()?.findClip(clip.clipId);\n const trimStartUs = clipModel?.trimStartUs ?? 0;\n const
+
{"version":3,"file":"OfflineAudioMixer.js","sources":["../../../src/stages/compose/OfflineAudioMixer.ts"],"sourcesContent":["import type { TimeUs } from '../../model/types';\nimport { hasAudioConfig, hasResourceId } from '../../model/types';\nimport type { CompositionModel } from '../../model';\nimport type { CacheManager } from '../../cache/CacheManager';\nimport { buildLoopedResourceSegments } from '../../utils/loop-utils';\n\ninterface MixClipInfo {\n clipId: string;\n startUs: TimeUs;\n durationUs: TimeUs;\n volume: number;\n}\n\nexport class OfflineAudioMixer {\n private sampleRate = 48_000;\n private numberOfChannels = 2;\n\n constructor(\n private cacheManager: CacheManager,\n private getModel: () => CompositionModel | null\n ) {}\n\n async mix(windowStartUs: TimeUs, windowEndUs: TimeUs): Promise<AudioBuffer> {\n const durationUs = windowEndUs - windowStartUs;\n // Guard against invalid/empty ranges (can happen near timeline end or after clamping).\n // OfflineAudioContext requires length >= 1.\n const frameCount = Math.max(\n 1,\n Math.ceil((Math.max(0, durationUs) / 1_000_000) * this.sampleRate)\n );\n\n const ctx = new OfflineAudioContext(this.numberOfChannels, frameCount, this.sampleRate);\n\n // Ensure the OfflineAudioContext renders the full requested length.\n // Some implementations may stop early if no sources are scheduled near the tail,\n // which would truncate trailing silence and make export audio shorter than video.\n const silent = ctx.createBuffer(1, frameCount, this.sampleRate);\n const silentSource = ctx.createBufferSource();\n silentSource.buffer = silent;\n const silentGain = ctx.createGain();\n silentGain.gain.value = 0;\n silentSource.connect(silentGain);\n silentGain.connect(ctx.destination);\n silentSource.start(0);\n\n const clips = this.getClipsInWindow(windowStartUs, windowEndUs);\n\n for (const clip of clips) {\n // Calculate clip-relative time range\n const clipIntersectStartUs = Math.max(windowStartUs, clip.startUs);\n const clipIntersectEndUs = Math.min(windowEndUs, clip.startUs + clip.durationUs);\n const clipRelativeStartUs = clipIntersectStartUs - clip.startUs;\n const clipRelativeEndUs = clipIntersectEndUs - clip.startUs;\n\n // Convert to resource time (aligned with video architecture)\n const clipModel = this.getModel()?.findClip(clip.clipId);\n const trimStartUs = clipModel?.trimStartUs ?? 0;\n const loop = clipModel?.trackKind === 'audio' && clipModel.loop === true;\n const resourceDurationUs =\n clipModel && hasResourceId(clipModel)\n ? (this.cacheManager.audioSampleCache.get(clipModel.resourceId)?.durationUs ?? 0)\n : 0;\n\n let segments = buildLoopedResourceSegments({\n clipRelativeStartUs,\n clipRelativeEndUs,\n trimStartUs,\n resourceDurationUs,\n loop,\n });\n if (segments.length === 0 && clipRelativeEndUs > clipRelativeStartUs) {\n segments = buildLoopedResourceSegments({\n clipRelativeStartUs,\n clipRelativeEndUs,\n trimStartUs,\n resourceDurationUs,\n loop: false,\n });\n }\n\n for (const seg of segments) {\n // Get PCM data using resource time coordinates\n const pcmData = this.cacheManager.getClipPCMWithMetadata(\n clip.clipId,\n seg.resourceStartUs,\n seg.resourceEndUs\n );\n\n if (!pcmData || pcmData.planes.length === 0) {\n continue;\n }\n\n const intersectFrames = pcmData.planes[0]?.length ?? 
0;\n if (intersectFrames === 0) {\n continue;\n }\n\n // Create AudioBuffer\n const buffer = ctx.createBuffer(pcmData.planes.length, intersectFrames, pcmData.sampleRate);\n\n for (let channel = 0; channel < pcmData.planes.length; channel++) {\n const plane = pcmData.planes[channel];\n if (plane) {\n // Create new Float32Array to ensure correct type (ArrayBuffer, not SharedArrayBuffer)\n buffer.copyToChannel(new Float32Array(plane), channel);\n }\n }\n\n const source = ctx.createBufferSource();\n source.buffer = buffer;\n\n const gainNode = ctx.createGain();\n gainNode.gain.value = clip.volume;\n\n source.connect(gainNode);\n gainNode.connect(ctx.destination);\n\n const segmentStartUs = clip.startUs + seg.clipRelativeStartUs;\n const startTime = (segmentStartUs - windowStartUs) / 1_000_000;\n source.start(startTime);\n }\n }\n\n const mixedBuffer = await ctx.startRendering();\n return mixedBuffer;\n }\n\n private getClipsInWindow(windowStartUs: TimeUs, windowEndUs: TimeUs): MixClipInfo[] {\n const clips: MixClipInfo[] = [];\n const model = this.getModel();\n if (!model) {\n return clips;\n }\n\n for (const track of model.tracks) {\n for (const clip of track.clips) {\n const clipEndUs = clip.startUs + clip.durationUs;\n if (clip.startUs < windowEndUs && clipEndUs > windowStartUs) {\n // Read audio config (only video/audio clips have audioConfig)\n if (hasAudioConfig(clip)) {\n const muted = clip.audioConfig?.muted ?? false;\n\n // Skip muted clips in export (performance optimization)\n if (muted) {\n continue;\n }\n\n const volume = clip.audioConfig?.volume ?? 1.0;\n\n clips.push({\n clipId: clip.id,\n startUs: clip.startUs,\n durationUs: clip.durationUs,\n volume,\n });\n } else {\n // Caption/Fx clips in audio track should not happen, but handle gracefully\n clips.push({\n clipId: clip.id,\n startUs: clip.startUs,\n durationUs: clip.durationUs,\n volume: 1.0,\n });\n }\n }\n }\n }\n\n return clips;\n 
}\n}\n"],"names":[],"mappings":";;AAaO,MAAM,kBAAkB;AAAA,EAI7B,YACU,cACA,UACR;AAFQ,SAAA,eAAA;AACA,SAAA,WAAA;AAAA,EACP;AAAA,EANK,aAAa;AAAA,EACb,mBAAmB;AAAA,EAO3B,MAAM,IAAI,eAAuB,aAA2C;AAC1E,UAAM,aAAa,cAAc;AAGjC,UAAM,aAAa,KAAK;AAAA,MACtB;AAAA,MACA,KAAK,KAAM,KAAK,IAAI,GAAG,UAAU,IAAI,MAAa,KAAK,UAAU;AAAA,IAAA;AAGnE,UAAM,MAAM,IAAI,oBAAoB,KAAK,kBAAkB,YAAY,KAAK,UAAU;AAKtF,UAAM,SAAS,IAAI,aAAa,GAAG,YAAY,KAAK,UAAU;AAC9D,UAAM,eAAe,IAAI,mBAAA;AACzB,iBAAa,SAAS;AACtB,UAAM,aAAa,IAAI,WAAA;AACvB,eAAW,KAAK,QAAQ;AACxB,iBAAa,QAAQ,UAAU;AAC/B,eAAW,QAAQ,IAAI,WAAW;AAClC,iBAAa,MAAM,CAAC;AAEpB,UAAM,QAAQ,KAAK,iBAAiB,eAAe,WAAW;AAE9D,eAAW,QAAQ,OAAO;AAExB,YAAM,uBAAuB,KAAK,IAAI,eAAe,KAAK,OAAO;AACjE,YAAM,qBAAqB,KAAK,IAAI,aAAa,KAAK,UAAU,KAAK,UAAU;AAC/E,YAAM,sBAAsB,uBAAuB,KAAK;AACxD,YAAM,oBAAoB,qBAAqB,KAAK;AAGpD,YAAM,YAAY,KAAK,SAAA,GAAY,SAAS,KAAK,MAAM;AACvD,YAAM,cAAc,WAAW,eAAe;AAC9C,YAAM,OAAO,WAAW,cAAc,WAAW,UAAU,SAAS;AACpE,YAAM,qBACJ,aAAa,cAAc,SAAS,IAC/B,KAAK,aAAa,iBAAiB,IAAI,UAAU,UAAU,GAAG,cAAc,IAC7E;AAEN,UAAI,WAAW,4BAA4B;AAAA,QACzC;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MAAA,CACD;AACD,UAAI,SAAS,WAAW,KAAK,oBAAoB,qBAAqB;AACpE,mBAAW,4BAA4B;AAAA,UACrC;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA,MAAM;AAAA,QAAA,CACP;AAAA,MACH;AAEA,iBAAW,OAAO,UAAU;AAE1B,cAAM,UAAU,KAAK,aAAa;AAAA,UAChC,KAAK;AAAA,UACL,IAAI;AAAA,UACJ,IAAI;AAAA,QAAA;AAGN,YAAI,CAAC,WAAW,QAAQ,OAAO,WAAW,GAAG;AAC3C;AAAA,QACF;AAEA,cAAM,kBAAkB,QAAQ,OAAO,CAAC,GAAG,UAAU;AACrD,YAAI,oBAAoB,GAAG;AACzB;AAAA,QACF;AAGA,cAAM,SAAS,IAAI,aAAa,QAAQ,OAAO,QAAQ,iBAAiB,QAAQ,UAAU;AAE1F,iBAAS,UAAU,GAAG,UAAU,QAAQ,OAAO,QAAQ,WAAW;AAChE,gBAAM,QAAQ,QAAQ,OAAO,OAAO;AACpC,cAAI,OAAO;AAET,mBAAO,cAAc,IAAI,aAAa,KAAK,GAAG,OAAO;AAAA,UACvD;AAAA,QACF;AAEA,cAAM,SAAS,IAAI,mBAAA;AACnB,eAAO,SAAS;AAEhB,cAAM,WAAW,IAAI,WAAA;AACrB,iBAAS,KAAK,QAAQ,KAAK;AAE3B,eAAO,QAAQ,QAAQ;AACvB,iBAAS,QAAQ,IAAI,WAAW;AAEhC,cAAM,iBAAiB,KAAK,UAAU,IAAI;AAC1C,cAAM,aAAa,iBAAiB,iBAAiB;AACrD,eAAO,MAAM,SAAS;AAAA,MACxB;AAAA,IACF;AAEA,UAAM,cAAc,MAAM,IAAI,eAAA;AAC9B,WAAO;AAAA,EACT;AAAA,EAEQ,iBAAiB,eAAuB,aAAoC;AAClF,UAAM,QAAuB,CAAA;AAC7B,UAAM,QAAQ,KAAK,SAAA;AACnB,QAAI,CAAC,OAAO;AACV,aAAO;AAAA,IACT;AAEA,eAAW,SAAS,MAAM,QAAQ;AAChC,iBAAW,QAAQ,MAAM,OAAO;AAC9B,cAAM,YAAY,KAAK,UAAU,KAAK;AACtC,YAAI,KAAK,UAAU,eAAe,YAAY,eAAe;AAE3D,cAAI,eAAe,IAAI,GAAG;AACxB,kBAAM,QAAQ,KAAK,aAAa,SAAS;AAGzC,gBAAI,OAAO;AACT;AAAA,YACF;AAEA,kBAAM,SAAS,KAAK,aAAa,UAAU;AAE3C,kBAAM,KAAK;AAAA,cACT,QAAQ,KAAK;AAAA,cACb,SAAS,KAAK;AAAA,cACd,YAAY,KAAK;AAAA,cACjB;AAAA,YAAA,CACD;AAAA,UACH,OAAO;AAEL,kBAAM,KAAK;AAAA,cACT,QAAQ,KAAK;AAAA,cACb,SAAS,KAAK;AAAA,cACd,YAAY,KAAK;AAAA,cACjB,QAAQ;AAAA,YAAA,CACT;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AACF;"}
package/dist/stages/mux/MP4Muxer.d.ts
CHANGED
@@ -1,3 +1,11 @@
+export type MuxOutputConfig = {
+    kind: 'blob';
+} | {
+    kind: 'stream';
+    onData: (data: Uint8Array, position: number) => void;
+    chunked?: boolean;
+    chunkSize?: number;
+};
 /**
  * MP4Muxer - MP4 container multiplexer using mp4-muxer library
  * Supports video and audio track export
@@ -8,6 +16,7 @@ export declare class MP4Muxer {
     private firstAudioChunk;
     private videoChunkMeta;
     private audioChunkMeta;
+    private lastAudioDurationUs;
     constructor(config: {
         width: number;
         height: number;
@@ -15,11 +24,11 @@
         fastStart?: false | 'in-memory' | 'fragmented';
         videoChunkMeta?: any;
         audioChunkMeta?: any;
-    });
+    }, output?: MuxOutputConfig);
     private videoChunkCount;
     writeVideoChunk(chunk: EncodedVideoChunk, metadata?: EncodedVideoChunkMetadata): void;
     private audioChunkCount;
     writeAudioChunk(chunk: EncodedAudioChunk, metadata?: EncodedAudioChunkMetadata): void;
-    finalize(): Blob;
+    finalize(): Blob | null;
 }
 //# sourceMappingURL=MP4Muxer.d.ts.map
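Note: per the new declaration, MP4Muxer now accepts an optional MuxOutputConfig as a second constructor argument, and finalize() may return null. A hedged usage sketch (MP4Muxer is an internal dist module; the collecting sink below is illustrative):

    // Stream-mode muxing delivers MP4 bytes incrementally through onData, so
    // finalize() has no in-memory Blob to hand back and returns null instead.
    const received: { data: Uint8Array; position: number }[] = [];
    const muxer = new MP4Muxer(
      { width: 1920, height: 1080, fps: 30, fastStart: false },
      {
        kind: 'stream',
        onData: (data, position) => {
          // mp4-muxer may rewrite earlier byte ranges, so the position matters.
          received.push({ data: data.slice(), position });
        },
        chunked: true,
        chunkSize: 16 * 1024 * 1024,
      }
    );
    // ...writeVideoChunk()/writeAudioChunk() during export...
    const blob = muxer.finalize(); // null in stream mode; bytes were already delivered via onData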
package/dist/stages/mux/MP4Muxer.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"MP4Muxer.d.ts","sourceRoot":"","sources":["../../../src/stages/mux/MP4Muxer.ts"],"names":[],"mappings":"AAEA;;;GAGG;AACH,qBAAa,QAAQ;IACnB,OAAO,CAAC,KAAK,
+
{"version":3,"file":"MP4Muxer.d.ts","sourceRoot":"","sources":["../../../src/stages/mux/MP4Muxer.ts"],"names":[],"mappings":"AAEA,MAAM,MAAM,eAAe,GACvB;IAAE,IAAI,EAAE,MAAM,CAAA;CAAE,GAChB;IACE,IAAI,EAAE,QAAQ,CAAC;IACf,MAAM,EAAE,CAAC,IAAI,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,KAAK,IAAI,CAAC;IACrD,OAAO,CAAC,EAAE,OAAO,CAAC;IAClB,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB,CAAC;AAEN;;;GAGG;AACH,qBAAa,QAAQ;IACnB,OAAO,CAAC,KAAK,CAA0C;IACvD,OAAO,CAAC,eAAe,CAAQ;IAC/B,OAAO,CAAC,eAAe,CAAQ;IAC/B,OAAO,CAAC,cAAc,CAAa;IACnC,OAAO,CAAC,cAAc,CAAa;IACnC,OAAO,CAAC,mBAAmB,CAAuB;gBAGhD,MAAM,EAAE;QACN,KAAK,EAAE,MAAM,CAAC;QACd,MAAM,EAAE,MAAM,CAAC;QACf,GAAG,EAAE,MAAM,CAAC;QACZ,SAAS,CAAC,EAAE,KAAK,GAAG,WAAW,GAAG,YAAY,CAAC;QAC/C,cAAc,CAAC,EAAE,GAAG,CAAC;QACrB,cAAc,CAAC,EAAE,GAAG,CAAC;KACtB,EACD,MAAM,GAAE,eAAkC;IAqD5C,OAAO,CAAC,eAAe,CAAK;IAE5B,eAAe,CAAC,KAAK,EAAE,iBAAiB,EAAE,QAAQ,CAAC,EAAE,yBAAyB,GAAG,IAAI;IAwBrF,OAAO,CAAC,eAAe,CAAK;IAE5B,eAAe,CAAC,KAAK,EAAE,iBAAiB,EAAE,QAAQ,CAAC,EAAE,yBAAyB,GAAG,IAAI;IAmDrF,QAAQ,IAAI,IAAI,GAAG,IAAI;CASxB"}
package/dist/stages/mux/MP4Muxer.js
CHANGED
@@ -1,15 +1,22 @@
-import { ArrayBufferTarget, Muxer } from "../../medeo-fe/node_modules/.pnpm/mp4-muxer@5.2.2/node_modules/mp4-muxer/build/mp4-muxer.js";
+import { StreamTarget, ArrayBufferTarget, Muxer } from "../../medeo-fe/node_modules/.pnpm/mp4-muxer@5.2.2/node_modules/mp4-muxer/build/mp4-muxer.js";
 class MP4Muxer {
   muxer;
   firstVideoChunk = true;
   firstAudioChunk = true;
   videoChunkMeta = null;
   audioChunkMeta = null;
-
+  lastAudioDurationUs = null;
+  constructor(config, output = { kind: "blob" }) {
     this.videoChunkMeta = config.videoChunkMeta;
     this.audioChunkMeta = config.audioChunkMeta;
+    const target = output.kind === "stream" ? new StreamTarget({
+      onData: output.onData,
+      chunked: output.chunked ?? true,
+      chunkSize: output.chunkSize ?? 16 * 1024 * 1024
+      // 16 MiB default to reduce writes
+    }) : new ArrayBufferTarget();
     const muxerConfig = {
-      target
+      target,
       video: {
         codec: "avc",
         width: config.width,
@@ -46,7 +53,7 @@
   }
   audioChunkCount = 0;
   writeAudioChunk(chunk, metadata) {
-    if (chunk.byteLength <= 16) {
+    if (chunk.byteLength <= 16 && (!chunk.duration || chunk.duration <= 0)) {
       return;
     }
     let meta;
@@ -59,13 +66,32 @@
       }
       this.firstAudioChunk = false;
     }
+    const hasValidDuration = !!chunk.duration && chunk.duration > 0;
+    if (!hasValidDuration) {
+      const sampleRate = metadata?.decoderConfig?.sampleRate ?? this.audioChunkMeta?.sampleRate ?? 48e3;
+      const inferredDurationUs = this.lastAudioDurationUs ?? Math.max(1, Math.round(1024 / sampleRate * 1e6));
+      const buffer = new ArrayBuffer(chunk.byteLength);
+      chunk.copyTo(buffer);
+      chunk = new EncodedAudioChunk({
+        type: chunk.type,
+        timestamp: chunk.timestamp,
+        duration: inferredDurationUs,
+        data: buffer
+      });
+      this.lastAudioDurationUs = inferredDurationUs;
+    } else {
+      this.lastAudioDurationUs = chunk.duration;
+    }
     this.audioChunkCount++;
     this.muxer.addAudioChunk(chunk, meta);
   }
   finalize() {
     this.muxer.finalize();
-
-
+    if (this.muxer.target instanceof ArrayBufferTarget) {
+      const buffer = this.muxer.target.buffer;
+      return new Blob([buffer], { type: "video/mp4" });
+    }
+    return null;
   }
 }
 export {
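Note: the new writeAudioChunk path synthesizes a duration when the platform reports none. An AAC-LC frame carries 1024 samples, so at 48 kHz the inferred chunk duration is round(1024 / 48000 × 1e6) = 21333 µs; the previous chunk's duration is reused when one is known. A standalone sketch of that rule:

    // Duration-inference rule mirrored from the diff above (AAC-LC = 1024 samples/frame).
    function inferAudioChunkDurationUs(sampleRate: number, lastDurationUs: number | null): number {
      if (lastDurationUs !== null) return lastDurationUs; // reuse the previous chunk's duration
      return Math.max(1, Math.round((1024 / sampleRate) * 1_000_000));
    }
    inferAudioChunkDurationUs(48_000, null); // => 21333 µs
    inferAudioChunkDurationUs(44_100, null); // => 23220 µs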
package/dist/stages/mux/MP4Muxer.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"MP4Muxer.js","sources":["../../../src/stages/mux/MP4Muxer.ts"],"sourcesContent":["import { Muxer, ArrayBufferTarget } from 'mp4-muxer';\n\n/**\n * MP4Muxer - MP4 container multiplexer using mp4-muxer library\n * Supports video and audio track export\n */\nexport class MP4Muxer {\n private muxer: Muxer<ArrayBufferTarget>;\n private firstVideoChunk = true;\n private firstAudioChunk = true;\n private videoChunkMeta: any = null;\n private audioChunkMeta: any = null;\n\n constructor(config: {\n
+
{"version":3,"file":"MP4Muxer.js","sources":["../../../src/stages/mux/MP4Muxer.ts"],"sourcesContent":["import { Muxer, ArrayBufferTarget, StreamTarget } from 'mp4-muxer';\n\nexport type MuxOutputConfig =\n | { kind: 'blob' }\n | {\n kind: 'stream';\n onData: (data: Uint8Array, position: number) => void;\n chunked?: boolean;\n chunkSize?: number;\n };\n\n/**\n * MP4Muxer - MP4 container multiplexer using mp4-muxer library\n * Supports video and audio track export\n */\nexport class MP4Muxer {\n private muxer: Muxer<ArrayBufferTarget | StreamTarget>;\n private firstVideoChunk = true;\n private firstAudioChunk = true;\n private videoChunkMeta: any = null;\n private audioChunkMeta: any = null;\n private lastAudioDurationUs: number | null = null;\n\n constructor(\n config: {\n width: number;\n height: number;\n fps: number;\n fastStart?: false | 'in-memory' | 'fragmented';\n videoChunkMeta?: any;\n audioChunkMeta?: any;\n },\n output: MuxOutputConfig = { kind: 'blob' }\n ) {\n this.videoChunkMeta = config.videoChunkMeta;\n this.audioChunkMeta = config.audioChunkMeta;\n\n const target =\n output.kind === 'stream'\n ? new StreamTarget({\n onData: output.onData,\n chunked: output.chunked ?? true,\n chunkSize: output.chunkSize ?? 16 * 1024 * 1024, // 16 MiB default to reduce writes\n })\n : new ArrayBufferTarget();\n\n const muxerConfig: any = {\n target,\n video: {\n codec: 'avc',\n width: config.width,\n height: config.height,\n frameRate: config.fps,\n },\n fastStart: config.fastStart ?? 'in-memory',\n firstTimestampBehavior: 'offset',\n };\n\n // Add audio configuration if provided\n // If not provided initially, we assume AAC (standard for web)\n // but mp4-muxer might need it.\n // Actually mp4-muxer allows adding track configuration later?\n // No, it requires it in constructor or inferred?\n // If audioChunkMeta is missing, we can't configure audio fully here.\n // But mp4-muxer docs say: \"If you don't provide options.audio, no audio track will be created.\"\n // So we MUST provide options.audio if we want audio.\n // If we don't have meta yet, we guess?\n // Or we rely on audio export always providing AAC 48k?\n // Let's assume AAC 48k 2ch as default if we want audio support.\n // Or better: if audioChunkMeta is null, we enable audio with defaults.\n\n // However, for robustness, we should probably wait for first chunk to configure muxer?\n // But MuxManager calls `start` then `writeVideoChunk` then `writeAudioChunk`.\n // Audio might come later.\n //\n // Let's assume we always want audio track capability.\n muxerConfig.audio = {\n codec: 'aac',\n sampleRate: this.audioChunkMeta?.sampleRate || 48000,\n numberOfChannels: this.audioChunkMeta?.numberOfChannels || 2,\n };\n\n this.muxer = new Muxer(muxerConfig);\n }\n\n private videoChunkCount = 0;\n\n writeVideoChunk(chunk: EncodedVideoChunk, metadata?: EncodedVideoChunkMetadata): void {\n let meta: EncodedVideoChunkMetadata | undefined;\n\n if (this.firstVideoChunk) {\n if (metadata && metadata.decoderConfig) {\n this.videoChunkMeta = metadata.decoderConfig;\n }\n\n if (this.videoChunkMeta) {\n meta = { decoderConfig: this.videoChunkMeta };\n }\n\n // Ensure we have metadata for first chunk if it's a keyframe\n if (chunk.type === 'key' && !meta) {\n console.warn('[MP4Muxer] First video chunk is keyframe but missing decoderConfig');\n }\n\n this.firstVideoChunk = false;\n }\n\n this.videoChunkCount++;\n this.muxer.addVideoChunk(chunk, meta);\n }\n\n private audioChunkCount = 0;\n\n writeAudioChunk(chunk: EncodedAudioChunk, metadata?: 
EncodedAudioChunkMetadata): void {\n // Safari (and sometimes Chromium) may emit tiny (e.g. 6 bytes) \"audio\" chunks that are not valid AAC frames.\n // mp4-muxer treats durations as authoritative; dropping *valid* small chunks can collapse silence gaps and cause stutter.\n // So only drop tiny chunks when they are clearly invalid (duration missing/<=0).\n if (chunk.byteLength <= 16 && (!chunk.duration || chunk.duration <= 0)) {\n return;\n }\n\n let meta: EncodedAudioChunkMetadata | undefined;\n\n if (this.firstAudioChunk) {\n if (metadata && metadata.decoderConfig) {\n this.audioChunkMeta = metadata.decoderConfig;\n }\n\n if (this.audioChunkMeta) {\n meta = { decoderConfig: this.audioChunkMeta };\n }\n\n this.firstAudioChunk = false;\n }\n\n // Some platforms output duration=0/undefined for audio chunks.\n // mp4-muxer uses duration to build the timeline; 0 duration can collapse time and remove intended gaps.\n // Try to synthesize a sane duration:\n // - Prefer the chunk's own duration when >0\n // - Otherwise use AAC-LC frame duration (1024 samples) inferred from decoderConfig sampleRate if known\n const hasValidDuration = !!chunk.duration && chunk.duration > 0;\n if (!hasValidDuration) {\n const sampleRate =\n (metadata as any)?.decoderConfig?.sampleRate ?? this.audioChunkMeta?.sampleRate ?? 48_000;\n const inferredDurationUs =\n this.lastAudioDurationUs ?? Math.max(1, Math.round((1024 / sampleRate) * 1_000_000));\n\n const buffer = new ArrayBuffer(chunk.byteLength);\n chunk.copyTo(buffer);\n chunk = new EncodedAudioChunk({\n type: chunk.type,\n timestamp: chunk.timestamp,\n duration: inferredDurationUs,\n data: buffer,\n });\n this.lastAudioDurationUs = inferredDurationUs;\n } else {\n this.lastAudioDurationUs = chunk.duration!;\n }\n\n this.audioChunkCount++;\n this.muxer.addAudioChunk(chunk, meta);\n }\n\n finalize(): Blob | null {\n this.muxer.finalize();\n if (this.muxer.target instanceof ArrayBufferTarget) {\n const buffer = this.muxer.target.buffer;\n return new Blob([buffer], { type: 'video/mp4' });\n }\n // Stream target has already delivered data via onData callback.\n return null;\n 
}\n}\n"],"names":[],"mappings":";AAeO,MAAM,SAAS;AAAA,EACZ;AAAA,EACA,kBAAkB;AAAA,EAClB,kBAAkB;AAAA,EAClB,iBAAsB;AAAA,EACtB,iBAAsB;AAAA,EACtB,sBAAqC;AAAA,EAE7C,YACE,QAQA,SAA0B,EAAE,MAAM,UAClC;AACA,SAAK,iBAAiB,OAAO;AAC7B,SAAK,iBAAiB,OAAO;AAE7B,UAAM,SACJ,OAAO,SAAS,WACZ,IAAI,aAAa;AAAA,MACf,QAAQ,OAAO;AAAA,MACf,SAAS,OAAO,WAAW;AAAA,MAC3B,WAAW,OAAO,aAAa,KAAK,OAAO;AAAA;AAAA,IAAA,CAC5C,IACD,IAAI,kBAAA;AAEV,UAAM,cAAmB;AAAA,MACvB;AAAA,MACA,OAAO;AAAA,QACL,OAAO;AAAA,QACP,OAAO,OAAO;AAAA,QACd,QAAQ,OAAO;AAAA,QACf,WAAW,OAAO;AAAA,MAAA;AAAA,MAEpB,WAAW,OAAO,aAAa;AAAA,MAC/B,wBAAwB;AAAA,IAAA;AAqB1B,gBAAY,QAAQ;AAAA,MAClB,OAAO;AAAA,MACP,YAAY,KAAK,gBAAgB,cAAc;AAAA,MAC/C,kBAAkB,KAAK,gBAAgB,oBAAoB;AAAA,IAAA;AAG7D,SAAK,QAAQ,IAAI,MAAM,WAAW;AAAA,EACpC;AAAA,EAEQ,kBAAkB;AAAA,EAE1B,gBAAgB,OAA0B,UAA4C;AACpF,QAAI;AAEJ,QAAI,KAAK,iBAAiB;AACxB,UAAI,YAAY,SAAS,eAAe;AACtC,aAAK,iBAAiB,SAAS;AAAA,MACjC;AAEA,UAAI,KAAK,gBAAgB;AACvB,eAAO,EAAE,eAAe,KAAK,eAAA;AAAA,MAC/B;AAGA,UAAI,MAAM,SAAS,SAAS,CAAC,MAAM;AACjC,gBAAQ,KAAK,oEAAoE;AAAA,MACnF;AAEA,WAAK,kBAAkB;AAAA,IACzB;AAEA,SAAK;AACL,SAAK,MAAM,cAAc,OAAO,IAAI;AAAA,EACtC;AAAA,EAEQ,kBAAkB;AAAA,EAE1B,gBAAgB,OAA0B,UAA4C;AAIpF,QAAI,MAAM,cAAc,OAAO,CAAC,MAAM,YAAY,MAAM,YAAY,IAAI;AACtE;AAAA,IACF;AAEA,QAAI;AAEJ,QAAI,KAAK,iBAAiB;AACxB,UAAI,YAAY,SAAS,eAAe;AACtC,aAAK,iBAAiB,SAAS;AAAA,MACjC;AAEA,UAAI,KAAK,gBAAgB;AACvB,eAAO,EAAE,eAAe,KAAK,eAAA;AAAA,MAC/B;AAEA,WAAK,kBAAkB;AAAA,IACzB;AAOA,UAAM,mBAAmB,CAAC,CAAC,MAAM,YAAY,MAAM,WAAW;AAC9D,QAAI,CAAC,kBAAkB;AACrB,YAAM,aACH,UAAkB,eAAe,cAAc,KAAK,gBAAgB,cAAc;AACrF,YAAM,qBACJ,KAAK,uBAAuB,KAAK,IAAI,GAAG,KAAK,MAAO,OAAO,aAAc,GAAS,CAAC;AAErF,YAAM,SAAS,IAAI,YAAY,MAAM,UAAU;AAC/C,YAAM,OAAO,MAAM;AACnB,cAAQ,IAAI,kBAAkB;AAAA,QAC5B,MAAM,MAAM;AAAA,QACZ,WAAW,MAAM;AAAA,QACjB,UAAU;AAAA,QACV,MAAM;AAAA,MAAA,CACP;AACD,WAAK,sBAAsB;AAAA,IAC7B,OAAO;AACL,WAAK,sBAAsB,MAAM;AAAA,IACnC;AAEA,SAAK;AACL,SAAK,MAAM,cAAc,OAAO,IAAI;AAAA,EACtC;AAAA,EAEA,WAAwB;AACtB,SAAK,MAAM,SAAA;AACX,QAAI,KAAK,MAAM,kBAAkB,mBAAmB;AAClD,YAAM,SAAS,KAAK,MAAM,OAAO;AACjC,aAAO,IAAI,KAAK,CAAC,MAAM,GAAG,EAAE,MAAM,aAAa;AAAA,IACjD;AAEA,WAAO;AAAA,EACT;AACF;"}
package/dist/stages/mux/MuxManager.d.ts
CHANGED
@@ -1,13 +1,17 @@
+import { MuxOutputConfig } from './MP4Muxer';
+
 export declare class MuxManager {
     private muxer;
+    private output;
     constructor();
     start(config: {
         width: number;
         height: number;
         fps: number;
+        output?: MuxOutputConfig;
     }): void;
     writeVideoChunk(chunk: EncodedVideoChunk, metadata?: EncodedVideoChunkMetadata): void;
     writeAudioChunk(chunk: EncodedAudioChunk, metadata?: EncodedAudioChunkMetadata): void;
-    finalize(): Blob;
+    finalize(): Blob | null;
 }
 //# sourceMappingURL=MuxManager.d.ts.map
package/dist/stages/mux/MuxManager.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"MuxManager.d.ts","sourceRoot":"","sources":["../../../src/stages/mux/MuxManager.ts"],"names":[],"mappings":"
+
{"version":3,"file":"MuxManager.d.ts","sourceRoot":"","sources":["../../../src/stages/mux/MuxManager.ts"],"names":[],"mappings":"AAAA,OAAO,EAAY,KAAK,eAAe,EAAE,MAAM,YAAY,CAAC;AAE5D,qBAAa,UAAU;IACrB,OAAO,CAAC,KAAK,CAAyB;IACtC,OAAO,CAAC,MAAM,CAAgC;;IAI9C,KAAK,CAAC,MAAM,EAAE;QAAE,KAAK,EAAE,MAAM,CAAC;QAAC,MAAM,EAAE,MAAM,CAAC;QAAC,GAAG,EAAE,MAAM,CAAC;QAAC,MAAM,CAAC,EAAE,eAAe,CAAA;KAAE;IAgBtF,eAAe,CAAC,KAAK,EAAE,iBAAiB,EAAE,QAAQ,CAAC,EAAE,yBAAyB;IAK9E,eAAe,CAAC,KAAK,EAAE,iBAAiB,EAAE,QAAQ,CAAC,EAAE,yBAAyB;IAS9E,QAAQ,IAAI,IAAI,GAAG,IAAI;CAOxB"}
package/dist/stages/mux/MuxManager.js
CHANGED
@@ -1,16 +1,23 @@
 import { MP4Muxer } from "./MP4Muxer.js";
 class MuxManager {
   muxer = null;
+  output = null;
   constructor() {
   }
   start(config) {
-    this.
-
-
-
-
-
-
+    this.output = config.output ?? { kind: "blob" };
+    this.muxer = new MP4Muxer(
+      {
+        width: config.width,
+        height: config.height,
+        fps: config.fps,
+        // For blob output, prefer fast start by keeping chunks in memory.
+        // For stream output, prefer minimal memory usage: metadata at end (non-fMP4).
+        fastStart: this.output.kind === "stream" ? false : "in-memory"
+        // Metadata will be handled by first chunks
+      },
+      this.output
+    );
   }
   writeVideoChunk(chunk, metadata) {
     if (!this.muxer) throw new Error("Muxer not started");
@@ -27,6 +34,7 @@ class MuxManager {
     if (!this.muxer) throw new Error("Muxer not started");
     const blob = this.muxer.finalize();
     this.muxer = null;
+    this.output = null;
     return blob;
   }
 }
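Note: MuxManager.start now forwards the optional output config and picks fastStart from it: 'in-memory' for blob output, false (metadata written at the end, non-fragmented MP4) for streaming. A hedged usage sketch (MuxManager is an internal dist class; the writer sink is assumed, e.g. a file handle owned by the caller):

    const mux = new MuxManager();
    mux.start({
      width: 1280,
      height: 720,
      fps: 30,
      output: {
        kind: 'stream',
        onData: (data, position) => writer.write({ data, position }), // assumed sink
      },
    });
    // ...writeVideoChunk()/writeAudioChunk()...
    const result = mux.finalize(); // Blob for { kind: 'blob' }, null for streaming output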
package/dist/stages/mux/MuxManager.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"MuxManager.js","sources":["../../../src/stages/mux/MuxManager.ts"],"sourcesContent":["import { MP4Muxer } from './MP4Muxer';\n\nexport class MuxManager {\n private muxer: MP4Muxer | null = null;\n\n constructor() {}\n\n start(config: { width: number; height: number; fps: number }) {\n this.muxer = new MP4Muxer({\n
+
{"version":3,"file":"MuxManager.js","sources":["../../../src/stages/mux/MuxManager.ts"],"sourcesContent":["import { MP4Muxer, type MuxOutputConfig } from './MP4Muxer';\n\nexport class MuxManager {\n private muxer: MP4Muxer | null = null;\n private output: MuxOutputConfig | null = null;\n\n constructor() {}\n\n start(config: { width: number; height: number; fps: number; output?: MuxOutputConfig }) {\n this.output = config.output ?? { kind: 'blob' };\n this.muxer = new MP4Muxer(\n {\n width: config.width,\n height: config.height,\n fps: config.fps,\n // For blob output, prefer fast start by keeping chunks in memory.\n // For stream output, prefer minimal memory usage: metadata at end (non-fMP4).\n fastStart: this.output.kind === 'stream' ? false : 'in-memory',\n // Metadata will be handled by first chunks\n },\n this.output\n );\n }\n\n writeVideoChunk(chunk: EncodedVideoChunk, metadata?: EncodedVideoChunkMetadata) {\n if (!this.muxer) throw new Error('Muxer not started');\n this.muxer.writeVideoChunk(chunk, metadata);\n }\n\n writeAudioChunk(chunk: EncodedAudioChunk, metadata?: EncodedAudioChunkMetadata) {\n // Check if muxer is available (it might have been finalized already if audio is late)\n if (!this.muxer) {\n console.warn('[MuxManager] writeAudioChunk called after finalization, dropping chunk');\n return;\n }\n this.muxer.writeAudioChunk(chunk, metadata);\n }\n\n finalize(): Blob | null {\n if (!this.muxer) throw new Error('Muxer not started');\n const blob = this.muxer.finalize();\n this.muxer = null;\n this.output = null;\n return blob;\n }\n}\n"],"names":[],"mappings":";AAEO,MAAM,WAAW;AAAA,EACd,QAAyB;AAAA,EACzB,SAAiC;AAAA,EAEzC,cAAc;AAAA,EAAC;AAAA,EAEf,MAAM,QAAkF;AACtF,SAAK,SAAS,OAAO,UAAU,EAAE,MAAM,OAAA;AACvC,SAAK,QAAQ,IAAI;AAAA,MACf;AAAA,QACE,OAAO,OAAO;AAAA,QACd,QAAQ,OAAO;AAAA,QACf,KAAK,OAAO;AAAA;AAAA;AAAA,QAGZ,WAAW,KAAK,OAAO,SAAS,WAAW,QAAQ;AAAA;AAAA,MAAA;AAAA,MAGrD,KAAK;AAAA,IAAA;AAAA,EAET;AAAA,EAEA,gBAAgB,OAA0B,UAAsC;AAC9E,QAAI,CAAC,KAAK,MAAO,OAAM,IAAI,MAAM,mBAAmB;AACpD,SAAK,MAAM,gBAAgB,OAAO,QAAQ;AAAA,EAC5C;AAAA,EAEA,gBAAgB,OAA0B,UAAsC;AAE9E,QAAI,CAAC,KAAK,OAAO;AACf,cAAQ,KAAK,wEAAwE;AACrF;AAAA,IACF;AACA,SAAK,MAAM,gBAAgB,OAAO,QAAQ;AAAA,EAC5C;AAAA,EAEA,WAAwB;AACtB,QAAI,CAAC,KAAK,MAAO,OAAM,IAAI,MAAM,mBAAmB;AACpD,UAAM,OAAO,KAAK,MAAM,SAAA;AACxB,SAAK,QAAQ;AACb,SAAK,SAAS;AACd,WAAO;AAAA,EACT;AACF;"}
package/dist/types.d.ts
CHANGED
@@ -21,5 +21,13 @@ export interface ExportOptions {
     fps?: number;
     /** Quality preset */
     quality?: 'low' | 'medium' | 'high' | 'highest';
+    /** Export output mode (default blob). */
+    exportMode?: 'blob' | 'stream';
+    /** Streaming mux callback, required when exportMode is 'stream'. */
+    onMuxData?: (data: Uint8Array, position: number) => void;
+    /** Optional chunk sizing hint for streaming mux output. */
+    muxChunkSizeBytes?: number;
+    /** Whether to enable mp4-muxer chunked StreamTarget (default true). */
+    muxChunked?: boolean;
 }
 //# sourceMappingURL=types.d.ts.map
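Note: the new ExportOptions fields let callers opt into streaming export instead of receiving a single Blob. A hedged example of an options object, using only fields visible in this diff (fileWritable is an assumed FileSystemWritableFileStream obtained elsewhere, e.g. via showSaveFilePicker(); importing ExportOptions from the package root is also an assumption):

    const options: ExportOptions = {
      fps: 30,
      quality: 'high',
      exportMode: 'stream',
      onMuxData: (data, position) => {
        // The muxer may rewrite earlier byte ranges, so honor `position`.
        void fileWritable.write({ type: 'write', position, data });
      },
      muxChunked: true,
      muxChunkSizeBytes: 16 * 1024 * 1024,
    };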
package/dist/types.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA;;GAEG;AAGH,YAAY,EAAE,aAAa,EAAE,cAAc,EAAE,MAAM,gBAAgB,CAAC;AAEpE,MAAM,MAAM,YAAY,GACpB,MAAM,GACN,SAAS,GACT,OAAO,GACP,SAAS,GACT,QAAQ,GACR,WAAW,GACX,OAAO,GACP,WAAW,CAAC;AAEhB,MAAM,WAAW,aAAa;IAC5B,oBAAoB;IACpB,MAAM,CAAC,EAAE,KAAK,GAAG,MAAM,CAAC;IAExB,kBAAkB;IAClB,UAAU,CAAC,EAAE,MAAM,GAAG,MAAM,GAAG,KAAK,GAAG,KAAK,CAAC;IAE7C,kBAAkB;IAClB,UAAU,CAAC,EAAE,KAAK,GAAG,MAAM,CAAC;IAE5B,2BAA2B;IAC3B,YAAY,CAAC,EAAE,MAAM,CAAC;IAEtB,2BAA2B;IAC3B,YAAY,CAAC,EAAE,MAAM,CAAC;IAEtB,wBAAwB;IACxB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,MAAM,CAAC,EAAE,MAAM,CAAC;IAEhB,iBAAiB;IACjB,GAAG,CAAC,EAAE,MAAM,CAAC;IAEb,qBAAqB;IACrB,OAAO,CAAC,EAAE,KAAK,GAAG,QAAQ,GAAG,MAAM,GAAG,SAAS,CAAC;
+
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA;;GAEG;AAGH,YAAY,EAAE,aAAa,EAAE,cAAc,EAAE,MAAM,gBAAgB,CAAC;AAEpE,MAAM,MAAM,YAAY,GACpB,MAAM,GACN,SAAS,GACT,OAAO,GACP,SAAS,GACT,QAAQ,GACR,WAAW,GACX,OAAO,GACP,WAAW,CAAC;AAEhB,MAAM,WAAW,aAAa;IAC5B,oBAAoB;IACpB,MAAM,CAAC,EAAE,KAAK,GAAG,MAAM,CAAC;IAExB,kBAAkB;IAClB,UAAU,CAAC,EAAE,MAAM,GAAG,MAAM,GAAG,KAAK,GAAG,KAAK,CAAC;IAE7C,kBAAkB;IAClB,UAAU,CAAC,EAAE,KAAK,GAAG,MAAM,CAAC;IAE5B,2BAA2B;IAC3B,YAAY,CAAC,EAAE,MAAM,CAAC;IAEtB,2BAA2B;IAC3B,YAAY,CAAC,EAAE,MAAM,CAAC;IAEtB,wBAAwB;IACxB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,MAAM,CAAC,EAAE,MAAM,CAAC;IAEhB,iBAAiB;IACjB,GAAG,CAAC,EAAE,MAAM,CAAC;IAEb,qBAAqB;IACrB,OAAO,CAAC,EAAE,KAAK,GAAG,QAAQ,GAAG,MAAM,GAAG,SAAS,CAAC;IAEhD,yCAAyC;IACzC,UAAU,CAAC,EAAE,MAAM,GAAG,QAAQ,CAAC;IAC/B,oEAAoE;IACpE,SAAS,CAAC,EAAE,CAAC,IAAI,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,KAAK,IAAI,CAAC;IACzD,2DAA2D;IAC3D,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B,uEAAuE;IACvE,UAAU,CAAC,EAAE,OAAO,CAAC;CACtB"}
package/dist/utils/loop-utils.d.ts
ADDED
@@ -0,0 +1,16 @@
+import { TimeUs } from '../model/types';
+
+export interface LoopSegment {
+    clipRelativeStartUs: TimeUs;
+    durationUs: TimeUs;
+    resourceStartUs: TimeUs;
+    resourceEndUs: TimeUs;
+}
+export declare function buildLoopedResourceSegments(params: {
+    clipRelativeStartUs: TimeUs;
+    clipRelativeEndUs: TimeUs;
+    trimStartUs: TimeUs;
+    resourceDurationUs: TimeUs;
+    loop: boolean;
+}): LoopSegment[];
+//# sourceMappingURL=loop-utils.d.ts.map
package/dist/utils/loop-utils.d.ts.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"loop-utils.d.ts","sourceRoot":"","sources":["../../src/utils/loop-utils.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,gBAAgB,CAAC;AAE7C,MAAM,WAAW,WAAW;IAC1B,mBAAmB,EAAE,MAAM,CAAC;IAC5B,UAAU,EAAE,MAAM,CAAC;IACnB,eAAe,EAAE,MAAM,CAAC;IACxB,aAAa,EAAE,MAAM,CAAC;CACvB;AAED,wBAAgB,2BAA2B,CAAC,MAAM,EAAE;IAClD,mBAAmB,EAAE,MAAM,CAAC;IAC5B,iBAAiB,EAAE,MAAM,CAAC;IAC1B,WAAW,EAAE,MAAM,CAAC;IACpB,kBAAkB,EAAE,MAAM,CAAC;IAC3B,IAAI,EAAE,OAAO,CAAC;CACf,GAAG,WAAW,EAAE,CA+ChB"}
package/dist/utils/loop-utils.js
ADDED
@@ -0,0 +1,44 @@
+function buildLoopedResourceSegments(params) {
+  const rangeStartUs = Math.max(0, params.clipRelativeStartUs);
+  const rangeEndUs = Math.max(rangeStartUs, params.clipRelativeEndUs);
+  const requestedDurationUs = rangeEndUs - rangeStartUs;
+  if (requestedDurationUs <= 0) {
+    return [];
+  }
+  if (!params.loop) {
+    return [
+      {
+        clipRelativeStartUs: rangeStartUs,
+        durationUs: requestedDurationUs,
+        resourceStartUs: rangeStartUs + (params.trimStartUs ?? 0),
+        resourceEndUs: rangeEndUs + (params.trimStartUs ?? 0)
+      }
+    ];
+  }
+  const trimStartUs = params.trimStartUs ?? 0;
+  const periodUs = params.resourceDurationUs - trimStartUs;
+  if (periodUs <= 0) {
+    return [];
+  }
+  const segments = [];
+  let tUs = rangeStartUs;
+  while (tUs < rangeEndUs) {
+    const offsetInPeriodUs = tUs % periodUs;
+    const maxLenUs = periodUs - offsetInPeriodUs;
+    const lenUs = Math.min(rangeEndUs - tUs, maxLenUs);
+    if (lenUs <= 0) break;
+    const resourceStartUs = trimStartUs + offsetInPeriodUs;
+    segments.push({
+      clipRelativeStartUs: tUs,
+      durationUs: lenUs,
+      resourceStartUs,
+      resourceEndUs: resourceStartUs + lenUs
+    });
+    tUs += lenUs;
+  }
+  return segments;
+}
+export {
+  buildLoopedResourceSegments
+};
+//# sourceMappingURL=loop-utils.js.map
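Note: buildLoopedResourceSegments splits a clip-relative time range into resource-time segments, wrapping around the trim-adjusted resource duration when loop is true. Worked example (times in microseconds): a 2 s resource looped under 5 s of clip time yields 2 s + 2 s + 1 s segments.

    buildLoopedResourceSegments({
      clipRelativeStartUs: 0,
      clipRelativeEndUs: 5_000_000,  // 5 s of clip time to fill
      trimStartUs: 0,
      resourceDurationUs: 2_000_000, // 2 s source asset
      loop: true,
    });
    // => [
    //   { clipRelativeStartUs: 0,         durationUs: 2_000_000, resourceStartUs: 0, resourceEndUs: 2_000_000 },
    //   { clipRelativeStartUs: 2_000_000, durationUs: 2_000_000, resourceStartUs: 0, resourceEndUs: 2_000_000 },
    //   { clipRelativeStartUs: 4_000_000, durationUs: 1_000_000, resourceStartUs: 0, resourceEndUs: 1_000_000 },
    // ]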
package/dist/utils/loop-utils.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"loop-utils.js","sources":["../../src/utils/loop-utils.ts"],"sourcesContent":["import type { TimeUs } from '../model/types';\n\nexport interface LoopSegment {\n clipRelativeStartUs: TimeUs;\n durationUs: TimeUs;\n resourceStartUs: TimeUs;\n resourceEndUs: TimeUs;\n}\n\nexport function buildLoopedResourceSegments(params: {\n clipRelativeStartUs: TimeUs;\n clipRelativeEndUs: TimeUs;\n trimStartUs: TimeUs;\n resourceDurationUs: TimeUs;\n loop: boolean;\n}): LoopSegment[] {\n const rangeStartUs = Math.max(0, params.clipRelativeStartUs);\n const rangeEndUs = Math.max(rangeStartUs, params.clipRelativeEndUs);\n\n const requestedDurationUs = rangeEndUs - rangeStartUs;\n if (requestedDurationUs <= 0) {\n return [];\n }\n\n if (!params.loop) {\n return [\n {\n clipRelativeStartUs: rangeStartUs,\n durationUs: requestedDurationUs,\n resourceStartUs: rangeStartUs + (params.trimStartUs ?? 0),\n resourceEndUs: rangeEndUs + (params.trimStartUs ?? 0),\n },\n ];\n }\n\n const trimStartUs = params.trimStartUs ?? 0;\n const periodUs = params.resourceDurationUs - trimStartUs;\n if (periodUs <= 0) {\n return [];\n }\n\n const segments: LoopSegment[] = [];\n let tUs = rangeStartUs;\n\n while (tUs < rangeEndUs) {\n const offsetInPeriodUs = tUs % periodUs;\n const maxLenUs = periodUs - offsetInPeriodUs;\n const lenUs = Math.min(rangeEndUs - tUs, maxLenUs);\n if (lenUs <= 0) break;\n\n const resourceStartUs = trimStartUs + offsetInPeriodUs;\n segments.push({\n clipRelativeStartUs: tUs,\n durationUs: lenUs,\n resourceStartUs,\n resourceEndUs: resourceStartUs + lenUs,\n });\n\n tUs += lenUs;\n }\n\n return segments;\n}\n"],"names":[],"mappings":"AASO,SAAS,4BAA4B,QAM1B;AAChB,QAAM,eAAe,KAAK,IAAI,GAAG,OAAO,mBAAmB;AAC3D,QAAM,aAAa,KAAK,IAAI,cAAc,OAAO,iBAAiB;AAElE,QAAM,sBAAsB,aAAa;AACzC,MAAI,uBAAuB,GAAG;AAC5B,WAAO,CAAA;AAAA,EACT;AAEA,MAAI,CAAC,OAAO,MAAM;AAChB,WAAO;AAAA,MACL;AAAA,QACE,qBAAqB;AAAA,QACrB,YAAY;AAAA,QACZ,iBAAiB,gBAAgB,OAAO,eAAe;AAAA,QACvD,eAAe,cAAc,OAAO,eAAe;AAAA,MAAA;AAAA,IACrD;AAAA,EAEJ;AAEA,QAAM,cAAc,OAAO,eAAe;AAC1C,QAAM,WAAW,OAAO,qBAAqB;AAC7C,MAAI,YAAY,GAAG;AACjB,WAAO,CAAA;AAAA,EACT;AAEA,QAAM,WAA0B,CAAA;AAChC,MAAI,MAAM;AAEV,SAAO,MAAM,YAAY;AACvB,UAAM,mBAAmB,MAAM;AAC/B,UAAM,WAAW,WAAW;AAC5B,UAAM,QAAQ,KAAK,IAAI,aAAa,KAAK,QAAQ;AACjD,QAAI,SAAS,EAAG;AAEhB,UAAM,kBAAkB,cAAc;AACtC,aAAS,KAAK;AAAA,MACZ,qBAAqB;AAAA,MACrB,YAAY;AAAA,MACZ;AAAA,MACA,eAAe,kBAAkB;AAAA,IAAA,CAClC;AAED,WAAO;AAAA,EACT;AAEA,SAAO;AACT;"}
package/dist/workers/stages/compose/{video-compose.worker.CA2_Kpg-.js → video-compose.worker.KMZjuJuY.js}
CHANGED
@@ -1828,10 +1828,12 @@ class FrameRateConverter {
   clipDurationUs;
   frameDurationUs;
   trimStartUs;
+  totalFrameCount;
   // State for frame processing
   targetFrameIndex = 0;
   targetFrameTimeUs = 0;
   sourceFrameBuffer = [];
+  maxSourceTimestampUs = null;
   constructor(targetFps, clipDurationUs, trimStartUs = 0) {
     if (targetFps <= 0) {
       throw new Error(`Invalid target fps: ${targetFps}`);
@@ -1842,6 +1844,7 @@ class FrameRateConverter {
     this.clipDurationUs = clipDurationUs;
     this.frameDurationUs = Math.round(1e6 / targetFps);
     this.trimStartUs = trimStartUs;
+    this.totalFrameCount = Number.isFinite(clipDurationUs) ? Math.max(1, Math.round(clipDurationUs / this.frameDurationUs)) : null;
   }
   /**
   * Create a TransformStream that converts VFR frames to CFR frames
@@ -1854,6 +1857,7 @@ class FrameRateConverter {
         this.sourceFrameBuffer = [];
         this.sourceFrameCount = 0;
         this.outputFrameCount = 0;
+        this.maxSourceTimestampUs = null;
       },
       transform: (sourceFrame, controller) => {
         this.processSourceFrame(sourceFrame, controller);
@@ -1887,7 +1891,12 @@
     }
     this.sourceFrameBuffer.push(frameToBuffer);
     this.sourceFrameCount++;
-
+    const bufferedTs = frameToBuffer.timestamp ?? 0;
+    this.maxSourceTimestampUs = this.maxSourceTimestampUs === null ? bufferedTs : Math.max(this.maxSourceTimestampUs, bufferedTs);
+    while (this.shouldContinueOutput()) {
+      if (this.maxSourceTimestampUs !== null && this.targetFrameTimeUs > this.maxSourceTimestampUs) {
+        break;
+      }
       const closestFrame = this.findClosestFrame(this.targetFrameTimeUs);
       if (!closestFrame) {
         break;
@@ -1916,12 +1925,33 @@
   * Flush remaining target frames at end of stream
   */
   flushRemainingFrames(controller) {
-    while (this.sourceFrameBuffer.length > 0 && this.
+    while (this.sourceFrameBuffer.length > 0 && this.shouldContinueOutput()) {
+      if (this.maxSourceTimestampUs !== null && this.targetFrameTimeUs > this.maxSourceTimestampUs) {
+        break;
+      }
       const closestFrame = this.findClosestFrame(this.targetFrameTimeUs);
       if (!closestFrame) break;
      if (!this.outputTargetFrame(closestFrame, controller)) break;
     }
-    this.
+    if (!Number.isFinite(this.clipDurationUs) || this.totalFrameCount === null) {
+      this.cleanupAllFrames(null);
+      return;
+    }
+    const padFrame = this.getPadFrame();
+    if (!padFrame) {
+      this.cleanupAllFrames(null);
+      return;
+    }
+    while (this.shouldContinueOutput()) {
+      if (!this.outputTargetFrame(padFrame, controller)) {
+        break;
+      }
+    }
+    this.cleanupAllFrames(padFrame);
+    try {
+      padFrame.close();
+    } catch {
+    }
   }
   /**
   * Output a single target frame
@@ -1941,6 +1971,19 @@
       return false;
     }
   }
+  getPadFrame() {
+    if (this.sourceFrameBuffer.length === 0) {
+      return null;
+    }
+    const last = this.sourceFrameBuffer[this.sourceFrameBuffer.length - 1] ?? null;
+    return last;
+  }
+  shouldContinueOutput() {
+    if (this.totalFrameCount === null) {
+      return this.targetFrameTimeUs < this.clipDurationUs;
+    }
+    return this.targetFrameIndex < this.totalFrameCount;
+  }
   /**
   * Clean up all buffered frames (except the specified frame to keep)
   */
@@ -2511,4 +2554,4 @@ export {
   VideoComposeWorker,
   videoCompose_worker as default
 };
-//# sourceMappingURL=video-compose.worker.
+//# sourceMappingURL=video-compose.worker.KMZjuJuY.js.map