@meframe/core 0.0.30-beta → 0.0.31
This diff compares publicly available package versions as published to their respective public registries. It is provided for informational purposes only.
- package/dist/Meframe.d.ts +0 -17
- package/dist/Meframe.d.ts.map +1 -1
- package/dist/Meframe.js +0 -18
- package/dist/Meframe.js.map +1 -1
- package/dist/_virtual/_commonjsHelpers.js +7 -0
- package/dist/_virtual/_commonjsHelpers.js.map +1 -0
- package/dist/cache/CacheManager.d.ts +7 -49
- package/dist/cache/CacheManager.d.ts.map +1 -1
- package/dist/cache/CacheManager.js +3 -57
- package/dist/cache/CacheManager.js.map +1 -1
- package/dist/cache/resource/ResourceCache.d.ts +2 -2
- package/dist/cache/resource/ResourceCache.d.ts.map +1 -1
- package/dist/cache/resource/ResourceCache.js.map +1 -1
- package/dist/controllers/PlaybackController.d.ts +2 -1
- package/dist/controllers/PlaybackController.d.ts.map +1 -1
- package/dist/controllers/PlaybackController.js +21 -5
- package/dist/controllers/PlaybackController.js.map +1 -1
- package/dist/medeo-fe/node_modules/.pnpm/mp4-muxer@5.2.2/node_modules/mp4-muxer/build/mp4-muxer.js.map +1 -0
- package/dist/{node_modules → medeo-fe/node_modules}/.pnpm/mp4box@0.5.4/node_modules/mp4box/dist/mp4box.all.js +7 -2
- package/dist/medeo-fe/node_modules/.pnpm/mp4box@0.5.4/node_modules/mp4box/dist/mp4box.all.js.map +1 -0
- package/dist/model/types.d.ts +0 -1
- package/dist/model/types.d.ts.map +1 -1
- package/dist/model/types.js.map +1 -1
- package/dist/orchestrator/GlobalAudioSession.d.ts +3 -2
- package/dist/orchestrator/GlobalAudioSession.d.ts.map +1 -1
- package/dist/orchestrator/GlobalAudioSession.js +18 -13
- package/dist/orchestrator/GlobalAudioSession.js.map +1 -1
- package/dist/orchestrator/Orchestrator.d.ts.map +1 -1
- package/dist/orchestrator/Orchestrator.js +15 -17
- package/dist/orchestrator/Orchestrator.js.map +1 -1
- package/dist/stages/compose/VideoComposer.d.ts.map +1 -1
- package/dist/stages/compose/VideoComposer.js +4 -0
- package/dist/stages/compose/VideoComposer.js.map +1 -1
- package/dist/stages/demux/MP4Demuxer.js +6 -7
- package/dist/stages/demux/MP4Demuxer.js.map +1 -1
- package/dist/stages/demux/MP4IndexParser.js +3 -4
- package/dist/stages/demux/MP4IndexParser.js.map +1 -1
- package/dist/stages/load/ResourceLoader.d.ts +6 -14
- package/dist/stages/load/ResourceLoader.d.ts.map +1 -1
- package/dist/stages/load/ResourceLoader.js +37 -68
- package/dist/stages/load/ResourceLoader.js.map +1 -1
- package/dist/stages/load/index.d.ts +0 -2
- package/dist/stages/load/index.d.ts.map +1 -1
- package/dist/stages/load/types.d.ts +3 -10
- package/dist/stages/load/types.d.ts.map +1 -1
- package/dist/stages/mux/MP4Muxer.js +1 -1
- package/dist/utils/mp4box.d.ts +4 -0
- package/dist/utils/mp4box.d.ts.map +1 -0
- package/dist/utils/mp4box.js +17 -0
- package/dist/utils/mp4box.js.map +1 -0
- package/dist/workers/{MP4Demuxer.BEa6PLJm.js → MP4Demuxer.DxMpB08B.js} +49 -11
- package/dist/workers/MP4Demuxer.DxMpB08B.js.map +1 -0
- package/dist/workers/stages/compose/{video-compose.worker.DHQ8B105.js → video-compose.worker.BhpN-lxf.js} +5 -1
- package/dist/workers/stages/compose/video-compose.worker.BhpN-lxf.js.map +1 -0
- package/dist/workers/stages/demux/{audio-demux.worker._VRQdLdv.js → audio-demux.worker.Fd8sRTYi.js} +2 -2
- package/dist/workers/stages/demux/{audio-demux.worker._VRQdLdv.js.map → audio-demux.worker.Fd8sRTYi.js.map} +1 -1
- package/dist/workers/stages/demux/{video-demux.worker.CSkxGtmx.js → video-demux.worker.DqFOe12v.js} +2 -2
- package/dist/workers/stages/demux/{video-demux.worker.CSkxGtmx.js.map → video-demux.worker.DqFOe12v.js.map} +1 -1
- package/dist/workers/worker-manifest.json +3 -3
- package/package.json +1 -1
- package/dist/cache/l2/L2Cache.js +0 -329
- package/dist/cache/l2/L2Cache.js.map +0 -1
- package/dist/cache/l2/L2OPFSStore.js +0 -89
- package/dist/cache/l2/L2OPFSStore.js.map +0 -1
- package/dist/cache/storage/indexeddb/ChunkRecordStore.js +0 -180
- package/dist/cache/storage/indexeddb/ChunkRecordStore.js.map +0 -1
- package/dist/node_modules/.pnpm/mp4-muxer@5.2.2/node_modules/mp4-muxer/build/mp4-muxer.js.map +0 -1
- package/dist/node_modules/.pnpm/mp4box@0.5.4/node_modules/mp4box/dist/mp4box.all.js.map +0 -1
- package/dist/stages/load/EventHandlers.d.ts +0 -26
- package/dist/stages/load/EventHandlers.d.ts.map +0 -1
- package/dist/stages/load/EventHandlers.js +0 -42
- package/dist/stages/load/EventHandlers.js.map +0 -1
- package/dist/stages/load/WindowByteRangeResolver.d.ts +0 -47
- package/dist/stages/load/WindowByteRangeResolver.d.ts.map +0 -1
- package/dist/stages/load/WindowByteRangeResolver.js +0 -270
- package/dist/stages/load/WindowByteRangeResolver.js.map +0 -1
- package/dist/workers/MP4Demuxer.BEa6PLJm.js.map +0 -1
- package/dist/workers/stages/compose/video-compose.worker.DHQ8B105.js.map +0 -1
- /package/dist/{node_modules → medeo-fe/node_modules}/.pnpm/mp4-muxer@5.2.2/node_modules/mp4-muxer/build/mp4-muxer.js +0 -0
package/dist/workers/stages/demux/{video-demux.worker.CSkxGtmx.js.map → video-demux.worker.DqFOe12v.js.map}
@@ -1 +1 @@
-{"version":3,"file":"video-demux.worker.CSkxGtmx.js","sources":["../../../../src/stages/demux/video-demux.worker.ts"],"sourcesContent":[…],"names":[],"mappings":"…"}
+{"version":3,"file":"video-demux.worker.DqFOe12v.js","sources":["../../../../src/stages/demux/video-demux.worker.ts"],"sourcesContent":[…],"names":[],"mappings":"…"}
(The embedded sourcesContent and mappings are identical in both versions; only the emitted bundle file name changes with the content hash.)
package/dist/workers/worker-manifest.json
CHANGED
@@ -1,9 +1,9 @@
 {
   "audio-compose.worker": "stages/compose/audio-compose.worker.rW63uN6z.js",
-  "video-compose.worker": "stages/compose/video-compose.worker.DHQ8B105.js",
+  "video-compose.worker": "stages/compose/video-compose.worker.BhpN-lxf.js",
   "audio-decode.worker": "stages/decode/audio-decode.worker.CP8bXXa4.js",
   "video-decode.worker": "stages/decode/video-decode.worker.BIspTxgV.js",
-  "audio-demux.worker": "stages/demux/audio-demux.worker._VRQdLdv.js",
-  "video-demux.worker": "stages/demux/video-demux.worker.CSkxGtmx.js",
+  "audio-demux.worker": "stages/demux/audio-demux.worker.Fd8sRTYi.js",
+  "video-demux.worker": "stages/demux/video-demux.worker.DqFOe12v.js",
   "video-encode.worker": "stages/encode/video-encode.worker.u2o7iXCT.js"
 }
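The manifest above maps stable worker names to content-hashed bundle files, so callers resolve workers by name rather than by hash. Below is a minimal sketch of how such a manifest could be consumed at runtime; it is illustrative only — `loadWorkerManifest`, `createWorker`, and the base-URL handling are assumptions, not @meframe/core APIs.

```ts
// Hypothetical consumer of a worker-manifest.json like the one above.
// Hash changes (e.g. video-demux.worker.CSkxGtmx.js → video-demux.worker.DqFOe12v.js)
// never require changes in calling code, because lookups go through the stable name.
type WorkerManifest = Record<string, string>;

async function loadWorkerManifest(baseUrl: string): Promise<WorkerManifest> {
  // Assumes the manifest is served next to the dist/workers/ bundles.
  const res = await fetch(new URL('workers/worker-manifest.json', baseUrl));
  return (await res.json()) as WorkerManifest;
}

function createWorker(manifest: WorkerManifest, name: string, baseUrl: string): Worker {
  const path = manifest[name];
  if (!path) throw new Error(`Unknown worker: ${name}`);
  return new Worker(new URL(`workers/${path}`, baseUrl), { type: 'module' });
}

// Usage (illustrative):
// const manifest = await loadWorkerManifest(import.meta.url);
// const demux = createWorker(manifest, 'video-demux.worker', import.meta.url);
```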
package/package.json
CHANGED
package/dist/cache/l2/L2Cache.js
DELETED
@@ -1,329 +0,0 @@
-import { binarySearchRange } from "../../utils/binary-search.js";
-import { L2OPFSStore } from "./L2OPFSStore.js";
-import { ChunkRecordStore } from "../storage/indexeddb/ChunkRecordStore.js";
-class L2Cache {
-  opfsStore;
-  dbStore;
-  maxSize;
-  projectId;
-  initPromise = null;
-  constructor(config) {
-    this.maxSize = config.maxSizeMB * 1024 * 1024;
-    this.projectId = config.projectId;
-    this.opfsStore = new L2OPFSStore();
-    this.dbStore = new ChunkRecordStore();
-  }
-  async init() {
-    if (this.initPromise) return this.initPromise;
-    this.initPromise = (async () => {
-      await Promise.all([this.opfsStore.init(), this.dbStore.init()]);
-    })();
-    return this.initPromise;
-  }
-  async get(timeUs, clipId) {
-    await this.init();
-    const records = await this.dbStore.collectRecordsByClipId(this.projectId, clipId);
-    for (const record of records) {
-      const batch = binarySearchRange(record.batches, timeUs, (b, _index) => ({
-        start: b.startUs,
-        end: b.startUs + b.durationUs
-      }));
-      if (!batch) {
-        continue;
-      }
-      const chunkData = await this.opfsStore.read(record.fileName, batch, this.projectId);
-      if (!chunkData) continue;
-      await this.dbStore.updateLastAccess(this.projectId, record.clipId, record.track);
-      return this.createChunk(chunkData, timeUs, record.track, batch.type, batch.durationUs);
-    }
-    return null;
-  }
-  async put(clipId, chunks, track, options) {
-    await this.init();
-    if (chunks.length === 0) return;
-    const fileName = `clip-${clipId}-${track[0]}1.${track === "video" ? "webm" : "m4a"}`;
-    let existingRecord = await this.dbStore.getRecord(this.projectId, clipId, track);
-    if (existingRecord) {
-      const fileExists = await this.opfsStore.fileExists(existingRecord.fileName, this.projectId);
-      if (!fileExists) {
-        await this.deleteEntry(clipId, track);
-        existingRecord = null;
-      }
-    }
-    let chunksToWrite = chunks;
-    if (existingRecord && existingRecord.batches.length > 0) {
-      const lastBatch = existingRecord.batches[existingRecord.batches.length - 1];
-      if (lastBatch) {
-        const lastTimestamp = lastBatch.startUs;
-        chunksToWrite = chunks.filter((chunk) => chunk.timestamp > lastTimestamp);
-        if (chunksToWrite.length === 0) {
-          return;
-        }
-      }
-    }
-    const newBatches = await this.opfsStore.append(
-      fileName,
-      chunksToWrite,
-      existingRecord?.batches,
-      this.projectId
-    );
-    const record = {
-      projectId: this.projectId,
-      clipId,
-      track,
-      fileName,
-      batches: existingRecord?.batches ? [...existingRecord.batches, ...newBatches] : newBatches,
-      lastAccess: Date.now(),
-      totalBytes: (existingRecord?.totalBytes || 0) + newBatches.reduce((sum, b) => sum + b.byteLength, 0),
-      isComplete: options?.isComplete ?? existingRecord?.isComplete ?? false,
-      expectedDurationUs: options?.expectedDurationUs ?? existingRecord?.expectedDurationUs,
-      metadata: options?.metadata ?? existingRecord?.metadata
-    };
-    await this.dbStore.putRecord(record);
-    await this.enforceQuota();
-  }
-  async invalidateRange(startUs, endUs, clipId) {
-    await this.init();
-    let records;
-    if (clipId) {
-      records = await this.dbStore.collectRecordsByClipId(this.projectId, clipId);
-    } else {
-      const allRecords = await this.dbStore.getAllRecords();
-      records = allRecords.filter((r) => r.projectId === this.projectId);
-    }
-    for (const record of records) {
-      const hasOverlap = record.batches.some((batch) => {
-        const batchEnd = batch.startUs + batch.durationUs;
-        return batch.startUs < endUs && batchEnd > startUs;
-      });
-      if (hasOverlap) {
-        await this.deleteEntry(record.clipId, record.track);
-      }
-    }
-  }
-  /**
-   * Check if clip has cached data in L2
-   */
-  async hasClip(clipId, track) {
-    await this.init();
-    const record = await this.dbStore.getRecord(this.projectId, clipId, track);
-    return record !== null && record.batches && record.batches.length > 0;
-  }
-  /**
-   * Check if clip has complete cached data in L2
-   */
-  async hasCompleteClip(clipId, track) {
-    await this.init();
-    const record = await this.dbStore.getRecord(this.projectId, clipId, track);
-    return record?.isComplete === true;
-  }
-  /**
-   * Mark clip as complete in L2 cache
-   */
-  async markComplete(clipId, track) {
-    await this.init();
-    const record = await this.dbStore.getRecord(this.projectId, clipId, track);
-    if (record) {
-      record.isComplete = true;
-      record.lastAccess = Date.now();
-      await this.dbStore.putRecord(record);
-    }
-  }
-  async invalidateClip(clipId) {
-    await this.init();
-    const records = await this.dbStore.collectRecordsByClipId(this.projectId, clipId);
-    for (const record of records) {
-      await this.deleteEntry(record.clipId, record.track);
-    }
-  }
-  /**
-   * Create a readable stream of encoded chunks for export
-   * Reads chunks in timestamp order from OPFS
-   */
-  async createReadStream(clipId, track) {
-    await this.init();
-    const record = await this.dbStore.getRecord(this.projectId, clipId, track);
-    if (!record || record.batches.length === 0) {
-      return null;
-    }
-    const batches = [...record.batches];
-    let batchIndex = 0;
-    return new ReadableStream({
-      pull: async (controller) => {
-        if (batchIndex >= batches.length) {
-          controller.close();
-          return;
-        }
-        const batch = batches[batchIndex];
-        if (!batch) {
-          controller.close();
-          return;
-        }
-        try {
-          const chunkData = await this.opfsStore.read(record.fileName, batch, this.projectId);
-          if (!chunkData) {
-            controller.close();
-            return;
-          }
-          const chunk = this.createChunk(
-            chunkData,
-            batch.startUs,
-            track,
-            batch.type,
-            batch.durationUs
-          );
-          controller.enqueue(chunk);
-          batchIndex++;
-        } catch (error) {
-          if (error instanceof DOMException && error.name === "NotFoundError") {
-            controller.close();
-          } else {
-            controller.error(error);
-          }
-        }
-      }
-    });
-  }
-  async clear() {
-    await this.init();
-    try {
-      await this.dbStore.clear();
-    } catch (error) {
-      console.error("[L2Cache] Failed to clear IndexedDB:", error);
-      throw error;
-    }
-    try {
-      await this.opfsStore.clear(this.projectId);
-    } catch (error) {
-      if (error?.name !== "NotFoundError") {
-        console.warn("[L2Cache] Failed to clear OPFS:", error);
-      }
-    }
-  }
-  createChunk(data, timeUs, track, chunkType = "key", durationUs = 0) {
-    if (track === "video") {
-      return new EncodedVideoChunk({
-        type: chunkType,
-        timestamp: timeUs,
-        duration: durationUs,
-        data
-      });
-    } else {
-      return new EncodedAudioChunk({
-        type: chunkType,
-        timestamp: timeUs,
-        duration: durationUs,
-        data
-      });
-    }
-  }
-  async deleteEntry(clipId, track, projectId) {
-    const targetProjectId = projectId ?? this.projectId;
-    const record = await this.dbStore.getRecord(targetProjectId, clipId, track);
-    if (record) {
-      await this.opfsStore.deleteFile(record.fileName, targetProjectId);
-    }
-    await this.dbStore.deleteRecord(targetProjectId, clipId, track);
-  }
-  async enforceQuota() {
-    const estimate = await navigator.storage.estimate();
-    const usage = estimate.usage || 0;
-    if (usage <= this.maxSize) return;
-    console.warn(
-      `[L2Cache] Quota exceeded! Deleting oldest entries: usage=${usage}, maxSize=${this.maxSize}`
-    );
-    const toDelete = usage - this.maxSize;
-    let bytesDeleted = 0;
-    const records = await this.dbStore.getAllRecordsSortedByAccess();
-    for (const record of records) {
-      if (bytesDeleted >= toDelete) break;
-      if (record.projectId !== this.projectId) {
-        await this.deleteEntry(record.clipId, record.track, record.projectId);
-        bytesDeleted += record.totalBytes;
-      }
-    }
-    if (bytesDeleted < toDelete) {
-      for (const record of records) {
-        if (bytesDeleted >= toDelete) break;
-        if (record.projectId === this.projectId) {
-          await this.deleteEntry(record.clipId, record.track);
-          bytesDeleted += record.totalBytes;
-        }
-      }
-    }
-  }
-  getMetadata() {
-    return {
-      maxSizeMB: this.maxSize / (1024 * 1024),
-      usedSizeMB: 0,
-      // Would need to track actual usage
-      entries: 0,
-      // Would need to track actual entries
-      hitRate: 0
-      // Would need to track hits and misses
-    };
-  }
-  async hasAvailableQuota(sizeMB) {
-    if (typeof navigator === "undefined" || !navigator.storage?.estimate) {
-      throw new Error("Storage API not available");
-    }
-    const estimate = await navigator.storage.estimate();
-    const availableMB = ((estimate.quota || 0) - (estimate.usage || 0)) / (1024 * 1024);
-    return availableMB >= sizeMB;
-  }
-  /**
-   * Get chunk metadata (decoderConfig) for a specific clip
-   */
-  async getClipMetadata(clipId, track) {
-    await this.init();
-    const record = await this.dbStore.getRecord(this.projectId, clipId, track);
-    return record?.metadata || null;
-  }
-  /**
-   * List all cached projects
-   */
-  async listProjects() {
-    await this.init();
-    const records = await this.dbStore.getAllRecords();
-    const projects = /* @__PURE__ */ new Map();
-    for (const record of records) {
-      const existing = projects.get(record.projectId) || {
-        totalBytes: 0,
-        clipCount: 0,
-        lastAccess: 0
-      };
-      projects.set(record.projectId, {
-        totalBytes: existing.totalBytes + record.totalBytes,
-        clipCount: existing.clipCount + 1,
-        lastAccess: Math.max(existing.lastAccess, record.lastAccess)
-      });
-    }
-    return Array.from(projects.entries()).map(([projectId, stats]) => ({
-      projectId,
-      ...stats
-    }));
-  }
-  /**
-   * Clear all cache data for a specific project
-   */
-  async clearProject(targetProjectId) {
-    await this.init();
-    const records = await this.dbStore.getRecordsByProjectId(targetProjectId);
-    for (const record of records) {
-      await this.deleteEntry(record.clipId, record.track, targetProjectId);
-    }
-    await this.opfsStore.deleteProjectDirectory(targetProjectId);
-  }
-  /**
-   * Get cache size for a specific project
-   */
-  async getProjectSize(projectId) {
-    await this.init();
-    const records = await this.dbStore.getRecordsByProjectId(projectId);
-    return records.reduce((sum, r) => sum + r.totalBytes, 0);
-  }
-}
-export {
-  L2Cache
-};
-//# sourceMappingURL=L2Cache.js.map
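For context, the removed L2Cache coordinated OPFS file storage (L2OPFSStore) with IndexedDB metadata (ChunkRecordStore). Below is a minimal usage sketch of the API deleted above; it is illustrative only — the deep import path, config values, clip id, and timestamp are assumptions, not code from the package.

```ts
// Illustrative only: exercises the L2Cache surface that 0.0.31 removes.
// The import path below is an assumption and would only resolve in 0.0.30-beta.
import { L2Cache } from '@meframe/core/dist/cache/l2/L2Cache.js';

async function demoL2Cache(chunks: EncodedVideoChunk[]) {
  // Constructor config matches the deleted code: maxSizeMB and projectId.
  const cache = new L2Cache({ maxSizeMB: 512, projectId: 'project-1' });
  await cache.init();

  // Persist encoded video chunks for a clip and mark the clip complete.
  await cache.put('clip-42', chunks, 'video', { isComplete: true });

  // Point lookup by timestamp (microseconds) within the clip.
  const chunk = await cache.get(1_000_000, 'clip-42');

  // Stream everything back in timestamp order, e.g. for export.
  const stream = await cache.createReadStream('clip-42', 'video');

  return { chunk, stream };
}
```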
package/dist/cache/l2/L2Cache.js.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"L2Cache.js","sources":["../../../src/cache/l2/L2Cache.ts"],"sourcesContent":[…],"names":[],"mappings":"…"}
(The deleted map embedded the original L2Cache.ts source and mappings, elided here.)
eAAW,UAAU,SAAS;AAC5B,YAAM,KAAK,YAAY,OAAO,QAAQ,OAAO,OAAO,eAAe;AAAA,IACrE;AAGA,UAAM,KAAK,UAAU,uBAAuB,eAAe;AAAA,EAC7D;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,eAAe,WAAoC;AACvD,UAAM,KAAK,KAAA;AAEX,UAAM,UAAU,MAAM,KAAK,QAAQ,sBAAsB,SAAS;AAClE,WAAO,QAAQ,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,YAAY,CAAC;AAAA,EACzD;AACF;"}
@@ -1,89 +0,0 @@
-import { OPFSManager } from "../storage/opfs/OPFSManager.js";
-class L2OPFSStore {
-  opfsManager;
-  constructor() {
-    this.opfsManager = new OPFSManager();
-  }
-  async init() {
-    await this.opfsManager.init();
-  }
-  /**
-   * Read chunk data from OPFS file
-   */
-  async read(fileName, batch, projectId) {
-    try {
-      return await this.opfsManager.readRange(
-        { projectId, prefix: "l2", fileName },
-        batch.byteOffset,
-        batch.byteOffset + batch.byteLength
-      );
-    } catch (error) {
-      if (error?.name === "NotFoundError") {
-        return null;
-      }
-      throw error;
-    }
-  }
-  /**
-   * Append chunks to OPFS file (or create new file)
-   */
-  async append(fileName, chunks, existingBatches, projectId) {
-    const path = { projectId, prefix: "l2", fileName };
-    let offset = 0;
-    if (existingBatches && existingBatches.length > 0) {
-      const lastBatch = existingBatches[existingBatches.length - 1];
-      if (lastBatch) {
-        offset = lastBatch.byteOffset + lastBatch.byteLength;
-      }
-    }
-    const writable = await this.opfsManager.createWritableStream(path);
-    const batches = [];
-    for (const chunk of chunks) {
-      const data = await this.chunkToArrayBuffer(chunk);
-      await writable.write({ type: "write", position: offset, data });
-      batches.push({
-        startUs: chunk.timestamp,
-        durationUs: chunk.duration || 0,
-        byteOffset: offset,
-        byteLength: data.byteLength,
-        type: chunk.type
-      });
-      offset += data.byteLength;
-    }
-    await writable.close();
-    return batches;
-  }
-  /**
-   * Delete a file from OPFS
-   */
-  async deleteFile(fileName, projectId) {
-    await this.opfsManager.deleteFile({ projectId, prefix: "l2", fileName });
-  }
-  /**
-   * Delete entire project directory
-   */
-  async deleteProjectDirectory(projectId) {
-    await this.opfsManager.deleteProjectDirectory(projectId, "l2");
-  }
-  /**
-   * Check if a file exists in OPFS
-   */
-  async fileExists(fileName, projectId) {
-    return await this.opfsManager.exists({ projectId, prefix: "l2", fileName });
-  }
-  /**
-   * Clear all OPFS data for current project
-   */
-  async clear(projectId) {
-    await this.deleteProjectDirectory(projectId);
-  }
-  async chunkToArrayBuffer(chunk) {
-    const buffer = new ArrayBuffer(chunk.byteLength);
-    chunk.copyTo(buffer);
-    return buffer;
-  }
-}
-export {
-  L2OPFSStore
-};
-//# sourceMappingURL=L2OPFSStore.js.map
@@ -1 +0,0 @@
{"version":3,"file":"L2OPFSStore.js","sources":["../../../src/cache/l2/L2OPFSStore.ts"],"sourcesContent":["import { OPFSManager } from '../storage/opfs/OPFSManager';\nimport type { ChunkBatch } from '../storage/opfs/types';\n\n/**\n * L2-specific OPFS wrapper\n * Adapts OPFSManager for L2 cache chunk operations\n */\nexport class L2OPFSStore {\n private opfsManager: OPFSManager;\n\n constructor() {\n this.opfsManager = new OPFSManager();\n }\n\n async init(): Promise<void> {\n await this.opfsManager.init();\n }\n\n /**\n * Read chunk data from OPFS file\n */\n async read(fileName: string, batch: ChunkBatch, projectId: string): Promise<ArrayBuffer | null> {\n try {\n return await this.opfsManager.readRange(\n { projectId, prefix: 'l2', fileName },\n batch.byteOffset,\n batch.byteOffset + batch.byteLength\n );\n } catch (error) {\n if ((error as any)?.name === 'NotFoundError') {\n return null;\n }\n throw error;\n }\n }\n\n /**\n * Append chunks to OPFS file (or create new file)\n */\n async append(\n fileName: string,\n chunks: Array<EncodedVideoChunk | EncodedAudioChunk>,\n existingBatches: ChunkBatch[] | undefined,\n projectId: string\n ): Promise<ChunkBatch[]> {\n const path = { projectId, prefix: 'l2' as const, fileName };\n\n // Calculate starting offset from existing batches\n let offset = 0;\n if (existingBatches && existingBatches.length > 0) {\n const lastBatch = existingBatches[existingBatches.length - 1];\n if (lastBatch) {\n offset = lastBatch.byteOffset + lastBatch.byteLength;\n }\n }\n\n const writable = await this.opfsManager.createWritableStream(path);\n const batches: ChunkBatch[] = [];\n\n for (const chunk of chunks) {\n const data = await this.chunkToArrayBuffer(chunk);\n await writable.write({ type: 'write', position: offset, data });\n\n batches.push({\n startUs: chunk.timestamp,\n durationUs: chunk.duration || 0,\n byteOffset: offset,\n byteLength: data.byteLength,\n type: chunk.type,\n });\n\n offset += data.byteLength;\n }\n\n await writable.close();\n return batches;\n }\n\n /**\n * Delete a file from OPFS\n */\n async deleteFile(fileName: string, projectId: string): Promise<void> {\n await this.opfsManager.deleteFile({ projectId, prefix: 'l2', fileName });\n }\n\n /**\n * Delete entire project directory\n */\n async deleteProjectDirectory(projectId: string): Promise<void> {\n await this.opfsManager.deleteProjectDirectory(projectId, 'l2');\n }\n\n /**\n * Check if a file exists in OPFS\n */\n async fileExists(fileName: string, projectId: string): Promise<boolean> {\n return await this.opfsManager.exists({ projectId, prefix: 'l2', fileName });\n }\n\n /**\n * Clear all OPFS data for current project\n */\n async clear(projectId: string): Promise<void> {\n await this.deleteProjectDirectory(projectId);\n }\n\n private async chunkToArrayBuffer(\n chunk: EncodedVideoChunk | EncodedAudioChunk\n ): Promise<ArrayBuffer> {\n const buffer = new ArrayBuffer(chunk.byteLength);\n chunk.copyTo(buffer);\n return buffer;\n 
}\n}\n"],"names":[],"mappings":";AAOO,MAAM,YAAY;AAAA,EACf;AAAA,EAER,cAAc;AACZ,SAAK,cAAc,IAAI,YAAA;AAAA,EACzB;AAAA,EAEA,MAAM,OAAsB;AAC1B,UAAM,KAAK,YAAY,KAAA;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,KAAK,UAAkB,OAAmB,WAAgD;AAC9F,QAAI;AACF,aAAO,MAAM,KAAK,YAAY;AAAA,QAC5B,EAAE,WAAW,QAAQ,MAAM,SAAA;AAAA,QAC3B,MAAM;AAAA,QACN,MAAM,aAAa,MAAM;AAAA,MAAA;AAAA,IAE7B,SAAS,OAAO;AACd,UAAK,OAAe,SAAS,iBAAiB;AAC5C,eAAO;AAAA,MACT;AACA,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,OACJ,UACA,QACA,iBACA,WACuB;AACvB,UAAM,OAAO,EAAE,WAAW,QAAQ,MAAe,SAAA;AAGjD,QAAI,SAAS;AACb,QAAI,mBAAmB,gBAAgB,SAAS,GAAG;AACjD,YAAM,YAAY,gBAAgB,gBAAgB,SAAS,CAAC;AAC5D,UAAI,WAAW;AACb,iBAAS,UAAU,aAAa,UAAU;AAAA,MAC5C;AAAA,IACF;AAEA,UAAM,WAAW,MAAM,KAAK,YAAY,qBAAqB,IAAI;AACjE,UAAM,UAAwB,CAAA;AAE9B,eAAW,SAAS,QAAQ;AAC1B,YAAM,OAAO,MAAM,KAAK,mBAAmB,KAAK;AAChD,YAAM,SAAS,MAAM,EAAE,MAAM,SAAS,UAAU,QAAQ,MAAM;AAE9D,cAAQ,KAAK;AAAA,QACX,SAAS,MAAM;AAAA,QACf,YAAY,MAAM,YAAY;AAAA,QAC9B,YAAY;AAAA,QACZ,YAAY,KAAK;AAAA,QACjB,MAAM,MAAM;AAAA,MAAA,CACb;AAED,gBAAU,KAAK;AAAA,IACjB;AAEA,UAAM,SAAS,MAAA;AACf,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WAAW,UAAkB,WAAkC;AACnE,UAAM,KAAK,YAAY,WAAW,EAAE,WAAW,QAAQ,MAAM,UAAU;AAAA,EACzE;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,uBAAuB,WAAkC;AAC7D,UAAM,KAAK,YAAY,uBAAuB,WAAW,IAAI;AAAA,EAC/D;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WAAW,UAAkB,WAAqC;AACtE,WAAO,MAAM,KAAK,YAAY,OAAO,EAAE,WAAW,QAAQ,MAAM,UAAU;AAAA,EAC5E;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,MAAM,WAAkC;AAC5C,UAAM,KAAK,uBAAuB,SAAS;AAAA,EAC7C;AAAA,EAEA,MAAc,mBACZ,OACsB;AACtB,UAAM,SAAS,IAAI,YAAY,MAAM,UAAU;AAC/C,UAAM,OAAO,MAAM;AACnB,WAAO;AAAA,EACT;AACF;"}
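For reference, the L2OPFSStore deleted above exposed a small append/read API over OPFS-backed chunk files. The sketch below only reconstructs that call pattern as a usage illustration; the import path, the projectId/fileName literals, and the helper name are assumptions and not part of the package. It assumes a browser context with OPFS and WebCodecs available, and the 0.0.30-beta build where the class still exists.

// Sketch only: exercises the removed L2OPFSStore API (0.0.30-beta).
// Import path and literals below are illustrative assumptions.
import { L2OPFSStore } from './cache/l2/L2OPFSStore';

async function cacheAndReadBack(chunks: EncodedVideoChunk[]): Promise<void> {
  const store = new L2OPFSStore();
  await store.init();

  const projectId = 'demo-project';    // assumed project id
  const fileName = 'clip-0.l2chunks';  // assumed file name

  // Persist the encoded chunks; the returned batches record where each
  // chunk landed in the file (byteOffset/byteLength) plus timestamp metadata.
  const batches = await store.append(fileName, chunks, undefined, projectId);

  // Read the first batch back as raw bytes (read() resolves to null if the
  // underlying OPFS file is missing).
  const first = batches[0];
  if (first) {
    const bytes = await store.read(fileName, first, projectId);
    console.log('read back', bytes?.byteLength, 'bytes of a', first.type, 'chunk');
  }
}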