@meframe/core 0.1.6 → 0.1.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cache/CacheManager.d.ts +12 -0
- package/dist/cache/CacheManager.d.ts.map +1 -1
- package/dist/cache/CacheManager.js +11 -0
- package/dist/cache/CacheManager.js.map +1 -1
- package/dist/cache/l1/AudioL1Cache.d.ts +12 -0
- package/dist/cache/l1/AudioL1Cache.d.ts.map +1 -1
- package/dist/cache/l1/AudioL1Cache.js +36 -0
- package/dist/cache/l1/AudioL1Cache.js.map +1 -1
- package/dist/controllers/PlaybackController.d.ts +16 -28
- package/dist/controllers/PlaybackController.d.ts.map +1 -1
- package/dist/controllers/PlaybackController.js +274 -253
- package/dist/controllers/PlaybackController.js.map +1 -1
- package/dist/controllers/PlaybackStateMachine.d.ts +16 -0
- package/dist/controllers/PlaybackStateMachine.d.ts.map +1 -0
- package/dist/controllers/PlaybackStateMachine.js +308 -0
- package/dist/controllers/PlaybackStateMachine.js.map +1 -0
- package/dist/controllers/index.d.ts +2 -1
- package/dist/controllers/index.d.ts.map +1 -1
- package/dist/controllers/types.d.ts +165 -2
- package/dist/controllers/types.d.ts.map +1 -1
- package/dist/controllers/types.js +53 -0
- package/dist/controllers/types.js.map +1 -0
- package/dist/orchestrator/GlobalAudioSession.d.ts +4 -1
- package/dist/orchestrator/GlobalAudioSession.d.ts.map +1 -1
- package/dist/orchestrator/GlobalAudioSession.js +40 -3
- package/dist/orchestrator/GlobalAudioSession.js.map +1 -1
- package/dist/utils/timeout-utils.d.ts +9 -0
- package/dist/utils/timeout-utils.d.ts.map +1 -0
- package/package.json +1 -1
package/dist/controllers/PlaybackController.js
@@ -1,31 +1,35 @@
+import { PlaybackActionType, PlaybackState, PlaybackCommandType } from "./types.js";
 import { MeframeEvent } from "../event/events.js";
 import { WaiterReplacedError } from "../utils/errors.js";
 import { VideoComposer } from "../stages/compose/VideoComposer.js";
 import { isVideoClip } from "../model/types.js";
+import { PlaybackStateMachine } from "./PlaybackStateMachine.js";
 class PlaybackController {
 orchestrator;
 eventBus;
 canvas;
 videoComposer = null;
-// Playback
+// Playback time (external)
 currentTimeUs = 0;
-state = "idle";
 playbackRate = 1;
 volume = 1;
 loop = false;
-//
+// Time base
 rafId = null;
 startTimeUs = 0;
-//
-// Frame
+// AudioContext timeline origin (microseconds)
+// Frame stats
 frameCount = 0;
 lastFrameTime = 0;
 fps = 0;
+// Audio
 audioContext;
 audioSession;
-
-
-
+lastAudioScheduleTime = 0;
+AUDIO_SCHEDULE_INTERVAL = 1e5;
+// 100ms
+// State machine
+fsm = new PlaybackStateMachine();
 // Unified window management for both video and audio
 windowEnd = 0;
 WINDOW_DURATION = 3e6;
@@ -33,10 +37,6 @@ class PlaybackController {
 PREHEAT_DISTANCE = 1e6;
 // 1s preheat trigger distance
 preheatInProgress = false;
-// Audio scheduling throttle to reduce CPU overhead
-lastAudioScheduleTime = 0;
-AUDIO_SCHEDULE_INTERVAL = 1e5;
-// 100ms (~3 frames at 30fps)
 constructor(orchestrator, eventBus, options) {
 this.orchestrator = orchestrator;
 this.audioSession = orchestrator.audioSession;
@@ -62,162 +62,62 @@ class PlaybackController {
 if (options.loop !== void 0) {
 this.loop = options.loop;
 }
+this.setupEventListeners();
 if (options.autoStart) {
 this.play();
 }
-this.setupEventListeners();
 }
 async renderCover() {
-await this.renderCurrentFrame(0);
+await this.renderCurrentFrame(0, { immediate: false });
 }
-//
+// ========= Public API =========
 play() {
-
-this.lastAudioScheduleTime = 0;
-this.wasPlayingBeforeSeek = true;
-this.startPlayback();
-}
-async startPlayback() {
-const wasIdle = this.state === "idle";
-const seekId = this.currentSeekId;
-this.state = "playing";
-try {
-await this.renderCurrentFrame(this.currentTimeUs);
-if (seekId !== this.currentSeekId || this.state !== "playing") {
-return;
-}
-this.initWindow(this.currentTimeUs);
-await this.audioSession.startPlayback(this.currentTimeUs, this.audioContext);
-this.startTimeUs = this.audioContext.currentTime * 1e6 - this.currentTimeUs / this.playbackRate;
-this.playbackLoop();
-this.eventBus.emit(MeframeEvent.PlaybackPlay);
-} catch (error) {
-console.error("[PlaybackController] Failed to start playback:", error);
-this.state = wasIdle ? "idle" : "paused";
-this.eventBus.emit(MeframeEvent.PlaybackError, error);
-}
+this.dispatch({ type: PlaybackActionType.Play });
 }
 pause() {
-this.
-this.wasPlayingBeforeSeek = false;
-if (this.rafId !== null) {
-cancelAnimationFrame(this.rafId);
-this.rafId = null;
-}
-this.audioSession.stopPlayback();
-this.currentSeekId++;
-this.eventBus.emit(MeframeEvent.PlaybackPause);
+this.dispatch({ type: PlaybackActionType.Pause });
 }
 stop() {
-this.
-this.currentTimeUs = 0;
-this.state = "idle";
-this.wasPlayingBeforeSeek = false;
-this.frameCount = 0;
-this.lastFrameTime = 0;
-this.lastAudioScheduleTime = 0;
-const ctx = this.canvas.getContext("2d");
-if (ctx && "clearRect" in ctx) {
-ctx.clearRect(0, 0, this.canvas.width, this.canvas.height);
-}
-this.audioSession.reset();
-this.audioSession.resetPlaybackStates();
-this.eventBus.emit(MeframeEvent.PlaybackStop);
+this.dispatch({ type: PlaybackActionType.Stop });
 }
 async seek(timeUs) {
-const
-
-
-
-
-
-this.audioSession.stopPlayback();
-this.lastAudioScheduleTime = 0;
-const clamped = this.clampTime(timeUs);
-this.currentTimeUs = clamped;
-this.currentSeekId++;
-this.state = "seeking";
-const seekId = this.currentSeekId;
-try {
-const keyframeTimeUs = await this.orchestrator.tryRenderKeyframe(clamped);
-if (keyframeTimeUs !== null) {
-const renderState = await this.orchestrator.getRenderState(clamped, {
-immediate: true,
-relativeTimeUs: keyframeTimeUs
-});
-if (renderState && this.videoComposer) {
-await this.videoComposer.composeFrame({
-timeUs: clamped,
-layers: renderState.layers,
-transition: renderState.transition
-});
-}
-}
-if (seekId !== this.currentSeekId) {
-return;
-}
-await this.audioSession.ensureAudioForTime(clamped, { immediate: false });
-await this.orchestrator.getFrame(clamped, {
-immediate: false,
-preheat: true
-});
-this.initWindow(clamped);
-if (seekId !== this.currentSeekId) {
-return;
-}
-await this.renderCurrentFrame(clamped);
-if (seekId !== this.currentSeekId) {
-return;
-}
-this.eventBus.emit(MeframeEvent.PlaybackSeek, { timeUs: this.currentTimeUs });
-if (this.wasPlayingBeforeSeek) {
-await this.startPlayback();
-} else {
-this.state = previousState === "idle" ? "idle" : "paused";
-}
-} catch (error) {
-if (seekId !== this.currentSeekId) {
-return;
-}
-console.error("[PlaybackController] Seek error:", error);
-this.eventBus.emit(MeframeEvent.PlaybackError, error);
-this.state = previousState === "idle" ? "idle" : "paused";
-}
+const { done } = this.dispatch({
+type: PlaybackActionType.Seek,
+timeUs,
+durationUs: this.duration
+});
+await done;
 }
-// Playback properties
 setRate(rate) {
 const currentTimeUs = this.currentTimeUs;
 this.playbackRate = rate;
 this.startTimeUs = this.audioContext.currentTime * 1e6 - currentTimeUs / rate;
-this.eventBus.emit(MeframeEvent.PlaybackRateChange, { rate });
 this.audioSession.setPlaybackRate(this.playbackRate);
+this.eventBus.emit(MeframeEvent.PlaybackRateChange, { rate });
 }
 setVolume(volume) {
 this.volume = Math.max(0, Math.min(1, volume));
-this.eventBus.emit(MeframeEvent.PlaybackVolumeChange, { volume: this.volume });
 this.audioSession.setVolume(this.volume);
+this.eventBus.emit(MeframeEvent.PlaybackVolumeChange, { volume: this.volume });
 }
 setMute(muted) {
 if (muted) {
 this.audioSession.stopPlayback();
-
-
+return;
+}
+if (this.fsm.snapshot.state === PlaybackState.Playing) {
+void this.audioSession.startPlayback(this.currentTimeUs, this.audioContext);
 }
 }
 setLoop(loop) {
 this.loop = loop;
 }
 get duration() {
-
-if (modelDuration !== void 0) {
-return modelDuration;
-}
-return 0;
+return this.orchestrator.compositionModel?.durationUs ?? 0;
 }
 get isPlaying() {
-return this.state ===
+return this.fsm.snapshot.state === PlaybackState.Playing;
 }
-// Resume is just an alias for play
 resume() {
 this.play();
 }
@@ -227,82 +127,242 @@ class PlaybackController {
 off(event, handler) {
 this.eventBus.off(event, handler);
 }
-//
-
-
-
-
-
+// ========= State machine wiring =========
+dispatch(action) {
+const { token, commands } = this.fsm.dispatch(action, { currentTimeUs: this.currentTimeUs });
+const done = this.executeCommands(commands, token);
+return { token, done };
+}
+executeCommands(commands, token) {
+const maybe = this.executeSeq(commands, token, 0);
+return maybe ?? Promise.resolve();
+}
+executeSeq(commands, token, startIndex) {
+for (let i = startIndex; i < commands.length; i++) {
+if (!this.isCurrentToken(token)) return;
+const maybe = this.executeCommand(commands[i], token);
+if (maybe) {
+return maybe.then(() => {
+if (!this.isCurrentToken(token)) return;
+const cont = this.executeSeq(commands, token, i + 1);
+return cont ?? Promise.resolve();
+});
 }
-return;
 }
-
-
+}
+executePar(commands, token) {
+const promises = [];
+for (const c of commands) {
+if (!this.isCurrentToken(token)) return;
+const maybe = this.executeCommand(c, token);
+if (maybe) promises.push(maybe);
+}
+if (promises.length === 0) return;
+return Promise.all(promises).then(() => void 0);
+}
+executeCommand(command, token) {
+if (!this.isCurrentToken(token)) return;
+switch (command.type) {
+case PlaybackCommandType.Seq:
+return this.executeSeq(command.commands, token, 0);
+case PlaybackCommandType.Par:
+return this.executePar(command.commands, token);
+case PlaybackCommandType.Try: {
+const handleError = (error) => {
+if (!this.isCurrentToken(token)) return;
+if (command.ignoreWaiterReplacedError && error instanceof WaiterReplacedError) return;
+if (command.logPrefix) console.error(command.logPrefix, error);
+const onErrorDone = command.onError ? this.dispatch(command.onError).done : void 0;
+const emit = () => {
+if (command.emitPlaybackError) {
+this.eventBus.emit(MeframeEvent.PlaybackError, error);
+}
+};
+if (onErrorDone) {
+return onErrorDone.then(() => {
+emit();
+});
+}
+emit();
+};
+try {
+const maybe = this.executeCommand(command.command, token);
+if (maybe) {
+return maybe.catch(handleError);
+}
+return;
+} catch (error) {
+return handleError(error) ?? Promise.resolve();
+}
+}
+case PlaybackCommandType.Dispatch:
+return this.dispatch(command.action).done;
+case PlaybackCommandType.SetTime: {
+this.currentTimeUs = command.timeUs;
+return;
+}
+case PlaybackCommandType.SetFrozenTime:
+case PlaybackCommandType.SetWantsPlay:
+case PlaybackCommandType.SetState: {
+return;
+}
+case PlaybackCommandType.CancelRaf: {
+this.cancelRaf();
+return;
+}
+case PlaybackCommandType.StopAudio: {
+this.audioSession.stopPlayback();
+return;
+}
+case PlaybackCommandType.ResetAudioPlaybackStates: {
+this.audioSession.resetPlaybackStates();
+return;
+}
+case PlaybackCommandType.ResetAudioSession: {
+this.audioSession.reset();
 return;
 }
-
-
+case PlaybackCommandType.ClearCanvas: {
+this.clearCanvas();
 return;
 }
-
-
-
+case PlaybackCommandType.SetLastAudioScheduleTime: {
+this.lastAudioScheduleTime = command.timeUs;
+return;
 }
-
-
+case PlaybackCommandType.SetStartTimeBase: {
+this.startTimeUs = command.startTimeUs;
 return;
 }
-
-
-
-
-
+case PlaybackCommandType.SyncTimeBaseToAudioClock: {
+this.startTimeUs = this.audioContext.currentTime * 1e6 - command.timeUs / this.playbackRate;
+return;
+}
+case PlaybackCommandType.InitWindow: {
+this.initWindow(command.timeUs);
+return;
+}
+case PlaybackCommandType.SetCacheWindow: {
+this.orchestrator.cacheManager.setWindow(command.timeUs);
+return;
+}
+case PlaybackCommandType.Emit: {
+if (command.payload === void 0) {
+this.eventBus.emit(command.event);
+} else {
+this.eventBus.emit(command.event, command.payload);
+}
+return;
+}
+case PlaybackCommandType.RenderFrame: {
+return this.renderCurrentFrame(command.timeUs, {
+immediate: command.immediate,
+relativeTimeUs: command.relativeTimeUs
+});
+}
+case PlaybackCommandType.MaybeRenderKeyframePreview: {
+return this.orchestrator.tryRenderKeyframe(command.timeUs).then((keyframeTimeUs) => {
+if (!this.isCurrentToken(token)) return;
+if (keyframeTimeUs === null) return;
+return this.orchestrator.getRenderState(command.timeUs, {
+immediate: true,
+relativeTimeUs: keyframeTimeUs
+}).then((keyframeRenderState) => {
+if (!this.isCurrentToken(token)) return;
+if (!keyframeRenderState) return;
+return this.compose(command.timeUs, keyframeRenderState);
+});
+});
+}
+case PlaybackCommandType.EnsureAudio: {
+return this.audioSession.ensureAudioForTime(command.timeUs, {
+immediate: command.immediate
+});
+}
+case PlaybackCommandType.GetFrame: {
+return this.orchestrator.getFrame(command.timeUs, {
+immediate: command.immediate,
+preheat: command.preheat
+}).then(() => void 0);
+}
+case PlaybackCommandType.StartAudioPlayback: {
+return this.audioSession.startPlayback(command.timeUs, this.audioContext);
 }
-
-
-
-
+case PlaybackCommandType.StartRafLoop: {
+this.startPlaybackLoop(token);
+return;
+}
+}
+}
+cancelRaf() {
+if (this.rafId !== null) {
+cancelAnimationFrame(this.rafId);
+this.rafId = null;
+}
+}
+isCurrentToken(token) {
+return token === this.fsm.snapshot.token;
+}
+startPlaybackLoop(token) {
+this.rafId = requestAnimationFrame(() => {
+void this.onRafTick(token);
 });
 }
-
-
-
-if (this.currentTimeUs >= this.duration) {
-if (this.loop) {
-this.currentTimeUs = 0;
-this.startTimeUs = this.audioContext.currentTime * 1e6;
-this.audioSession.resetPlaybackStates();
-this.lastAudioScheduleTime = 0;
-this.initWindow(0);
-} else {
-this.pause();
-this.currentTimeUs = 0;
-this.state = "ended";
-this.eventBus.emit(MeframeEvent.PlaybackEnded, { timeUs: this.duration });
-}
+async onRafTick(token) {
+if (!this.isCurrentToken(token) || this.fsm.snapshot.state !== PlaybackState.Playing) {
+return;
 }
-this.
+const candidateTimeUs = (this.audioContext.currentTime * 1e6 - this.startTimeUs) * this.playbackRate;
+this.dispatch({
+type: PlaybackActionType.ClockTick,
+candidateTimeUs,
+durationUs: this.duration,
+loop: this.loop,
+audioNowUs: this.audioContext.currentTime * 1e6
+});
+if (!this.isCurrentToken(token) || this.fsm.snapshot.state !== PlaybackState.Playing) {
+return;
+}
+if (this.currentTimeUs - this.lastAudioScheduleTime >= this.AUDIO_SCHEDULE_INTERVAL) {
+await this.audioSession.scheduleAudio(this.currentTimeUs, this.audioContext);
+if (!this.isCurrentToken(token) || this.fsm.snapshot.state !== PlaybackState.Playing) return;
+this.lastAudioScheduleTime = this.currentTimeUs;
+}
+const renderState = await this.orchestrator.getRenderState(this.currentTimeUs, {
+immediate: true
+});
+if (!this.isCurrentToken(token) || this.fsm.snapshot.state !== PlaybackState.Playing) {
+return;
+}
+if (!renderState) {
+this.dispatch({ type: PlaybackActionType.EnterBuffering, timeUs: this.currentTimeUs });
+return;
+}
+await this.compose(this.currentTimeUs, renderState);
+if (!this.isCurrentToken(token) || this.fsm.snapshot.state !== PlaybackState.Playing) return;
+this.updateFps();
+this.frameCount++;
+this.orchestrator.cacheManager.setWindow(this.currentTimeUs);
 this.checkAndPreheatWindow();
+if (!this.isCurrentToken(token) || this.fsm.snapshot.state !== PlaybackState.Playing) return;
+this.startPlaybackLoop(token);
+}
+updateFps() {
+const now = performance.now();
+if (this.lastFrameTime > 0) {
+const deltaTime = now - this.lastFrameTime;
+const instantFps = 1e3 / deltaTime;
+this.fps = this.fps > 0 ? this.fps * 0.9 + instantFps * 0.1 : instantFps;
+}
+this.lastFrameTime = now;
 }
-/**
-* Initialize window at given time (called on play/seek)
-* Sets unified window for both video and audio
-*/
 initWindow(timeUs) {
 this.windowEnd = timeUs + this.WINDOW_DURATION;
 this.preheatInProgress = false;
 this.orchestrator.cacheManager.setWindow(timeUs);
 }
-/**
-* Check if approaching window end and trigger preheat for next window
-*
-* Strategy: Unified sliding window for both video and audio
-* - Current window: [windowStart, windowEnd] (3s duration)
-* - When playback reaches windowEnd - 1s, preheat next window
-* - Next window: [windowEnd, windowEnd + 3s]
-*/
 checkAndPreheatWindow() {
-if (this.preheatInProgress || this.state !==
+if (this.preheatInProgress || this.fsm.snapshot.state !== PlaybackState.Playing) {
 return;
 }
 const distanceToWindowEnd = this.windowEnd - this.currentTimeUs;
@@ -311,15 +371,11 @@ class PlaybackController {
 return;
 }
 if (distanceToWindowEnd > 0 && distanceToWindowEnd <= this.PREHEAT_DISTANCE) {
-this.preheatNextWindow();
+void this.preheatNextWindow();
 }
 }
-/**
-* Preheat next window by decoding from current playback time
-* Supports cross-clip window preheating for seamless playback
-* Preheats both video and audio in parallel
-*/
 async preheatNextWindow() {
+if (this.preheatInProgress) return;
 this.preheatInProgress = true;
 try {
 const windowStart = this.currentTimeUs;
@@ -344,70 +400,35 @@ class PlaybackController {
 this.preheatInProgress = false;
 }
 }
-async renderCurrentFrame(timeUs) {
+async renderCurrentFrame(timeUs, options) {
 if (!this.videoComposer) {
 console.error("[PlaybackController] VideoComposer not initialized");
 return;
 }
-
-
-
-
-
-if (this.state === "playing") {
-await this.handlePlaybackBuffering(timeUs);
-}
-return;
-}
-await this.videoComposer.composeFrame({
-timeUs,
-layers: renderState.layers,
-transition: renderState.transition
-});
-} catch (error) {
-console.error("Render error:", error);
-this.eventBus.emit(MeframeEvent.PlaybackError, error);
-}
-}
-async handlePlaybackBuffering(timeUs) {
-if (this.state !== "playing") {
+const renderState = await this.orchestrator.getRenderState(timeUs, {
+immediate: options.immediate,
+relativeTimeUs: options.relativeTimeUs
+});
+if (!renderState) {
 return;
 }
-
-
-
-this.
-
-
-
-
-
-return;
-}
-this.state = "playing";
-this.startTimeUs = this.audioContext.currentTime * 1e6 - timeUs / this.playbackRate;
-this.lastAudioScheduleTime = 0;
-await this.audioSession.startPlayback(timeUs, this.audioContext);
-this.eventBus.emit(MeframeEvent.PlaybackPlay);
-if (!this.rafId) {
-this.playbackLoop();
-}
-} catch (error) {
-if (error instanceof WaiterReplacedError) {
-return;
-}
-if (seekId !== this.currentSeekId) {
-return;
-}
-console.error("[PlaybackController] Buffering error:", error);
-this.state = "paused";
-this.eventBus.emit(MeframeEvent.PlaybackError, error);
-}
+await this.compose(timeUs, renderState);
+}
+async compose(timeUs, renderState) {
+if (!this.videoComposer) return;
+await this.videoComposer.composeFrame({
+timeUs,
+layers: renderState.layers,
+transition: renderState.transition
+});
 }
-
-
+clearCanvas() {
+const ctx = this.canvas.getContext("2d");
+if (ctx && "clearRect" in ctx) {
+ctx.clearRect(0, 0, this.canvas.width, this.canvas.height);
+}
 }
-// Cleanup
+// ========= Cleanup / event handlers =========
 dispose() {
 this.stop();
 this.eventBus.off(MeframeEvent.CacheCover, this.onCacheCover);
@@ -418,8 +439,8 @@ class PlaybackController {
 }
 }
 onCacheCover = () => {
-if (this.state ===
-this.renderCurrentFrame(0);
+if (this.fsm.snapshot.state === PlaybackState.Idle && this.currentTimeUs === 0) {
+void this.renderCurrentFrame(0, { immediate: false });
 }
 };
 onModelSet = () => {
@@ -431,8 +452,8 @@ class PlaybackController {
 fps: model.fps || 30,
 backgroundColor: model.renderConfig?.backgroundColor || "#000"
 });
-this.audioSession.ensureAudioForTime(this.currentTimeUs, { immediate: false });
-this.renderCurrentFrame(this.currentTimeUs);
+void this.audioSession.ensureAudioForTime(this.currentTimeUs, { immediate: false });
+void this.renderCurrentFrame(this.currentTimeUs, { immediate: false });
 };
 setupEventListeners() {
 this.eventBus.on(MeframeEvent.CacheCover, this.onCacheCover);
|