@meframe/core 0.1.5 → 0.1.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cache/CacheManager.d.ts +12 -0
- package/dist/cache/CacheManager.d.ts.map +1 -1
- package/dist/cache/CacheManager.js +11 -0
- package/dist/cache/CacheManager.js.map +1 -1
- package/dist/cache/l1/AudioL1Cache.d.ts +12 -0
- package/dist/cache/l1/AudioL1Cache.d.ts.map +1 -1
- package/dist/cache/l1/AudioL1Cache.js +36 -0
- package/dist/cache/l1/AudioL1Cache.js.map +1 -1
- package/dist/controllers/PlaybackController.d.ts +16 -28
- package/dist/controllers/PlaybackController.d.ts.map +1 -1
- package/dist/controllers/PlaybackController.js +274 -254
- package/dist/controllers/PlaybackController.js.map +1 -1
- package/dist/controllers/PlaybackStateMachine.d.ts +16 -0
- package/dist/controllers/PlaybackStateMachine.d.ts.map +1 -0
- package/dist/controllers/PlaybackStateMachine.js +308 -0
- package/dist/controllers/PlaybackStateMachine.js.map +1 -0
- package/dist/controllers/index.d.ts +2 -1
- package/dist/controllers/index.d.ts.map +1 -1
- package/dist/controllers/types.d.ts +165 -2
- package/dist/controllers/types.d.ts.map +1 -1
- package/dist/controllers/types.js +53 -0
- package/dist/controllers/types.js.map +1 -0
- package/dist/orchestrator/GlobalAudioSession.d.ts +4 -1
- package/dist/orchestrator/GlobalAudioSession.d.ts.map +1 -1
- package/dist/orchestrator/GlobalAudioSession.js +46 -5
- package/dist/orchestrator/GlobalAudioSession.js.map +1 -1
- package/dist/utils/timeout-utils.d.ts +9 -0
- package/dist/utils/timeout-utils.d.ts.map +1 -0
- package/package.json +1 -1
package/dist/controllers/PlaybackController.js

@@ -1,31 +1,35 @@
+import { PlaybackActionType, PlaybackState, PlaybackCommandType } from "./types.js";
 import { MeframeEvent } from "../event/events.js";
 import { WaiterReplacedError } from "../utils/errors.js";
 import { VideoComposer } from "../stages/compose/VideoComposer.js";
 import { isVideoClip } from "../model/types.js";
+import { PlaybackStateMachine } from "./PlaybackStateMachine.js";
 class PlaybackController {
   orchestrator;
   eventBus;
   canvas;
   videoComposer = null;
-  // Playback
+  // Playback time (external)
   currentTimeUs = 0;
-  state = "idle";
   playbackRate = 1;
   volume = 1;
   loop = false;
-  //
+  // Time base
   rafId = null;
   startTimeUs = 0;
-  //
-  // Frame
+  // AudioContext timeline origin (microseconds)
+  // Frame stats
   frameCount = 0;
   lastFrameTime = 0;
   fps = 0;
+  // Audio
   audioContext;
   audioSession;
-
-
-
+  lastAudioScheduleTime = 0;
+  AUDIO_SCHEDULE_INTERVAL = 1e5;
+  // 100ms
+  // State machine
+  fsm = new PlaybackStateMachine();
   // Unified window management for both video and audio
   windowEnd = 0;
   WINDOW_DURATION = 3e6;
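The controller now delegates its state transitions to the new PlaybackStateMachine and the action/command enums imported above. The state machine's own source is not expanded in this diff, so the sketch below is only inferred from the call sites later in this file (fsm.dispatch(...) returning a token plus a command list, and fsm.snapshot exposing state and token); the *Like type names and all field shapes are assumptions, not the package's published definitions.

    // Hedged TypeScript sketch of the contract PlaybackController appears to rely on.
    type PlaybackStateName = string;   // PlaybackState.Idle and PlaybackState.Playing are visible in this diff; other values are assumed
    type PlaybackActionLike = { type: string; [key: string]: unknown };  // e.g. { type: PlaybackActionType.Play }
    type PlaybackCommandLike = { type: string; [key: string]: unknown }; // e.g. { type: PlaybackCommandType.RenderFrame, timeUs, immediate }

    interface PlaybackSnapshotLike {
      state: PlaybackStateName;
      token: number; // assumed to change whenever a new dispatch supersedes in-flight work
    }

    interface PlaybackStateMachineLike {
      readonly snapshot: PlaybackSnapshotLike;
      dispatch(
        action: PlaybackActionLike,
        ctx: { currentTimeUs: number }
      ): { token: number; commands: PlaybackCommandLike[] };
    }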
@@ -33,10 +37,6 @@ class PlaybackController {
   PREHEAT_DISTANCE = 1e6;
   // 1s preheat trigger distance
   preheatInProgress = false;
-  // Audio scheduling throttle to reduce CPU overhead
-  lastAudioScheduleTime = 0;
-  AUDIO_SCHEDULE_INTERVAL = 1e5;
-  // 100ms (~3 frames at 30fps)
   constructor(orchestrator, eventBus, options) {
     this.orchestrator = orchestrator;
     this.audioSession = orchestrator.audioSession;
@@ -62,162 +62,62 @@ class PlaybackController {
     if (options.loop !== void 0) {
       this.loop = options.loop;
     }
+    this.setupEventListeners();
     if (options.autoStart) {
       this.play();
     }
-    this.setupEventListeners();
   }
   async renderCover() {
-    await this.renderCurrentFrame(0);
+    await this.renderCurrentFrame(0, { immediate: false });
   }
-  //
+  // ========= Public API =========
   play() {
-
-    this.lastAudioScheduleTime = 0;
-    this.wasPlayingBeforeSeek = true;
-    this.startPlayback();
-  }
-  async startPlayback() {
-    const wasIdle = this.state === "idle";
-    const seekId = this.currentSeekId;
-    this.state = "playing";
-    try {
-      await this.renderCurrentFrame(this.currentTimeUs);
-      if (seekId !== this.currentSeekId || this.state !== "playing") {
-        return;
-      }
-      this.initWindow(this.currentTimeUs);
-      await this.audioSession.startPlayback(this.currentTimeUs, this.audioContext);
-      this.startTimeUs = this.audioContext.currentTime * 1e6 - this.currentTimeUs / this.playbackRate;
-      this.playbackLoop();
-      this.eventBus.emit(MeframeEvent.PlaybackPlay);
-    } catch (error) {
-      console.error("[PlaybackController] Failed to start playback:", error);
-      this.state = wasIdle ? "idle" : "paused";
-      this.eventBus.emit(MeframeEvent.PlaybackError, error);
-    }
+    this.dispatch({ type: PlaybackActionType.Play });
   }
   pause() {
-    this.
-    this.wasPlayingBeforeSeek = false;
-    if (this.rafId !== null) {
-      cancelAnimationFrame(this.rafId);
-      this.rafId = null;
-    }
-    this.audioSession.stopPlayback();
-    this.currentSeekId++;
-    this.eventBus.emit(MeframeEvent.PlaybackPause);
+    this.dispatch({ type: PlaybackActionType.Pause });
   }
   stop() {
-    this.
-    this.currentTimeUs = 0;
-    this.state = "idle";
-    this.wasPlayingBeforeSeek = false;
-    this.frameCount = 0;
-    this.lastFrameTime = 0;
-    this.lastAudioScheduleTime = 0;
-    const ctx = this.canvas.getContext("2d");
-    if (ctx && "clearRect" in ctx) {
-      ctx.clearRect(0, 0, this.canvas.width, this.canvas.height);
-    }
-    this.audioSession.reset();
-    this.audioSession.resetPlaybackStates();
-    this.eventBus.emit(MeframeEvent.PlaybackStop);
+    this.dispatch({ type: PlaybackActionType.Stop });
   }
   async seek(timeUs) {
-    const
-
-
-
-
-
-    this.audioSession.stopPlayback();
-    this.lastAudioScheduleTime = 0;
-    const clamped = this.clampTime(timeUs);
-    this.currentTimeUs = clamped;
-    this.currentSeekId++;
-    this.state = "seeking";
-    const seekId = this.currentSeekId;
-    try {
-      const keyframeTimeUs = await this.orchestrator.tryRenderKeyframe(clamped);
-      if (keyframeTimeUs !== null) {
-        const renderState = await this.orchestrator.getRenderState(clamped, {
-          immediate: true,
-          relativeTimeUs: keyframeTimeUs
-        });
-        if (renderState && this.videoComposer) {
-          await this.videoComposer.composeFrame({
-            timeUs: clamped,
-            layers: renderState.layers,
-            transition: renderState.transition
-          });
-        }
-      }
-      if (seekId !== this.currentSeekId) {
-        return;
-      }
-      await this.audioSession.ensureAudioForTime(clamped, { immediate: false });
-      await this.orchestrator.getFrame(clamped, {
-        immediate: false,
-        preheat: true
-      });
-      this.initWindow(clamped);
-      if (seekId !== this.currentSeekId) {
-        return;
-      }
-      await this.renderCurrentFrame(clamped);
-      if (seekId !== this.currentSeekId) {
-        return;
-      }
-      this.eventBus.emit(MeframeEvent.PlaybackSeek, { timeUs: this.currentTimeUs });
-      if (this.wasPlayingBeforeSeek) {
-        await this.startPlayback();
-      } else {
-        this.state = previousState === "idle" ? "idle" : "paused";
-      }
-    } catch (error) {
-      if (seekId !== this.currentSeekId) {
-        return;
-      }
-      console.error("[PlaybackController] Seek error:", error);
-      this.eventBus.emit(MeframeEvent.PlaybackError, error);
-      this.state = previousState === "idle" ? "idle" : "paused";
-    }
+    const { done } = this.dispatch({
+      type: PlaybackActionType.Seek,
+      timeUs,
+      durationUs: this.duration
+    });
+    await done;
   }
-  // Playback properties
   setRate(rate) {
     const currentTimeUs = this.currentTimeUs;
     this.playbackRate = rate;
     this.startTimeUs = this.audioContext.currentTime * 1e6 - currentTimeUs / rate;
-    this.eventBus.emit(MeframeEvent.PlaybackRateChange, { rate });
     this.audioSession.setPlaybackRate(this.playbackRate);
+    this.eventBus.emit(MeframeEvent.PlaybackRateChange, { rate });
   }
   setVolume(volume) {
     this.volume = Math.max(0, Math.min(1, volume));
-    this.eventBus.emit(MeframeEvent.PlaybackVolumeChange, { volume: this.volume });
     this.audioSession.setVolume(this.volume);
+    this.eventBus.emit(MeframeEvent.PlaybackVolumeChange, { volume: this.volume });
   }
   setMute(muted) {
     if (muted) {
       this.audioSession.stopPlayback();
-
-
+      return;
+    }
+    if (this.fsm.snapshot.state === PlaybackState.Playing) {
+      void this.audioSession.startPlayback(this.currentTimeUs, this.audioContext);
     }
   }
   setLoop(loop) {
     this.loop = loop;
   }
   get duration() {
-
-    if (modelDuration !== void 0) {
-      return modelDuration;
-    }
-    return 0;
+    return this.orchestrator.compositionModel?.durationUs ?? 0;
   }
   get isPlaying() {
-    return this.state ===
+    return this.fsm.snapshot.state === PlaybackState.Playing;
   }
-  // Resume is just an alias for play
   resume() {
     this.play();
   }
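With this hunk, play, pause, stop, and seek no longer run their logic inline; each dispatches an action and lets the state machine decide which commands the controller executes, with seek additionally awaiting the returned done promise. A hypothetical usage sketch under those assumptions (the orchestrator and eventBus values and the import path are placeholders; only the constructor shape and method names come from this diff):

    // Illustrative only: driving the 0.1.7 public surface shown above.
    import { PlaybackController } from "@meframe/core/dist/controllers/PlaybackController.js"; // path assumed to be importable
    declare const orchestrator: any; // supplied by the host application (assumption)
    declare const eventBus: any;

    async function demo(): Promise<void> {
      const controller = new PlaybackController(orchestrator, eventBus, { autoStart: false, loop: true });
      controller.play();                // dispatches { type: PlaybackActionType.Play }
      controller.setRate(1.5);          // rebases startTimeUs against the AudioContext clock
      await controller.seek(2_000_000); // Seek action; resolves once the dispatched command tree settles
      controller.pause();               // dispatches { type: PlaybackActionType.Pause }
    }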
@@ -227,83 +127,242 @@ class PlaybackController {
   off(event, handler) {
     this.eventBus.off(event, handler);
   }
-  //
-
-
-
-
-
+  // ========= State machine wiring =========
+  dispatch(action) {
+    const { token, commands } = this.fsm.dispatch(action, { currentTimeUs: this.currentTimeUs });
+    const done = this.executeCommands(commands, token);
+    return { token, done };
+  }
+  executeCommands(commands, token) {
+    const maybe = this.executeSeq(commands, token, 0);
+    return maybe ?? Promise.resolve();
+  }
+  executeSeq(commands, token, startIndex) {
+    for (let i = startIndex; i < commands.length; i++) {
+      if (!this.isCurrentToken(token)) return;
+      const maybe = this.executeCommand(commands[i], token);
+      if (maybe) {
+        return maybe.then(() => {
+          if (!this.isCurrentToken(token)) return;
+          const cont = this.executeSeq(commands, token, i + 1);
+          return cont ?? Promise.resolve();
+        });
       }
-      return;
     }
-
-
+  }
+  executePar(commands, token) {
+    const promises = [];
+    for (const c of commands) {
+      if (!this.isCurrentToken(token)) return;
+      const maybe = this.executeCommand(c, token);
+      if (maybe) promises.push(maybe);
+    }
+    if (promises.length === 0) return;
+    return Promise.all(promises).then(() => void 0);
+  }
+  executeCommand(command, token) {
+    if (!this.isCurrentToken(token)) return;
+    switch (command.type) {
+      case PlaybackCommandType.Seq:
+        return this.executeSeq(command.commands, token, 0);
+      case PlaybackCommandType.Par:
+        return this.executePar(command.commands, token);
+      case PlaybackCommandType.Try: {
+        const handleError = (error) => {
+          if (!this.isCurrentToken(token)) return;
+          if (command.ignoreWaiterReplacedError && error instanceof WaiterReplacedError) return;
+          if (command.logPrefix) console.error(command.logPrefix, error);
+          const onErrorDone = command.onError ? this.dispatch(command.onError).done : void 0;
+          const emit = () => {
+            if (command.emitPlaybackError) {
+              this.eventBus.emit(MeframeEvent.PlaybackError, error);
+            }
+          };
+          if (onErrorDone) {
+            return onErrorDone.then(() => {
+              emit();
+            });
+          }
+          emit();
+        };
+        try {
+          const maybe = this.executeCommand(command.command, token);
+          if (maybe) {
+            return maybe.catch(handleError);
+          }
+          return;
+        } catch (error) {
+          return handleError(error) ?? Promise.resolve();
+        }
+      }
+      case PlaybackCommandType.Dispatch:
+        return this.dispatch(command.action).done;
+      case PlaybackCommandType.SetTime: {
+        this.currentTimeUs = command.timeUs;
+        return;
+      }
+      case PlaybackCommandType.SetFrozenTime:
+      case PlaybackCommandType.SetWantsPlay:
+      case PlaybackCommandType.SetState: {
+        return;
+      }
+      case PlaybackCommandType.CancelRaf: {
+        this.cancelRaf();
+        return;
+      }
+      case PlaybackCommandType.StopAudio: {
+        this.audioSession.stopPlayback();
+        return;
+      }
+      case PlaybackCommandType.ResetAudioPlaybackStates: {
+        this.audioSession.resetPlaybackStates();
+        return;
+      }
+      case PlaybackCommandType.ResetAudioSession: {
+        this.audioSession.reset();
         return;
       }
-
-
+      case PlaybackCommandType.ClearCanvas: {
+        this.clearCanvas();
         return;
       }
-
-
-
-        this.lastAudioScheduleTime = this.currentTimeUs;
+      case PlaybackCommandType.SetLastAudioScheduleTime: {
+        this.lastAudioScheduleTime = command.timeUs;
+        return;
       }
-
-
+      case PlaybackCommandType.SetStartTimeBase: {
+        this.startTimeUs = command.startTimeUs;
        return;
      }
-
-
-
-
-
+      case PlaybackCommandType.SyncTimeBaseToAudioClock: {
+        this.startTimeUs = this.audioContext.currentTime * 1e6 - command.timeUs / this.playbackRate;
+        return;
+      }
+      case PlaybackCommandType.InitWindow: {
+        this.initWindow(command.timeUs);
+        return;
+      }
+      case PlaybackCommandType.SetCacheWindow: {
+        this.orchestrator.cacheManager.setWindow(command.timeUs);
+        return;
+      }
+      case PlaybackCommandType.Emit: {
+        if (command.payload === void 0) {
+          this.eventBus.emit(command.event);
+        } else {
+          this.eventBus.emit(command.event, command.payload);
+        }
+        return;
+      }
+      case PlaybackCommandType.RenderFrame: {
+        return this.renderCurrentFrame(command.timeUs, {
+          immediate: command.immediate,
+          relativeTimeUs: command.relativeTimeUs
+        });
+      }
+      case PlaybackCommandType.MaybeRenderKeyframePreview: {
+        return this.orchestrator.tryRenderKeyframe(command.timeUs).then((keyframeTimeUs) => {
+          if (!this.isCurrentToken(token)) return;
+          if (keyframeTimeUs === null) return;
+          return this.orchestrator.getRenderState(command.timeUs, {
+            immediate: true,
+            relativeTimeUs: keyframeTimeUs
+          }).then((keyframeRenderState) => {
+            if (!this.isCurrentToken(token)) return;
+            if (!keyframeRenderState) return;
+            return this.compose(command.timeUs, keyframeRenderState);
+          });
+        });
+      }
+      case PlaybackCommandType.EnsureAudio: {
+        return this.audioSession.ensureAudioForTime(command.timeUs, {
+          immediate: command.immediate
+        });
+      }
+      case PlaybackCommandType.GetFrame: {
+        return this.orchestrator.getFrame(command.timeUs, {
+          immediate: command.immediate,
+          preheat: command.preheat
+        }).then(() => void 0);
+      }
+      case PlaybackCommandType.StartAudioPlayback: {
+        return this.audioSession.startPlayback(command.timeUs, this.audioContext);
      }
-
-
-
-
+      case PlaybackCommandType.StartRafLoop: {
+        this.startPlaybackLoop(token);
+        return;
+      }
+    }
+  }
+  cancelRaf() {
+    if (this.rafId !== null) {
+      cancelAnimationFrame(this.rafId);
+      this.rafId = null;
+    }
+  }
+  isCurrentToken(token) {
+    return token === this.fsm.snapshot.token;
+  }
+  startPlaybackLoop(token) {
+    this.rafId = requestAnimationFrame(() => {
+      void this.onRafTick(token);
     });
   }
-
-
-
-    if (this.currentTimeUs >= this.duration) {
-      if (this.loop) {
-        this.currentTimeUs = 0;
-        this.startTimeUs = this.audioContext.currentTime * 1e6;
-        this.audioSession.resetPlaybackStates();
-        this.lastAudioScheduleTime = 0;
-        this.initWindow(0);
-      } else {
-        this.pause();
-        this.currentTimeUs = 0;
-        this.state = "ended";
-        this.eventBus.emit(MeframeEvent.PlaybackEnded, { timeUs: this.duration });
-      }
+  async onRafTick(token) {
+    if (!this.isCurrentToken(token) || this.fsm.snapshot.state !== PlaybackState.Playing) {
+      return;
    }
-    this.
+    const candidateTimeUs = (this.audioContext.currentTime * 1e6 - this.startTimeUs) * this.playbackRate;
+    this.dispatch({
+      type: PlaybackActionType.ClockTick,
+      candidateTimeUs,
+      durationUs: this.duration,
+      loop: this.loop,
+      audioNowUs: this.audioContext.currentTime * 1e6
+    });
+    if (!this.isCurrentToken(token) || this.fsm.snapshot.state !== PlaybackState.Playing) {
+      return;
+    }
+    if (this.currentTimeUs - this.lastAudioScheduleTime >= this.AUDIO_SCHEDULE_INTERVAL) {
+      await this.audioSession.scheduleAudio(this.currentTimeUs, this.audioContext);
+      if (!this.isCurrentToken(token) || this.fsm.snapshot.state !== PlaybackState.Playing) return;
+      this.lastAudioScheduleTime = this.currentTimeUs;
+    }
+    const renderState = await this.orchestrator.getRenderState(this.currentTimeUs, {
+      immediate: true
+    });
+    if (!this.isCurrentToken(token) || this.fsm.snapshot.state !== PlaybackState.Playing) {
+      return;
+    }
+    if (!renderState) {
+      this.dispatch({ type: PlaybackActionType.EnterBuffering, timeUs: this.currentTimeUs });
+      return;
+    }
+    await this.compose(this.currentTimeUs, renderState);
+    if (!this.isCurrentToken(token) || this.fsm.snapshot.state !== PlaybackState.Playing) return;
+    this.updateFps();
+    this.frameCount++;
+    this.orchestrator.cacheManager.setWindow(this.currentTimeUs);
     this.checkAndPreheatWindow();
+    if (!this.isCurrentToken(token) || this.fsm.snapshot.state !== PlaybackState.Playing) return;
+    this.startPlaybackLoop(token);
+  }
+  updateFps() {
+    const now = performance.now();
+    if (this.lastFrameTime > 0) {
+      const deltaTime = now - this.lastFrameTime;
+      const instantFps = 1e3 / deltaTime;
+      this.fps = this.fps > 0 ? this.fps * 0.9 + instantFps * 0.1 : instantFps;
+    }
+    this.lastFrameTime = now;
   }
-  /**
-   * Initialize window at given time (called on play/seek)
-   * Sets unified window for both video and audio
-   */
   initWindow(timeUs) {
     this.windowEnd = timeUs + this.WINDOW_DURATION;
     this.preheatInProgress = false;
     this.orchestrator.cacheManager.setWindow(timeUs);
   }
-  /**
-   * Check if approaching window end and trigger preheat for next window
-   *
-   * Strategy: Unified sliding window for both video and audio
-   * - Current window: [windowStart, windowEnd] (3s duration)
-   * - When playback reaches windowEnd - 1s, preheat next window
-   * - Next window: [windowEnd, windowEnd + 3s]
-   */
   checkAndPreheatWindow() {
-    if (this.preheatInProgress || this.state !==
+    if (this.preheatInProgress || this.fsm.snapshot.state !== PlaybackState.Playing) {
       return;
     }
     const distanceToWindowEnd = this.windowEnd - this.currentTimeUs;
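Throughout the new executor and the RAF tick above, every await is followed by an isCurrentToken(token) check: the token captured at dispatch time is compared against fsm.snapshot.token, so once a later play, pause, or seek supersedes it, in-flight work bails out silently. This replaces the old seekId / currentSeekId counters that the removed code threaded through playback and buffering. A minimal, generic sketch of that guard pattern (standalone, not the package's actual classes):

    // Token-based staleness guard: later dispatches invalidate earlier async chains.
    class TokenGuard {
      private token = 0;
      next(): number {
        return ++this.token;         // called when a new "dispatch" starts
      }
      isCurrent(token: number): boolean {
        return token === this.token; // false once a newer dispatch has run
      }
    }

    async function runGuarded(guard: TokenGuard, steps: Array<() => Promise<void>>): Promise<void> {
      const token = guard.next();
      for (const step of steps) {
        if (!guard.isCurrent(token)) return; // a newer action superseded this one
        await step();
      }
    }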
@@ -312,15 +371,11 @@ class PlaybackController {
       return;
     }
     if (distanceToWindowEnd > 0 && distanceToWindowEnd <= this.PREHEAT_DISTANCE) {
-      this.preheatNextWindow();
+      void this.preheatNextWindow();
     }
   }
-  /**
-   * Preheat next window by decoding from current playback time
-   * Supports cross-clip window preheating for seamless playback
-   * Preheats both video and audio in parallel
-   */
   async preheatNextWindow() {
+    if (this.preheatInProgress) return;
     this.preheatInProgress = true;
     try {
       const windowStart = this.currentTimeUs;
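The constants involved in this hunk are microsecond values, matching the sliding-window strategy described in the doc comment removed above: a 3 s window with preheat triggered inside its final second, and preheatNextWindow now also re-checks preheatInProgress before starting. A small worked example with illustrative numbers:

    // Worked example of the preheat threshold (all values in microseconds).
    const WINDOW_DURATION = 3e6;   // 3 s window
    const PREHEAT_DISTANCE = 1e6;  // trigger preheat within the last 1 s of the window
    const windowEnd = 10_000_000 + WINDOW_DURATION;  // window opened at t = 10 s ends at 13 s
    const currentTimeUs = 12_500_000;                // playhead at 12.5 s
    const distanceToWindowEnd = windowEnd - currentTimeUs;                                    // 500_000 µs left
    const shouldPreheat = distanceToWindowEnd > 0 && distanceToWindowEnd <= PREHEAT_DISTANCE; // true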
@@ -345,70 +400,35 @@ class PlaybackController {
       this.preheatInProgress = false;
     }
   }
-  async renderCurrentFrame(timeUs) {
+  async renderCurrentFrame(timeUs, options) {
     if (!this.videoComposer) {
       console.error("[PlaybackController] VideoComposer not initialized");
       return;
     }
-
-
-
-
-
-    if (this.state === "playing") {
-      await this.handlePlaybackBuffering(timeUs);
-    }
-    return;
-  }
-    await this.videoComposer.composeFrame({
-      timeUs,
-      layers: renderState.layers,
-      transition: renderState.transition
-    });
-    } catch (error) {
-      console.error("Render error:", error);
-      this.eventBus.emit(MeframeEvent.PlaybackError, error);
-    }
-  }
-  async handlePlaybackBuffering(timeUs) {
-    if (this.state !== "playing") {
+    const renderState = await this.orchestrator.getRenderState(timeUs, {
+      immediate: options.immediate,
+      relativeTimeUs: options.relativeTimeUs
+    });
+    if (!renderState) {
       return;
     }
-
-
-
-    this.
-
-
-
-
-
-      return;
-    }
-    this.state = "playing";
-    this.startTimeUs = this.audioContext.currentTime * 1e6 - timeUs / this.playbackRate;
-    this.lastAudioScheduleTime = 0;
-    await this.audioSession.startPlayback(timeUs, this.audioContext);
-    this.eventBus.emit(MeframeEvent.PlaybackPlay);
-    if (!this.rafId) {
-      this.playbackLoop();
-    }
-    } catch (error) {
-      if (error instanceof WaiterReplacedError) {
-        return;
-      }
-      if (seekId !== this.currentSeekId) {
-        return;
-      }
-      console.error("[PlaybackController] Buffering error:", error);
-      this.state = "paused";
-      this.eventBus.emit(MeframeEvent.PlaybackError, error);
-    }
+    await this.compose(timeUs, renderState);
+  }
+  async compose(timeUs, renderState) {
+    if (!this.videoComposer) return;
+    await this.videoComposer.composeFrame({
+      timeUs,
+      layers: renderState.layers,
+      transition: renderState.transition
+    });
   }
-
-
+  clearCanvas() {
+    const ctx = this.canvas.getContext("2d");
+    if (ctx && "clearRect" in ctx) {
+      ctx.clearRect(0, 0, this.canvas.width, this.canvas.height);
+    }
   }
-  // Cleanup
+  // ========= Cleanup / event handlers =========
   dispose() {
     this.stop();
     this.eventBus.off(MeframeEvent.CacheCover, this.onCacheCover);
@@ -419,8 +439,8 @@ class PlaybackController {
     }
   }
   onCacheCover = () => {
-    if (this.state ===
-    this.renderCurrentFrame(0);
+    if (this.fsm.snapshot.state === PlaybackState.Idle && this.currentTimeUs === 0) {
+      void this.renderCurrentFrame(0, { immediate: false });
     }
   };
   onModelSet = () => {
@@ -432,8 +452,8 @@ class PlaybackController {
       fps: model.fps || 30,
       backgroundColor: model.renderConfig?.backgroundColor || "#000"
     });
-    this.audioSession.ensureAudioForTime(this.currentTimeUs, { immediate: false });
-    this.renderCurrentFrame(this.currentTimeUs);
+    void this.audioSession.ensureAudioForTime(this.currentTimeUs, { immediate: false });
+    void this.renderCurrentFrame(this.currentTimeUs, { immediate: false });
   };
   setupEventListeners() {
     this.eventBus.on(MeframeEvent.CacheCover, this.onCacheCover);