reze-engine 0.3.0 → 0.3.2

This diff shows the content of publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
package/src/player.ts ADDED
@@ -0,0 +1,490 @@
+ import { bezierInterpolate } from "./bezier-interpolate"
+ import { Quat, Vec3 } from "./math"
+ import { BoneFrame, MorphFrame, VMDKeyFrame, VMDLoader } from "./vmd-loader"
+
+ export interface AnimationPose {
+   boneRotations: Map<string, Quat>
+   boneTranslations: Map<string, Vec3>
+   morphWeights: Map<string, number>
+ }
+
+ export interface AnimationProgress {
+   current: number
+   duration: number
+   percentage: number
+ }
+
+ export class Player {
+   // Animation data
+   private frames: VMDKeyFrame[] = []
+   private boneTracks: Map<string, Array<{ boneFrame: BoneFrame; time: number }>> = new Map()
+   private morphTracks: Map<string, Array<{ morphFrame: MorphFrame; time: number }>> = new Map()
+   private duration: number = 0
+
+   // Playback state
+   private isPlaying: boolean = false
+   private isPaused: boolean = false
+   private currentTime: number = 0
+
+   // Timing
+   private startTime: number = 0 // Real-time when playback started
+   private pausedTime: number = 0 // Accumulated paused duration
+   private pauseStartTime: number = 0
+
+   // Audio
+   private audioElement?: HTMLAudioElement
+   private audioUrl?: string
+   private audioLoaded: boolean = false
+
+   /**
+    * Load VMD animation file and optionally audio
+    */
+   async loadVmd(vmdUrl: string, audioUrl?: string): Promise<void> {
+     // Load animation
+     this.frames = await VMDLoader.load(vmdUrl)
+     this.processFrames()
+
+     // Load audio if provided
+     if (audioUrl) {
+       await this.loadAudio(audioUrl)
+     }
+   }
+
+   /**
+    * Load audio file
+    */
+   async loadAudio(url: string): Promise<void> {
+     this.audioUrl = url
+     this.audioLoaded = false
+
+     return new Promise((resolve, reject) => {
+       const audio = new Audio(url)
+       audio.preload = "auto"
+
+       audio.addEventListener("loadeddata", () => {
+         this.audioElement = audio
+         this.audioLoaded = true
+         resolve()
+       })
+
+       audio.addEventListener("error", (e) => {
+         console.warn("Failed to load audio:", url, e)
+         this.audioLoaded = false
+         // Don't reject - animation should still work without audio
+         resolve()
+       })
+
+       audio.load()
+     })
+   }
+
+   /**
+    * Process frames into tracks
+    */
+   private processFrames(): void {
+     // Process bone frames
+     const allBoneKeyFrames: Array<{ boneFrame: BoneFrame; time: number }> = []
+     for (const keyFrame of this.frames) {
+       for (const boneFrame of keyFrame.boneFrames) {
+         allBoneKeyFrames.push({
+           boneFrame,
+           time: keyFrame.time,
+         })
+       }
+     }
+
+     const boneKeyFramesByBone = new Map<string, Array<{ boneFrame: BoneFrame; time: number }>>()
+     for (const { boneFrame, time } of allBoneKeyFrames) {
+       if (!boneKeyFramesByBone.has(boneFrame.boneName)) {
+         boneKeyFramesByBone.set(boneFrame.boneName, [])
+       }
+       boneKeyFramesByBone.get(boneFrame.boneName)!.push({ boneFrame, time })
+     }
+
+     for (const keyFrames of boneKeyFramesByBone.values()) {
+       keyFrames.sort((a, b) => a.time - b.time)
+     }
+
+     // Process morph frames
+     const allMorphKeyFrames: Array<{ morphFrame: MorphFrame; time: number }> = []
+     for (const keyFrame of this.frames) {
+       for (const morphFrame of keyFrame.morphFrames) {
+         allMorphKeyFrames.push({
+           morphFrame,
+           time: keyFrame.time,
+         })
+       }
+     }
+
+     const morphKeyFramesByMorph = new Map<string, Array<{ morphFrame: MorphFrame; time: number }>>()
+     for (const { morphFrame, time } of allMorphKeyFrames) {
+       if (!morphKeyFramesByMorph.has(morphFrame.morphName)) {
+         morphKeyFramesByMorph.set(morphFrame.morphName, [])
+       }
+       morphKeyFramesByMorph.get(morphFrame.morphName)!.push({ morphFrame, time })
+     }
+
+     for (const keyFrames of morphKeyFramesByMorph.values()) {
+       keyFrames.sort((a, b) => a.time - b.time)
+     }
+
+     // Store tracks
+     this.boneTracks = boneKeyFramesByBone
+     this.morphTracks = morphKeyFramesByMorph
+
+     // Calculate animation duration from max frame time
+     let maxFrameTime = 0
+     for (const keyFrames of this.boneTracks.values()) {
+       if (keyFrames.length > 0) {
+         const lastTime = keyFrames[keyFrames.length - 1].time
+         if (lastTime > maxFrameTime) {
+           maxFrameTime = lastTime
+         }
+       }
+     }
+     for (const keyFrames of this.morphTracks.values()) {
+       if (keyFrames.length > 0) {
+         const lastTime = keyFrames[keyFrames.length - 1].time
+         if (lastTime > maxFrameTime) {
+           maxFrameTime = lastTime
+         }
+       }
+     }
+     this.duration = maxFrameTime > 0 ? maxFrameTime : 0
+   }
+
+   /**
+    * Start or resume playback
+    */
+   play(): void {
+     if (this.frames.length === 0) return
+
+     if (this.isPaused) {
+       // Resume from the paused position: keep currentTime where it was paused
+       this.isPaused = false
+       // Re-anchor startTime so the elapsed-time calculation continues smoothly
+       this.startTime = performance.now() - this.currentTime * 1000
+     } else {
+       // Start from beginning or current seek position
+       this.startTime = performance.now() - this.currentTime * 1000
+       this.pausedTime = 0
+     }
+
+     this.isPlaying = true
+
+     // Play audio if available
+     if (this.audioElement && this.audioLoaded) {
+       this.audioElement.currentTime = this.currentTime
+       this.audioElement.play().catch((error) => {
+         console.warn("Audio play failed:", error)
+       })
+     }
+   }
+
+   /**
+    * Pause playback
+    */
+   pause(): void {
+     if (!this.isPlaying || this.isPaused) return
+
+     this.isPaused = true
+     this.pauseStartTime = performance.now()
+
+     // Pause audio if available
+     if (this.audioElement) {
+       this.audioElement.pause()
+     }
+   }
+
+   /**
+    * Stop playback and reset to beginning
+    */
+   stop(): void {
+     this.isPlaying = false
+     this.isPaused = false
+     this.currentTime = 0
+     this.startTime = 0
+     this.pausedTime = 0
+
+     // Stop audio if available
+     if (this.audioElement) {
+       this.audioElement.pause()
+       this.audioElement.currentTime = 0
+     }
+   }
+
+   /**
+    * Seek to specific time
+    */
+   seek(time: number): void {
+     const clampedTime = Math.max(0, Math.min(time, this.duration))
+     this.currentTime = clampedTime
+
+     // Adjust start time if playing
+     if (this.isPlaying && !this.isPaused) {
+       this.startTime = performance.now() - clampedTime * 1000
+       this.pausedTime = 0
+     }
+
+     // Seek audio if available
+     if (this.audioElement && this.audioLoaded) {
+       this.audioElement.currentTime = clampedTime
+     }
+   }
+
+   /**
+    * Update playback and return current pose
+    * Returns null if not playing, but returns current pose if paused
+    */
+   update(currentRealTime: number): AnimationPose | null {
+     if (!this.isPlaying || this.frames.length === 0) {
+       return null
+     }
+
+     // If paused, return current pose at paused time (no time update)
+     if (this.isPaused) {
+       return this.getPoseAtTime(this.currentTime)
+     }
+
+     // Calculate current animation time
+     const elapsedSeconds = (currentRealTime - this.startTime) / 1000
+     this.currentTime = elapsedSeconds
+
+     // Check if animation ended
+     if (this.currentTime >= this.duration) {
+       this.currentTime = this.duration
+       this.pause() // Auto-pause at end
+       return this.getPoseAtTime(this.currentTime)
+     }
+
+     // Sync audio if present (with tolerance)
+     if (this.audioElement && this.audioLoaded) {
+       const audioTime = this.audioElement.currentTime
+       const syncTolerance = 0.1 // 100ms tolerance
+       if (Math.abs(audioTime - this.currentTime) > syncTolerance) {
+         this.audioElement.currentTime = this.currentTime
+       }
+     }
+
+     return this.getPoseAtTime(this.currentTime)
+   }
+
+   /**
+    * Get pose at specific time (pure function)
+    */
+   getPoseAtTime(time: number): AnimationPose {
+     const pose: AnimationPose = {
+       boneRotations: new Map(),
+       boneTranslations: new Map(),
+       morphWeights: new Map(),
+     }
+
+     // Helper to find upper bound index (binary search)
+     const upperBoundFrameIndex = (time: number, keyFrames: Array<{ boneFrame: BoneFrame; time: number }>): number => {
+       let left = 0
+       let right = keyFrames.length
+       while (left < right) {
+         const mid = Math.floor((left + right) / 2)
+         if (keyFrames[mid].time <= time) {
+           left = mid + 1
+         } else {
+           right = mid
+         }
+       }
+       return left
+     }
+
+     // Process each bone track
+     for (const [boneName, keyFrames] of this.boneTracks.entries()) {
+       if (keyFrames.length === 0) continue
+
+       // Clamp frame time to track range
+       const startTime = keyFrames[0].time
+       const endTime = keyFrames[keyFrames.length - 1].time
+       const clampedFrameTime = Math.max(startTime, Math.min(endTime, time))
+
+       const upperBoundIndex = upperBoundFrameIndex(clampedFrameTime, keyFrames)
+       const upperBoundIndexMinusOne = upperBoundIndex - 1
+
+       if (upperBoundIndexMinusOne < 0) continue
+
+       const timeB = keyFrames[upperBoundIndex]?.time
+       const boneFrameA = keyFrames[upperBoundIndexMinusOne].boneFrame
+
+       if (timeB === undefined) {
+         // Last keyframe or beyond - use the last keyframe value
+         pose.boneRotations.set(boneName, boneFrameA.rotation)
+         pose.boneTranslations.set(boneName, boneFrameA.translation)
+       } else {
+         // Interpolate between two keyframes
+         const timeA = keyFrames[upperBoundIndexMinusOne].time
+         const boneFrameB = keyFrames[upperBoundIndex].boneFrame
+         const gradient = (clampedFrameTime - timeA) / (timeB - timeA)
+
+         // Interpolate rotation using Bezier
+         const interp = boneFrameB.interpolation
+         const rotWeight = bezierInterpolate(
+           interp[0] / 127, // x1
+           interp[1] / 127, // x2
+           interp[2] / 127, // y1
+           interp[3] / 127, // y2
+           gradient
+         )
+         const interpolatedRotation = Quat.slerp(boneFrameA.rotation, boneFrameB.rotation, rotWeight)
+
+         // Interpolate translation using Bezier (separate curves for X, Y, Z)
+         const xWeight = bezierInterpolate(
+           interp[0] / 127, // X_x1
+           interp[8] / 127, // X_x2
+           interp[4] / 127, // X_y1
+           interp[12] / 127, // X_y2
+           gradient
+         )
+         const yWeight = bezierInterpolate(
+           interp[16] / 127, // Y_x1
+           interp[24] / 127, // Y_x2
+           interp[20] / 127, // Y_y1
+           interp[28] / 127, // Y_y2
+           gradient
+         )
+         const zWeight = bezierInterpolate(
+           interp[32] / 127, // Z_x1
+           interp[40] / 127, // Z_x2
+           interp[36] / 127, // Z_y1
+           interp[44] / 127, // Z_y2
+           gradient
+         )
+
+         const interpolatedTranslation = new Vec3(
+           boneFrameA.translation.x + (boneFrameB.translation.x - boneFrameA.translation.x) * xWeight,
+           boneFrameA.translation.y + (boneFrameB.translation.y - boneFrameA.translation.y) * yWeight,
+           boneFrameA.translation.z + (boneFrameB.translation.z - boneFrameA.translation.z) * zWeight
+         )
+
+         pose.boneRotations.set(boneName, interpolatedRotation)
+         pose.boneTranslations.set(boneName, interpolatedTranslation)
+       }
+     }
+
+     // Helper to find upper bound index for morph frames
+     const upperBoundMorphIndex = (time: number, keyFrames: Array<{ morphFrame: MorphFrame; time: number }>): number => {
+       let left = 0
+       let right = keyFrames.length
+       while (left < right) {
+         const mid = Math.floor((left + right) / 2)
+         if (keyFrames[mid].time <= time) {
+           left = mid + 1
+         } else {
+           right = mid
+         }
+       }
+       return left
+     }
+
+     // Process each morph track
+     for (const [morphName, keyFrames] of this.morphTracks.entries()) {
+       if (keyFrames.length === 0) continue
+
+       // Clamp frame time to track range
+       const startTime = keyFrames[0].time
+       const endTime = keyFrames[keyFrames.length - 1].time
+       const clampedFrameTime = Math.max(startTime, Math.min(endTime, time))
+
+       const upperBoundIndex = upperBoundMorphIndex(clampedFrameTime, keyFrames)
+       const upperBoundIndexMinusOne = upperBoundIndex - 1
+
+       if (upperBoundIndexMinusOne < 0) continue
+
+       const timeB = keyFrames[upperBoundIndex]?.time
+       const morphFrameA = keyFrames[upperBoundIndexMinusOne].morphFrame
+
+       if (timeB === undefined) {
+         // Last keyframe or beyond - use the last keyframe value
+         pose.morphWeights.set(morphName, morphFrameA.weight)
+       } else {
+         // Linear interpolation between two keyframes
+         const timeA = keyFrames[upperBoundIndexMinusOne].time
+         const morphFrameB = keyFrames[upperBoundIndex].morphFrame
+         const gradient = (clampedFrameTime - timeA) / (timeB - timeA)
+         const interpolatedWeight = morphFrameA.weight + (morphFrameB.weight - morphFrameA.weight) * gradient
+
+         pose.morphWeights.set(morphName, interpolatedWeight)
+       }
+     }
+
+     return pose
+   }
+
+   /**
+    * Get current playback progress
+    */
+   getProgress(): AnimationProgress {
+     return {
+       current: this.currentTime,
+       duration: this.duration,
+       percentage: this.duration > 0 ? (this.currentTime / this.duration) * 100 : 0,
+     }
+   }
+
+   /**
+    * Get current time
+    */
+   getCurrentTime(): number {
+     return this.currentTime
+   }
+
+   /**
+    * Get animation duration
+    */
+   getDuration(): number {
+     return this.duration
+   }
+
+   /**
+    * Check if playing
+    */
+   isPlayingState(): boolean {
+     return this.isPlaying && !this.isPaused
+   }
+
+   /**
+    * Check if paused
+    */
+   isPausedState(): boolean {
+     return this.isPaused
+   }
+
+   /**
+    * Check whether audio is loaded
+    */
+   hasAudio(): boolean {
+     return this.audioElement !== undefined && this.audioLoaded
+   }
+
+   /**
+    * Set audio volume (0.0 to 1.0)
+    */
+   setVolume(volume: number): void {
+     if (this.audioElement) {
+       this.audioElement.volume = Math.max(0, Math.min(1, volume))
+     }
+   }
+
+   /**
+    * Mute audio
+    */
+   mute(): void {
+     if (this.audioElement) {
+       this.audioElement.muted = true
+     }
+   }
+
+   /**
+    * Unmute audio
+    */
+   unmute(): void {
+     if (this.audioElement) {
+       this.audioElement.muted = false
+     }
+   }
+ }
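
For orientation, a minimal usage sketch of the Player class added above. The import path, asset URLs, and the requestAnimationFrame driver are illustrative assumptions and are not part of the diff; only the Player methods themselves come from the file shown.

import { Player } from "./player" // assumed relative import; adjust to the package's actual entry point

async function main(): Promise<void> {
  const player = new Player()

  // Hypothetical asset URLs -- substitute your own VMD motion and audio files.
  await player.loadVmd("/assets/dance.vmd", "/assets/dance.mp3")
  player.setVolume(0.8)
  player.play()

  // Drive the player once per frame. update() returns the interpolated pose
  // (bone rotations/translations and morph weights), or null when not playing.
  const frame = (now: number): void => {
    const pose = player.update(now)
    if (pose) {
      // Apply pose.boneRotations / pose.boneTranslations / pose.morphWeights
      // to the model here.
    }
    requestAnimationFrame(frame)
  }
  requestAnimationFrame(frame)
}

main().catch(console.error)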