@spatialwalk/avatarkit 1.0.0-beta.61 → 1.0.0-beta.63
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +31 -0
- package/README.md +69 -32
- package/dist/{StreamingAudioPlayer-DIcPerS7.js → StreamingAudioPlayer-CO9WTktN.js} +1 -1
- package/dist/core/AvatarController.d.ts +2 -0
- package/dist/core/AvatarView.d.ts +3 -2
- package/dist/{index-jWgogoMs.js → index-C1md-jKJ.js} +148 -85
- package/dist/index.js +1 -1
- package/dist/utils/animation-interpolation.d.ts +1 -1
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
@@ -2,6 +2,37 @@
 
 All notable changes to this project will be documented in this file.
 
+## [1.0.0-beta.63] - 2026-01-14
+
+### ✨ New Features
+- **Audio Context Initialization API** - Added `initializeAudioContext()` method to `AvatarController`
+  - Must be called in a user gesture context (click, touchstart, etc.) before any audio operations
+  - Ensures AudioContext is created and initialized in a user gesture context, preventing browser security policy issues
+  - All audio operations (`send()`, `yieldAudioData()`, `start()`, `playback()`, etc.) now require prior initialization
+
+### 🔧 Improvements
+- **Initialization Flow** - Removed all lazy initialization logic for audio context
+  - Audio context initialization is now centralized in the `initializeAudioContext()` method
+  - All audio operations check for initialization before proceeding
+  - Clear error messages when audio operations are attempted without initialization
+
+### 🐛 Bugfixes
+- **Audio Context User Gesture Requirement** - Fixed issue where AudioContext could not be properly initialized when external applications request recording permissions
+  - Audio context must now be initialized in a user gesture context, ensuring browser security policies are satisfied
+
+## [1.0.0-beta.62] - 2026-01-14
+
+### ✨ New Features
+- **Bezier Curve Transition Animation** - Implemented Bezier curve easing functions for smoother transitions
+  - Added Bezier curve interpolation with different curves for different facial components (jaw, expression, eye, neck, global)
+  - Replaced linear interpolation with Bezier curve interpolation for more natural animation
+  - Split transition duration into start (200ms) and end (1600ms) for different transition types
+
+### 🔧 Improvements
+- **Transition API Enhancement** - Updated `generateTransitionFromIdle()` to support both start and end transitions
+  - Added `transitionType` parameter: `'start'` for Idle -> Flame, `'end'` for Flame -> Idle
+  - Removed deprecated linear interpolation code and unused easing functions
+
 ## [1.0.0-beta.61] - 2026-01-14
 
 ### 🔧 Improvements
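The beta.63 entries above amount to a two-phase contract: initialize once inside a user gesture, then drive audio. A minimal TypeScript sketch of that contract, assuming an `avatarView` constructed as in the README examples below (the `#start` button is illustrative):

```typescript
// Sketch of the beta.63 initialization contract (element id is illustrative).
const startButton = document.querySelector<HTMLButtonElement>('#start')!

startButton.addEventListener('click', async () => {
  // Phase 1: must run inside the user gesture handler.
  await avatarView.controller.initializeAudioContext()
  // Phase 2: audio operations are only legal after initialization.
  await avatarView.controller.start()
})
```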
package/README.md
CHANGED
@@ -20,6 +20,10 @@ npm install @spatialwalk/avatarkit
 
 ## 🎯 Quick Start
 
+### ⚠️ Important: Audio Context Initialization
+
+**Before using any audio-related features, you MUST initialize the audio context in a user gesture context** (e.g., `click`, `touchstart` event handlers). This is required by browser security policies. Calling `initializeAudioContext()` outside a user gesture will fail.
+
 ### Basic Usage
 
 ```typescript
@@ -70,13 +74,21 @@ const avatar = await avatarManager.load('character-id', (progress) => {
 const container = document.getElementById('avatar-container')
 const avatarView = new AvatarView(avatar, container)
 
-// 4.
-
-
-
-
-avatarView.
-
+// 4. ⚠️ CRITICAL: Initialize audio context (MUST be called in user gesture context)
+// This method MUST be called within a user gesture event handler (click, touchstart, etc.)
+// to satisfy browser security policies. Calling it outside a user gesture will fail.
+button.addEventListener('click', async () => {
+  // Initialize audio context - MUST be in user gesture context
+  await avatarView.controller.initializeAudioContext()
+
+  // 5. Start real-time communication (SDK mode only)
+  await avatarView.controller.start()
+
+  // 6. Send audio data (SDK mode, must be mono PCM16 format matching configured sample rate)
+  const audioData = new ArrayBuffer(1024) // Example: PCM16 audio data at configured sample rate
+  avatarView.controller.send(audioData, false) // Send audio data
+  avatarView.controller.send(audioData, true) // end=true marks the end of current conversation round
+})
 ```
 
 ### Host Mode Example
@@ -89,10 +101,17 @@ avatarView.avatarController.send(audioData, true) // end=true marks the end of c
 const container = document.getElementById('avatar-container')
 const avatarView = new AvatarView(avatar, container)
 
-// 4.
-//
-
-
+// 4. ⚠️ CRITICAL: Initialize audio context (MUST be called in user gesture context)
+// This method MUST be called within a user gesture event handler (click, touchstart, etc.)
+// to satisfy browser security policies. Calling it outside a user gesture will fail.
+button.addEventListener('click', async () => {
+  // Initialize audio context - MUST be in user gesture context
+  await avatarView.controller.initializeAudioContext()
+
+  // 5. Host Mode Workflow:
+  // Send audio data first to get conversationId, then use it to send animation data
+  const conversationId = avatarView.controller.yieldAudioData(audioData, false)
+  avatarView.controller.yieldFramesData(animationDataArray, conversationId) // animationDataArray: (Uint8Array | ArrayBuffer)[]
 ```
 
 ### Complete Examples
@@ -350,34 +369,52 @@ Audio/animation playback controller (playback layer), manages synchronized playb
 #### SDK Mode Methods
 
 ```typescript
-//
-
-
-//
-
-//
-
-
+// ⚠️ CRITICAL: Initialize audio context first (MUST be called in user gesture context)
+// This method MUST be called within a user gesture event handler (click, touchstart, etc.)
+// to satisfy browser security policies. Calling it outside a user gesture will fail.
+// All audio operations (start, send, etc.) require prior initialization.
+button.addEventListener('click', async () => {
+  // Initialize audio context - MUST be in user gesture context
+  await avatarView.controller.initializeAudioContext()
+
+  // Start WebSocket service
+  await avatarView.controller.start()
+
+  // Send audio data (must be 16kHz mono PCM16 format)
+  const conversationId = avatarView.controller.send(audioData: ArrayBuffer, end: boolean)
+  // Returns: conversationId - Conversation ID for this conversation session
+  // end: false (default) - Continue sending audio data for current conversation
+  // end: true - Mark the end of current conversation round. After end=true, sending new audio data will interrupt any ongoing playback from the previous conversation round
+})
 
 // Close WebSocket service
-avatarView.
+avatarView.controller.close()
 ```
 
 #### Host Mode Methods
 
 ```typescript
-//
-
-
-
-)
-//
-
-
-
-
-
-
+// ⚠️ CRITICAL: Initialize audio context first (MUST be called in user gesture context)
+// This method MUST be called within a user gesture event handler (click, touchstart, etc.)
+// to satisfy browser security policies. Calling it outside a user gesture will fail.
+// All audio operations (yieldAudioData, yieldFramesData, etc.) require prior initialization.
+button.addEventListener('click', async () => {
+  // Initialize audio context - MUST be in user gesture context
+  await avatarView.controller.initializeAudioContext()
+
+  // Stream audio chunks (must be 16kHz mono PCM16 format)
+  const conversationId = avatarView.controller.yieldAudioData(
+    data: Uint8Array,        // Audio chunk data
+    isLast: boolean = false  // Whether this is the last chunk
+  )
+  // Returns: conversationId - Conversation ID for this audio session
+
+  // Stream animation keyframes (requires conversationId from audio data)
+  avatarView.controller.yieldFramesData(
+    keyframesDataArray: (Uint8Array | ArrayBuffer)[], // Animation keyframes binary data array (each element is a protobuf encoded Message)
+    conversationId: string // Conversation ID (required)
+  )
+})
 ```
 
 **⚠️ Important: Conversation ID (conversationId) Management**
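The README examples above tie initialization to a dedicated button click. One alternative wiring, an assumption not shown in the README, is to initialize on the first trusted interaction anywhere on the page, since any trusted pointer event satisfies the gesture requirement:

```typescript
// Sketch: initialize once on the first trusted interaction on the page.
// { once: true } removes the listener after the first pointerdown fires.
document.addEventListener(
  'pointerdown',
  () => {
    avatarView.controller.initializeAudioContext().catch((err) => {
      console.error('[app] audio context initialization failed:', err)
    })
  },
  { once: true }
)
```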
package/dist/{StreamingAudioPlayer-DIcPerS7.js → StreamingAudioPlayer-CO9WTktN.js}
CHANGED
@@ -1,7 +1,7 @@
 var __defProp = Object.defineProperty;
 var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
 var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
-import { A as APP_CONFIG, l as logger, e as errorToMessage, a as logEvent } from "./index-jWgogoMs.js";
+import { A as APP_CONFIG, l as logger, e as errorToMessage, a as logEvent } from "./index-C1md-jKJ.js";
 class StreamingAudioPlayer {
   constructor(options) {
     __publicField(this, "audioContext", null);
package/dist/core/AvatarController.d.ts
CHANGED
@@ -44,6 +44,8 @@ export declare class AvatarController {
     playbackMode?: DrivingServiceMode;
   });
   getCurrentConversationId(): string | null;
+  initializeAudioContext(): Promise<void>;
+  private checkAudioContextInitialized;
   start(): Promise<void>;
   send(audioData: ArrayBuffer, end?: boolean): string | null;
   close(): void;
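The two new declarations above formalize the gate: `initializeAudioContext()` is the public entry point, and `checkAudioContextInitialized` is the private guard the other methods call. A hedged sketch of handling the failure mode; the `code` property and its value are taken from the bundled source further down, and reading it this way is an assumption about `SPAvatarError`'s shape:

```typescript
// Sketch: distinguish the "not initialized" error from other failures.
try {
  await avatarView.controller.start()
} catch (err) {
  if ((err as { code?: string }).code === 'AUDIO_CONTEXT_NOT_INITIALIZED') {
    // initializeAudioContext() was never called from a user gesture;
    // show UI asking the user to tap/click, then retry.
  } else {
    throw err
  }
}
```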
package/dist/core/AvatarView.d.ts
CHANGED
@@ -22,7 +22,8 @@ export declare class AvatarView {
   private currentFPS;
   private transitionKeyframes;
   private transitionStartTime;
-  private readonly
+  private readonly startTransitionDurationMs;
+  private readonly endTransitionDurationMs;
   private cachedIdleFirstFrame;
   private idleCurrentFrameIndex;
   private characterHandle;
@@ -61,7 +62,7 @@ export declare class AvatarView {
   private stopRealtimeRendering;
   dispose(): void;
   renderFlame(flame: Flame, enableIdleRendering?: boolean): Promise<void>;
-  generateTransitionFromIdle(toFlame: Flame, frameCount: number): Promise<Flame[]>;
+  generateTransitionFromIdle(toFlame: Flame, frameCount: number, transitionType?: 'start' | 'end'): Promise<Flame[]>;
   private rerenderCurrentFrameWithNewCamera;
   private handleResize;
   get transform(): {
package/dist/{index-jWgogoMs.js → index-C1md-jKJ.js}
CHANGED
@@ -7624,7 +7624,7 @@ const _AnimationPlayer = class _AnimationPlayer {
     if (this.streamingPlayer) {
       return;
     }
-    const { StreamingAudioPlayer } = await import("./StreamingAudioPlayer-DIcPerS7.js");
+    const { StreamingAudioPlayer } = await import("./StreamingAudioPlayer-CO9WTktN.js");
     const { AvatarSDK: AvatarSDK2 } = await Promise.resolve().then(() => AvatarSDK$1);
     const audioFormat = AvatarSDK2.getAudioFormat();
     this.streamingPlayer = new StreamingAudioPlayer({
@@ -8961,7 +8961,7 @@ class AvatarSDK {
 }
 __publicField(AvatarSDK, "_isInitialized", false);
 __publicField(AvatarSDK, "_configuration", null);
-__publicField(AvatarSDK, "_version", "1.0.0-beta.61");
+__publicField(AvatarSDK, "_version", "1.0.0-beta.63");
 __publicField(AvatarSDK, "_avatarCore", null);
 __publicField(AvatarSDK, "_dynamicSdkConfig", null);
 const AvatarSDK$1 = Object.freeze(Object.defineProperty({
@@ -10741,38 +10741,71 @@ class AvatarController {
   getCurrentConversationId() {
     return this.getEffectiveConversationId();
   }
-  async
-
-
-
-        "NETWORK_LAYER_NOT_AVAILABLE"
-      );
+  async initializeAudioContext() {
+    var _a;
+    if ((_a = this.animationPlayer) == null ? void 0 : _a.isStreamingReady()) {
+      return;
     }
     if (!this.animationPlayer) {
       this.animationPlayer = new AnimationPlayer();
+    }
+    if (!this.animationPlayer.isStreamingReady()) {
       try {
         await this.animationPlayer.createAndInitializeStreamingPlayer();
       } catch (error) {
         const message = error instanceof Error ? error.message : String(error);
-        logger.error("[AvatarController] Failed to
-
-
-
-        });
-        throw error;
+        logger.error("[AvatarController] Failed to initialize audio context:", message);
+        throw new SPAvatarError(
+          `Failed to initialize audio context: ${message}`,
+          "AUDIO_CONTEXT_INIT_FAILED"
+        );
       }
     }
+    const streamingPlayer = this.animationPlayer.getStreamingPlayer();
+    if (streamingPlayer) {
+      const audioContext = streamingPlayer.audioContext;
+      if (audioContext && audioContext.state === "suspended") {
+        try {
+          await audioContext.resume();
+        } catch (err) {
+          logger.warn("[AvatarController] Failed to resume AudioContext during initialization:", err);
+        }
+      }
+    }
+  }
+  checkAudioContextInitialized() {
+    var _a;
+    if (!((_a = this.animationPlayer) == null ? void 0 : _a.isStreamingReady())) {
+      throw new SPAvatarError(
+        "Audio context not initialized. Call initializeAudioContext() in a user gesture context first.",
+        "AUDIO_CONTEXT_NOT_INITIALIZED"
+      );
+    }
+  }
+  async start() {
+    if (!this.networkLayer) {
+      throw new SPAvatarError(
+        "Network layer not available. Use SDK mode.",
+        "NETWORK_LAYER_NOT_AVAILABLE"
+      );
+    }
+    this.checkAudioContextInitialized();
     await this.networkLayer.connect(this.avatar.id);
   }
   send(audioData, end = false) {
-    var _a, _b, _c;
+    var _a, _b, _c, _d;
+    try {
+      this.checkAudioContextInitialized();
+    } catch (error) {
+      (_a = this.onError) == null ? void 0 : _a.call(this, error);
+      return null;
+    }
     if (!this.networkLayer) {
-      (
+      (_b = this.onError) == null ? void 0 : _b.call(this, new SPAvatarError("Network layer not available", "NETWORK_LAYER_NOT_AVAILABLE"));
       return null;
     }
     if (!this.networkLayer.canSend()) {
-      (
+      (_c = this.onError) == null ? void 0 : _c.call(this, new SPAvatarError("Service not connected", "NOT_CONNECTED"));
       logEvent("character_manager", "warning", {
         avatar_id: this.avatar.id,
         event: "send_not_connected"
@@ -10790,7 +10823,7 @@ class AvatarController {
     }
     if (!this.isPlaying && this.currentState === AvatarState.idle) {
       this.currentState = AvatarState.active;
-      (
+      (_d = this.onConversationState) == null ? void 0 : _d.call(this, this.mapToConversationState(AvatarState.active));
     }
     return this.networkLayer.getCurrentConversationId();
   }
@@ -10810,18 +10843,13 @@
     (_a = this.onConnectionState) == null ? void 0 : _a.call(this, ConnectionState.disconnected);
   }
   async playback(initialAudioChunks, initialKeyframes) {
+    this.checkAudioContextInitialized();
     if (this.isPlaying || this.currentConversationId) {
       this.interrupt();
     }
     this.currentConversationId = this.generateAndLogNewConversationId();
     this.reqEnd = false;
     this.clearPlaybackData();
-    if (!this.animationPlayer) {
-      this.animationPlayer = new AnimationPlayer();
-    }
-    if (!this.animationPlayer.isStreamingReady()) {
-      await this.animationPlayer.createAndInitializeStreamingPlayer();
-    }
     if (initialAudioChunks && initialAudioChunks.length > 0) {
       this.pendingAudioChunks.push(...initialAudioChunks);
     }
@@ -10848,7 +10876,13 @@
     return this.currentConversationId;
   }
   yieldAudioData(data, isLast = false) {
-    var _a, _b;
+    var _a, _b, _c;
+    try {
+      this.checkAudioContextInitialized();
+    } catch (error) {
+      (_a = this.onError) == null ? void 0 : _a.call(this, error);
+      return null;
+    }
     if (this.reqEnd && this.isPlaying && this.currentConversationId) {
       this.interrupt();
       this.currentConversationId = this.generateAndLogNewConversationId();
@@ -10879,12 +10913,12 @@
         metrics.tap2Timestamp = Date.now();
       }
     }
-    if (this.isPlaying && ((
+    if (this.isPlaying && ((_b = this.animationPlayer) == null ? void 0 : _b.isStreamingReady())) {
       this.animationPlayer.addAudioChunk(data, isLast);
     } else {
       if (data.length > 0 || isLast) {
        this.pendingAudioChunks.push({ data, isLast });
-        (
+        (_c = this.onConversationState) == null ? void 0 : _c.call(this, this.mapToConversationState(AvatarState.active));
      }
    }
    return this.currentConversationId;
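Note the asymmetry visible in these hunks: `start()` and `playback()` throw when the guard fails, while `send()` and `yieldAudioData()` route the error through `onError` and return `null`. A small sketch of the null-return path; assigning `onError` directly is an assumption suggested by the `this.onError` calls above, and `chunk` is an illustrative buffer:

```typescript
// Sketch: streaming-path errors arrive via onError, not via throw.
avatarView.controller.onError = (err: unknown) => {
  console.warn('[app] AvatarController error:', err)
}

const chunk = new Uint8Array(640) // illustrative: 20 ms of 16 kHz mono PCM16
const conversationId = avatarView.controller.yieldAudioData(chunk, false)
if (conversationId === null) {
  // Either the audio context was never initialized or the layer is
  // unavailable; the specific error was already delivered to onError.
}
```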
@@ -11237,6 +11271,7 @@
   }
   async startStreamingPlaybackInternal() {
     var _a, _b, _c;
+    this.checkAudioContextInitialized();
     if (this.isPlaying) {
       this.isStartingPlayback = false;
       return;
@@ -11245,30 +11280,15 @@
       return;
     }
     this.isStartingPlayback = true;
-    if (!this.animationPlayer) {
-      this.animationPlayer = new AnimationPlayer();
-    }
-    if (!this.animationPlayer.isStreamingReady()) {
-      try {
-        await this.animationPlayer.createAndInitializeStreamingPlayer();
-      } catch (error) {
-        this.isStartingPlayback = false;
-        const message = error instanceof Error ? error.message : String(error);
-        logger.error("[AvatarController] Failed to create streaming player:", message);
-        logEvent("character_player", "error", {
-          avatar_id: this.avatar.id,
-          event: "streaming_player_init_failed",
-          reason: message
-        });
-        throw error;
-      }
-    }
     if (!this.currentKeyframes || this.currentKeyframes.length === 0) {
       this.isStartingPlayback = false;
       logger.warn("[AvatarController] No animation data to play");
       return;
     }
     try {
+      if (!this.animationPlayer) {
+        throw new SPAvatarError("Animation player not initialized", "ANIMATION_PLAYER_NOT_INITIALIZED");
+      }
       await this.animationPlayer.prepareStreamingPlayer(() => {
         var _a2, _b2;
         this.isPlaying = false;
@@ -11490,21 +11510,7 @@
   async startAudioOnlyPlayback() {
     var _a, _b;
     if (!this.animationPlayer) {
-
-    }
-    if (!this.animationPlayer.isStreamingReady()) {
-      try {
-        await this.animationPlayer.createAndInitializeStreamingPlayer();
-      } catch (error) {
-        const message = error instanceof Error ? error.message : String(error);
-        logger.error("[AvatarController] Failed to create streaming player for audio-only mode:", message);
-        logEvent("character_player", "error", {
-          avatar_id: this.avatar.id,
-          event: "audio_only_streaming_player_init_failed",
-          reason: message
-        });
-        throw error;
-      }
+      throw new SPAvatarError("Animation player not initialized", "ANIMATION_PLAYER_NOT_INITIALIZED");
     }
     try {
       await this.animationPlayer.prepareStreamingPlayer(() => {
@@ -11587,7 +11593,9 @@
   }
   addAudioChunkToBuffer(data, isLast) {
     if (!this.animationPlayer) {
-
+      logger.warn("[AvatarController] animationPlayer is null in addAudioChunkToBuffer, this should not happen");
+      this.pendingAudioChunks.push({ data, isLast });
+      return;
     }
     if (this.isPlaying && this.animationPlayer.isStreamingReady()) {
       this.animationPlayer.addAudioChunk(data, isLast);
@@ -13851,31 +13859,82 @@ function lerpArrays(from, to2, progress) {
   }
   return result2;
 }
-
-
+const clamp01 = (x2) => Math.max(0, Math.min(1, x2));
+function createBezierEasing(x1, y1, x2, y2) {
+  const cx = 3 * x1;
+  const bx = 3 * (x2 - x1) - cx;
+  const ax = 1 - cx - bx;
+  const cy = 3 * y1;
+  const by = 3 * (y2 - y1) - cy;
+  const ay = 1 - cy - by;
+  const sampleCurveX = (t2) => ((ax * t2 + bx) * t2 + cx) * t2;
+  const sampleCurveY = (t2) => ((ay * t2 + by) * t2 + cy) * t2;
+  const sampleCurveDerivativeX = (t2) => (3 * ax * t2 + 2 * bx) * t2 + cx;
+  const solveCurveX = (x3) => {
+    let t2 = x3;
+    for (let i2 = 0; i2 < 8; i2++) {
+      const error = sampleCurveX(t2) - x3;
+      if (Math.abs(error) < 1e-6) break;
+      const d2 = sampleCurveDerivativeX(t2);
+      if (Math.abs(d2) < 1e-6) break;
+      t2 -= error / d2;
+    }
+    return t2;
+  };
+  return (x3) => {
+    if (x3 <= 0) return 0;
+    if (x3 >= 1) return 1;
+    return sampleCurveY(solveCurveX(x3));
+  };
+}
+const BEZIER_CURVES = {
+  jaw: createBezierEasing(0.2, 0.8, 0.3, 1),
+  expression: createBezierEasing(0.4, 0, 0.2, 1),
+  eye: createBezierEasing(0.3, 0, 0.1, 1),
+  neck: createBezierEasing(0.1, 0.2, 0.2, 1),
+  global: createBezierEasing(0.42, 0, 0.58, 1)
+};
+const TIME_SCALE = {
+  jaw: 2.5,
+  expression: 1.6,
+  eye: 1.3,
+  neck: 1,
+  global: 1
+};
+function bezierLerp(from, to2, progress) {
+  const getT = (key) => {
+    const scaledProgress = clamp01(progress * TIME_SCALE[key]);
+    return BEZIER_CURVES[key](scaledProgress);
+  };
   return {
-    translation: lerpArrays(from.translation || [0, 0, 0], to2.translation || [0, 0, 0],
-    rotation: lerpArrays(from.rotation || [0, 0, 0], to2.rotation || [0, 0, 0],
-    neckPose: lerpArrays(from.neckPose || [0, 0, 0], to2.neckPose || [0, 0, 0],
-    jawPose: lerpArrays(from.jawPose || [0, 0, 0], to2.jawPose || [0, 0, 0],
-    eyePose: lerpArrays(from.eyePose || [0, 0, 0, 0, 0, 0], to2.eyePose || [0, 0, 0, 0, 0, 0],
+    translation: lerpArrays(from.translation || [0, 0, 0], to2.translation || [0, 0, 0], getT("global")),
+    rotation: lerpArrays(from.rotation || [0, 0, 0], to2.rotation || [0, 0, 0], getT("global")),
+    neckPose: lerpArrays(from.neckPose || [0, 0, 0], to2.neckPose || [0, 0, 0], getT("neck")),
+    jawPose: lerpArrays(from.jawPose || [0, 0, 0], to2.jawPose || [0, 0, 0], getT("jaw")),
+    eyePose: lerpArrays(from.eyePose || [0, 0, 0, 0, 0, 0], to2.eyePose || [0, 0, 0, 0, 0, 0], getT("eye")),
     eyeLid: (() => {
       const fromEyelid = from.eyeLid;
       const toEyelid = to2.eyeLid;
-      if (fromEyelid
-        return lerpArrays(fromEyelid, toEyelid,
+      if ((fromEyelid == null ? void 0 : fromEyelid.length) && (toEyelid == null ? void 0 : toEyelid.length))
+        return lerpArrays(fromEyelid, toEyelid, getT("eye"));
       return fromEyelid || toEyelid || [];
     })(),
-    expression: lerpArrays(from.expression || [], to2.expression || [],
+    expression: lerpArrays(from.expression || [], to2.expression || [], getT("expression"))
   };
 }
 function generateTransitionFrames(from, to2, durationMs, fps = 25) {
   const steps = Math.max(1, Math.floor(durationMs / 1e3 * fps));
   const frames = Array.from({ length: steps });
+  if (steps === 1) {
+    frames[0] = to2;
+    return frames;
+  }
   for (let i2 = 0; i2 < steps; i2++) {
     const progress = i2 / (steps - 1);
-    frames[i2] =
+    frames[i2] = bezierLerp(from, to2, progress);
   }
+  frames[0] = from;
+  frames[frames.length - 1] = to2;
   return frames;
 }
 class AvatarView {
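`createBezierEasing` above is the standard CSS `cubic-bezier(x1, y1, x2, y2)` construction: Newton-iterate to invert the x polynomial, then sample y. The per-component `TIME_SCALE` then compresses time so fast channels finish early. A self-contained sketch that mirrors that math to show the effect on the jaw channel; the helper below is a reimplementation for illustration, not an exported API:

```typescript
// Mirror of the bundled easing (CSS cubic-bezier semantics), for illustration.
function cubicBezier(x1: number, y1: number, x2: number, y2: number) {
  const cx = 3 * x1, bx = 3 * (x2 - x1) - cx, ax = 1 - cx - bx
  const cy = 3 * y1, by = 3 * (y2 - y1) - cy, ay = 1 - cy - by
  const X = (t: number) => ((ax * t + bx) * t + cx) * t
  const Y = (t: number) => ((ay * t + by) * t + cy) * t
  const dX = (t: number) => (3 * ax * t + 2 * bx) * t + cx
  return (x: number): number => {
    if (x <= 0) return 0
    if (x >= 1) return 1
    let t = x // Newton's method on X(t) = x
    for (let i = 0; i < 8; i++) {
      const err = X(t) - x
      if (Math.abs(err) < 1e-6) break
      const d = dX(t)
      if (Math.abs(d) < 1e-6) break
      t -= err / d
    }
    return Y(t)
  }
}

const clamp01 = (x: number) => Math.max(0, Math.min(1, x))
const jaw = cubicBezier(0.2, 0.8, 0.3, 1) // ease-out curve used for jawPose

// With TIME_SCALE.jaw = 2.5, the jaw channel saturates by overall
// progress 0.4 while neck/global (scale 1) are still mid-transition.
for (const p of [0.1, 0.25, 0.4, 0.7, 1]) {
  console.log(p.toFixed(2), jaw(clamp01(p * 2.5)).toFixed(3))
}
```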
@@ -13900,7 +13959,8 @@ class AvatarView {
     __publicField(this, "currentFPS", 0);
     __publicField(this, "transitionKeyframes", []);
     __publicField(this, "transitionStartTime", 0);
-    __publicField(this, "
+    __publicField(this, "startTransitionDurationMs", 200);
+    __publicField(this, "endTransitionDurationMs", 1600);
     __publicField(this, "cachedIdleFirstFrame", null);
     __publicField(this, "idleCurrentFrameIndex", 0);
     __publicField(this, "characterHandle", null);
@@ -13957,12 +14017,12 @@
     toFixed.expression = ensureLen(toFixed.expression, exprLen);
     return { from: fromFixed, to: toFixed };
   }
-  generateAndAlignTransitionFrames(from, to2) {
+  generateAndAlignTransitionFrames(from, to2, durationMs) {
     const aligned = this.alignFlamePair(from, to2);
     let keyframes = generateTransitionFrames(
       aligned.from,
       aligned.to,
-
+      durationMs,
       APP_CONFIG.animation.fps
     );
     if (keyframes.length < 2) {
@@ -14321,7 +14381,8 @@
       return;
     }
     const elapsed = performance.now() - this.transitionStartTime;
-    const
+    const currentTransitionDurationMs = state === "transitioningToSpeaking" ? this.startTransitionDurationMs : this.endTransitionDurationMs;
+    const progress = Math.min(1, Math.max(0, elapsed / currentTransitionDurationMs));
     const steps = this.transitionKeyframes.length;
     const idx = Math.min(steps - 1, Math.floor(progress * (steps - 1)));
     const currentFrame = this.transitionKeyframes[idx];
@@ -14347,7 +14408,7 @@
         return;
       }
     }
-    if (state === "transitioningToSpeaking" && this.transitionStartTime > 0 && this.transitionKeyframes.length > 0 && elapsed >= this.
+    if (state === "transitioningToSpeaking" && this.transitionStartTime > 0 && this.transitionKeyframes.length > 0 && elapsed >= this.startTransitionDurationMs + 100) {
       this.setState("speaking");
       this.transitionKeyframes = [];
       this.avatarController.onTransitionComplete();
@@ -14477,7 +14538,7 @@
     await this.getCachedIdleFirstFrame();
     const firstSpeaking = keyframes[0];
     const firstSpeakingWithPostProcessing = this.avatarController.applyPostProcessingToFlame(firstSpeaking);
-    this.transitionKeyframes = this.generateAndAlignTransitionFrames(idleFrameProto, firstSpeakingWithPostProcessing);
+    this.transitionKeyframes = this.generateAndAlignTransitionFrames(idleFrameProto, firstSpeakingWithPostProcessing, this.startTransitionDurationMs);
     this.transitionStartTime = performance.now();
     if (this.transitionKeyframes.length === 0) {
       this.setState("speaking");
@@ -14530,7 +14591,7 @@
     const lastSpeaking = this.avatarController.applyPostProcessingToFlame(lastSpeakingRaw);
     const idleFirstProto = await this.getCachedIdleFirstFrame();
     if (idleFirstProto) {
-      this.transitionKeyframes = this.generateAndAlignTransitionFrames(lastSpeaking, idleFirstProto);
+      this.transitionKeyframes = this.generateAndAlignTransitionFrames(lastSpeaking, idleFirstProto, this.endTransitionDurationMs);
      this.transitionStartTime = performance.now();
      if (this.transitionKeyframes.length > 0 && this.renderingState === "transitioningToIdle") {
        if (APP_CONFIG.debug)
@@ -14640,7 +14701,7 @@
       throw error;
     }
   }
-  async generateTransitionFromIdle(toFlame, frameCount) {
+  async generateTransitionFromIdle(toFlame, frameCount, transitionType = "start") {
     if (!this.isInitialized) {
       throw new Error("AvatarView not initialized");
     }
@@ -14656,16 +14717,18 @@
       const idleFrameProto = convertWasmParamsToProtoFlame(idleParams);
       const toFlameWithPostProcessing = this.avatarController.applyPostProcessingToFlame(toFlame);
       const aligned = this.alignFlamePair(idleFrameProto, toFlameWithPostProcessing);
+      const from = transitionType === "start" ? aligned.from : aligned.to;
+      const to2 = transitionType === "start" ? aligned.to : aligned.from;
       const fps = APP_CONFIG.animation.fps;
       const durationMs = frameCount / fps * 1e3;
       const transitionFrames = generateTransitionFrames(
-
-
+        from,
+        to2,
         durationMs,
         fps
       );
-      transitionFrames[0] =
-      transitionFrames[transitionFrames.length - 1] =
+      transitionFrames[0] = from;
+      transitionFrames[transitionFrames.length - 1] = to2;
       return transitionFrames;
     } catch (error) {
       logger.error("[AvatarView] Failed to generate transition from idle:", error instanceof Error ? error.message : String(error));
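With the endpoint swap above, `transitionType: 'end'` simply reverses the aligned pair, so the same code drives both directions. A hedged usage sketch; `lastSpeakingFlame` is an illustrative variable, and 25 fps is assumed from the `fps = 25` default seen earlier:

```typescript
// Sketch: a 1.6 s return-to-idle transition, matching the beta.62
// endTransitionDurationMs default of 1600 ms at an assumed 25 fps.
const fps = 25
const frameCount = Math.round(1.6 * fps) // 40 frames

// 'start' interpolates idle -> flame; 'end' interpolates flame -> idle.
const endFrames = await avatarView.generateTransitionFromIdle(
  lastSpeakingFlame, // hypothetical: the final speaking Flame
  frameCount,
  'end'
)
```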
package/dist/index.js
CHANGED

package/dist/utils/animation-interpolation.d.ts
CHANGED
@@ -1,6 +1,6 @@
 import { Flame } from '../generated/driveningress/v1/driveningress';
 
-export declare function
+export declare function bezierLerp(from: Flame, to: Flame, progress: number): Flame;
 
 export declare function generateTransitionFrames(from: Flame, to: Flame, durationMs: number, fps?: number): Flame[];
 
package/package.json
CHANGED