@spatialwalk/avatarkit 1.0.0-beta.2 → 1.0.0-beta.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +120 -0
- package/README.md +429 -158
- package/dist/{StreamingAudioPlayer-CMEiGwxE.js → StreamingAudioPlayer-L87IFoao.js} +89 -58
- package/dist/StreamingAudioPlayer-L87IFoao.js.map +1 -0
- package/dist/animation/AnimationWebSocketClient.d.ts.map +1 -1
- package/dist/audio/AnimationPlayer.d.ts +4 -0
- package/dist/audio/AnimationPlayer.d.ts.map +1 -1
- package/dist/audio/StreamingAudioPlayer.d.ts +10 -0
- package/dist/audio/StreamingAudioPlayer.d.ts.map +1 -1
- package/dist/avatar_core_wasm-D4eEi7Eh.js +1666 -0
- package/dist/{avatar_core_wasm-DmkU6dYn.js.map → avatar_core_wasm-D4eEi7Eh.js.map} +1 -1
- package/dist/avatar_core_wasm.wasm +0 -0
- package/dist/config/app-config.d.ts +3 -7
- package/dist/config/app-config.d.ts.map +1 -1
- package/dist/config/constants.d.ts +19 -3
- package/dist/config/constants.d.ts.map +1 -1
- package/dist/core/AvatarController.d.ts +91 -64
- package/dist/core/AvatarController.d.ts.map +1 -1
- package/dist/core/AvatarDownloader.d.ts.map +1 -1
- package/dist/core/AvatarKit.d.ts +6 -0
- package/dist/core/AvatarKit.d.ts.map +1 -1
- package/dist/core/AvatarManager.d.ts.map +1 -1
- package/dist/core/AvatarView.d.ts +28 -30
- package/dist/core/AvatarView.d.ts.map +1 -1
- package/dist/core/NetworkLayer.d.ts +59 -0
- package/dist/core/NetworkLayer.d.ts.map +1 -0
- package/dist/index-BDxVrKwm.js +5942 -0
- package/dist/index-BDxVrKwm.js.map +1 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +12 -11
- package/dist/renderer/RenderSystem.d.ts +4 -2
- package/dist/renderer/RenderSystem.d.ts.map +1 -1
- package/dist/types/index.d.ts +18 -0
- package/dist/types/index.d.ts.map +1 -1
- package/dist/utils/cls-tracker.d.ts +17 -0
- package/dist/utils/cls-tracker.d.ts.map +1 -0
- package/dist/utils/logger.d.ts +1 -1
- package/dist/utils/logger.d.ts.map +1 -1
- package/package.json +8 -4
- package/dist/StreamingAudioPlayer-CMEiGwxE.js.map +0 -1
- package/dist/avatar_core_wasm-DmkU6dYn.js +0 -1666
- package/dist/index-CNhquYUE.js +0 -9712
- package/dist/index-CNhquYUE.js.map +0 -1
- package/dist/utils/posthog-tracker.d.ts +0 -82
- package/dist/utils/posthog-tracker.d.ts.map +0 -1
package/README.md
CHANGED
@@ -1,25 +1,25 @@
  # SPAvatarKit SDK
  
-
+ Real-time virtual avatar rendering SDK based on 3D Gaussian Splatting, supporting audio-driven animation rendering and high-quality 3D rendering.
  
- ## 🚀
+ ## 🚀 Features
  
- - **3D Gaussian Splatting
- -
- - **WebGPU/WebGL
- - **WASM
- - **TypeScript
- -
+ - **3D Gaussian Splatting Rendering** - Based on the latest point cloud rendering technology, providing high-quality 3D virtual avatars
+ - **Audio-Driven Real-Time Animation Rendering** - Users provide audio data, SDK handles receiving animation data and rendering
+ - **WebGPU/WebGL Dual Rendering Backend** - Automatically selects the best rendering backend for compatibility
+ - **WASM High-Performance Computing** - Uses C++ compiled WebAssembly modules for geometric calculations
+ - **TypeScript Support** - Complete type definitions and IntelliSense
+ - **Modular Architecture** - Clear component separation, easy to integrate and extend
  
- ## 📦
+ ## 📦 Installation
  
  ```bash
  npm install @spatialwalk/avatarkit
  ```
  
- ## 🎯
+ ## 🎯 Quick Start
  
- ###
+ ### Basic Usage
  
  ```typescript
  import {
@@ -30,173 +30,356 @@ import {
  Environment
  } from '@spatialwalk/avatarkit'
  
- // 1.
+ // 1. Initialize SDK
  const configuration: Configuration = {
  environment: Environment.test,
  }
  
  await AvatarKit.initialize('your-app-id', configuration)
  
- //
+ // Set sessionToken (if needed, call separately)
  // AvatarKit.setSessionToken('your-session-token')
  
- // 2.
+ // 2. Load character
  const avatarManager = new AvatarManager()
  const avatar = await avatarManager.load('character-id', (progress) => {
  console.log(`Loading progress: ${progress.progress}%`)
  })
  
- // 3.
+ // 3. Create view (automatically creates Canvas and AvatarController)
+ // Network mode (default)
  const container = document.getElementById('avatar-container')
- const avatarView = new AvatarView(avatar,
+ const avatarView = new AvatarView(avatar, {
+ container: container,
+ playbackMode: 'network' // Optional, 'network' is default
+ })
  
- // 4.
+ // 4. Start real-time communication (network mode only)
  await avatarView.avatarController.start()
  
- // 5.
- //
-
- const
-
- avatarView.avatarController.send(audioData,
+ // 5. Send audio data (network mode)
+ // ⚠️ Important: Audio must be 16kHz mono PCM16 format
+ // If audio is Uint8Array, you can use slice().buffer to convert to ArrayBuffer
+ const audioUint8 = new Uint8Array(1024) // Example: 16kHz PCM16 audio data (512 samples = 1024 bytes)
+ const audioData = audioUint8.slice().buffer // Simplified conversion, works for ArrayBuffer and SharedArrayBuffer
+ avatarView.avatarController.send(audioData, false) // Send audio data, will automatically start playing after accumulating enough data
+ avatarView.avatarController.send(audioData, true) // end=true means immediately return animation data, no longer accumulating
+ ```
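Editor's note (not part of the package README): the snippet above assumes `audioData` is already 16kHz mono PCM16. A minimal sketch of producing such a buffer from Float32 Web Audio samples, assuming the samples are already resampled to 16kHz mono; the helper name is hypothetical.

```typescript
// Hypothetical helper, not part of the SDK: convert Float32 samples in [-1, 1]
// (as produced by the Web Audio API) into a little-endian PCM16 ArrayBuffer.
function floatTo16BitPCM(samples: Float32Array): ArrayBuffer {
  const view = new DataView(new ArrayBuffer(samples.length * 2))
  for (let i = 0; i < samples.length; i++) {
    const s = Math.max(-1, Math.min(1, samples[i]))             // clamp to [-1, 1]
    view.setInt16(i * 2, s < 0 ? s * 0x8000 : s * 0x7fff, true) // true = little-endian
  }
  return view.buffer
}

// Usage sketch: `samples` would come from your own capture/resampling pipeline.
// avatarView.avatarController.send(floatTo16BitPCM(samples), false)
```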
+
+ ### External Data Mode Example
+
+ ```typescript
+ import { AvatarPlaybackMode } from '@spatialwalk/avatarkit'
+
+ // 1-3. Same as network mode (initialize SDK, load character)
+
+ // 3. Create view with external data mode
+ const container = document.getElementById('avatar-container')
+ const avatarView = new AvatarView(avatar, {
+ container: container,
+ playbackMode: AvatarPlaybackMode.external
+ })
+
+ // 4. Start playback with initial data (obtained from your service)
+ // Note: Audio and animation data should be obtained from your backend service
+ const initialAudioChunks = [{ data: audioData1, isLast: false }, { data: audioData2, isLast: false }]
+ const initialKeyframes = animationData1 // Animation keyframes from your service
+
+ await avatarView.avatarController.play(initialAudioChunks, initialKeyframes)
+
+ // 5. Stream additional data as needed
+ avatarView.avatarController.sendAudioChunk(audioData3, false)
+ avatarView.avatarController.sendKeyframes(animationData2)
  ```
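Editor's note (not part of the package README): one way to produce the `{ data, isLast }` chunks used above from a single PCM16 buffer obtained from your own backend; the 100 ms chunk size and the fetch-based source are assumptions.

```typescript
// Hypothetical: split a fetched 16kHz mono PCM16 file into Uint8Array chunks
// shaped like { data, isLast } for play() / sendAudioChunk().
async function fetchAudioChunks(url: string, chunkMs = 100) {
  const pcm16 = new Uint8Array(await (await fetch(url)).arrayBuffer())
  const bytesPerChunk = (16000 * 2 * chunkMs) / 1000 // 16kHz × 2 bytes per sample
  const chunks: { data: Uint8Array; isLast: boolean }[] = []
  for (let offset = 0; offset < pcm16.length; offset += bytesPerChunk) {
    chunks.push({
      data: pcm16.subarray(offset, offset + bytesPerChunk),
      isLast: offset + bytesPerChunk >= pcm16.length,
    })
  }
  return chunks
}
```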
  
- ###
+ ### Complete Examples
  
-
+ Check the example code in the GitHub repository for complete usage flows for both modes.
  
-
+ **Example Project:** [Avatarkit-web-demo](https://github.com/spatialwalk/Avatarkit-web-demo)
  
-
+ This repository contains complete examples for Vanilla JS, Vue 3, and React, demonstrating:
+ - Network mode: Real-time audio input with automatic animation data reception
+ - External data mode: Custom data sources with manual audio/animation data management
  
- ## 🏗️
+ ## 🏗️ Architecture Overview
  
- ###
+ ### Three-Layer Architecture
  
-
-
-
-
-
+ The SDK uses a three-layer architecture for clear separation of concerns:
+
+ 1. **Rendering Layer (AvatarView)** - Responsible for 3D rendering only
+ 2. **Playback Layer (AvatarController)** - Manages audio/animation synchronization and playback
+ 3. **Network Layer (NetworkLayer)** - Handles WebSocket communication (only in network mode)
+
+ ### Core Components
+
+ - **AvatarKit** - SDK initialization and management
+ - **AvatarManager** - Character resource loading and management
+ - **AvatarView** - 3D rendering view (rendering layer)
+ - **AvatarController** - Audio/animation playback controller (playback layer)
+ - **NetworkLayer** - WebSocket communication (network layer, automatically composed in network mode)
+ - **AvatarCoreAdapter** - WASM module adapter
+
+ ### Playback Modes
+
+ The SDK supports two playback modes, configured when creating `AvatarView`:
+
+ #### 1. Network Mode (Default)
+ - SDK handles WebSocket communication automatically
+ - Send audio data via `AvatarController.send()`
+ - SDK receives animation data from backend and synchronizes playback
+ - Best for: Real-time audio input scenarios
+
+ #### 2. External Data Mode
+ - External components manage their own network/data fetching
+ - External components provide both audio and animation data
+ - SDK only handles synchronized playback
+ - Best for: Custom data sources, pre-recorded content, or custom network implementations
+
+ ### Data Flow
+
+ #### Network Mode Flow
+
+ ```
+ User audio input (16kHz mono PCM16)
+ ↓
+ AvatarController.send()
+ ↓
+ NetworkLayer → WebSocket → Backend processing
+ ↓
+ Backend returns animation data (FLAME keyframes)
+ ↓
+ NetworkLayer → AvatarController → AnimationPlayer
+ ↓
+ FLAME parameters → AvatarCore.computeFrameFlatFromParams() → Splat data
+ ↓
+ AvatarController (playback loop) → AvatarView.renderRealtimeFrame()
+ ↓
+ RenderSystem → WebGPU/WebGL → Canvas rendering
+ ```
  
-
+ #### External Data Mode Flow
  
  ```
-
-
-
-
-
-
-
+ External data source (audio + animation)
+ ↓
+ AvatarController.play(initialAudio, initialKeyframes) // Start playback
+ ↓
+ AvatarController.sendAudioChunk() // Stream additional audio
+ AvatarController.sendKeyframes() // Stream additional animation
+ ↓
+ AvatarController → AnimationPlayer (synchronized playback)
+ ↓
+ FLAME parameters → AvatarCore.computeFrameFlatFromParams() → Splat data
+ ↓
+ AvatarController (playback loop) → AvatarView.renderRealtimeFrame()
+ ↓
+ RenderSystem → WebGPU/WebGL → Canvas rendering
  ```
  
-
+ **Note:**
+ - In network mode, users provide audio data, SDK handles network communication and animation data reception
+ - In external data mode, users provide both audio and animation data, SDK handles synchronized playback only
+
+ ### Audio Format Requirements
+
+ **⚠️ Important:** The SDK requires audio data to be in **16kHz mono PCM16** format:
+
+ - **Sample Rate**: 16kHz (16000 Hz) - This is a backend requirement
+ - **Channels**: Mono (single channel)
+ - **Format**: PCM16 (16-bit signed integer, little-endian)
+ - **Byte Order**: Little-endian
+
+ **Audio Data Format:**
+ - Each sample is 2 bytes (16-bit)
+ - Audio data should be provided as `ArrayBuffer` or `Uint8Array`
+ - For example: 1 second of audio = 16000 samples × 2 bytes = 32000 bytes
+
+ **Resampling:**
+ - If your audio source is at a different sample rate (e.g., 24kHz, 48kHz), you must resample it to 16kHz before sending to the SDK
+ - For high-quality resampling, we recommend using Web Audio API's `OfflineAudioContext` with anti-aliasing filtering
+ - See example projects for resampling implementation
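Editor's note (not part of the package README): a sketch of the recommended `OfflineAudioContext` resampling, assuming the source audio is available as a decoded `AudioBuffer`; combine it with a Float32-to-PCM16 step such as the helper shown earlier.

```typescript
// Hypothetical helper: resample a decoded AudioBuffer to 16kHz mono.
// OfflineAudioContext applies the browser's own anti-aliasing resampler;
// the output is Float32 samples that still need conversion to PCM16.
async function resampleTo16kMono(source: AudioBuffer): Promise<Float32Array> {
  const targetRate = 16000
  const offline = new OfflineAudioContext(1, Math.ceil(source.duration * targetRate), targetRate)
  const node = offline.createBufferSource()
  node.buffer = source
  node.connect(offline.destination) // 1 output channel, so input is down-mixed to mono
  node.start()
  const rendered = await offline.startRendering()
  return rendered.getChannelData(0)
}
```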
  
- ## 📚 API
+ ## 📚 API Reference
  
  ### AvatarKit
  
- SDK
+ The core management class of the SDK, responsible for initialization and global configuration.
  
  ```typescript
- //
+ // Initialize SDK
  await AvatarKit.initialize(appId: string, configuration: Configuration)
  
- //
+ // Check initialization status
  const isInitialized = AvatarKit.isInitialized
  
- //
+ // Cleanup resources (must be called when no longer in use)
  AvatarKit.cleanup()
  ```
  
  ### AvatarManager
  
-
+ Character resource manager, responsible for downloading, caching, and loading character data.
  
  ```typescript
  const manager = new AvatarManager()
  
- //
+ // Load character
  const avatar = await manager.load(
  characterId: string,
  onProgress?: (progress: LoadProgressInfo) => void
  )
  
- //
+ // Clear cache
  manager.clearCache()
  ```
  
  ### AvatarView
  
- 3D
+ 3D rendering view (rendering layer), responsible only for 3D rendering. It internally creates and manages `AvatarController`.
  
- **⚠️
+ **⚠️ Important Limitation:** Currently, the SDK only supports one AvatarView instance at a time. If you need to switch characters, you must first call the `dispose()` method to clean up the current AvatarView, then create a new instance.
+
+ **Playback Mode Configuration:**
+ - The playback mode is fixed when creating `AvatarView` and persists throughout its lifecycle
+ - It cannot be changed after creation
  
  ```typescript
-
-
+ import { AvatarPlaybackMode } from '@spatialwalk/avatarkit'
+
+ // Create view (Canvas is automatically added to container)
+ // Network mode (default)
+ const container = document.getElementById('avatar-container')
+ const avatarView = new AvatarView(avatar: Avatar, {
+ container: container,
+ playbackMode: AvatarPlaybackMode.network // Optional, default is 'network'
+ })
+
+ // External data mode
+ const avatarView = new AvatarView(avatar: Avatar, {
+ container: container,
+ playbackMode: AvatarPlaybackMode.external
+ })
  
- //
+ // Get Canvas element
  const canvas = avatarView.getCanvas()
  
- //
- avatarView.
- avatarView.setBackgroundOpaque(true)
+ // Get playback mode
+ const mode = avatarView.playbackMode // 'network' | 'external'
  
- //
+ // Update camera configuration
  avatarView.updateCameraConfig(cameraConfig: CameraConfig)
  
- //
+ // Cleanup resources (must be called before switching characters)
  avatarView.dispose()
  ```
  
-
+ **Character Switching Example:**
  
  ```typescript
- //
+ // Before switching characters, the old AvatarView must be cleaned up first
  if (currentAvatarView) {
  currentAvatarView.dispose()
  currentAvatarView = null
  }
  
- //
+ // Load new character
  const newAvatar = await avatarManager.load('new-character-id')
  
- //
- currentAvatarView = new AvatarView(newAvatar,
-
+ // Create new AvatarView (with same or different playback mode)
+ currentAvatarView = new AvatarView(newAvatar, {
+ container: container,
+ playbackMode: AvatarPlaybackMode.network
+ })
+
+ // Network mode: start connection
+ if (currentAvatarView.playbackMode === AvatarPlaybackMode.network) {
+ await currentAvatarView.avatarController.start()
+ }
  ```
  
  ### AvatarController
  
-
+ Audio/animation playback controller (playback layer); manages synchronized playback of audio and animation. In network mode it automatically composes `NetworkLayer`.
+
+ **Two Usage Patterns:**
+
+ #### Network Mode Methods
  
  ```typescript
- //
+ // Start WebSocket service
  await avatarView.avatarController.start()
  
- //
+ // Send audio data (SDK handles receiving animation data automatically)
  avatarView.avatarController.send(audioData: ArrayBuffer, end: boolean)
- // audioData:
- //
- //
+ // audioData: Audio data (ArrayBuffer format, must be 16kHz mono PCM16)
+ // - Sample rate: 16kHz (16000 Hz) - backend requirement
+ // - Format: PCM16 (16-bit signed integer, little-endian)
+ // - Channels: Mono (single channel)
+ // - Example: 1 second = 16000 samples × 2 bytes = 32000 bytes
+ // end: false (default) - normal streaming; the server accumulates audio and, once enough has arrived, returns animation data and starts synchronized playback of animation and audio
+ // end: true - return animation data immediately without further accumulation, used to end the current utterance or when an immediate response is needed
+
+ // Close WebSocket service
+ avatarView.avatarController.close()
+ ```
+
+ #### External Data Mode Methods
+
+ ```typescript
+ // Start playback with initial audio and animation data
+ await avatarView.avatarController.play(
+ initialAudioChunks?: Array<{ data: Uint8Array, isLast: boolean }>, // Initial audio chunks (16kHz mono PCM16)
+ initialKeyframes?: any[] // Initial animation keyframes (obtained from your service)
+ )
  
- //
+ // Stream additional audio chunks (after play() is called)
+ avatarView.avatarController.sendAudioChunk(
+ data: Uint8Array, // Audio chunk data
+ isLast: boolean = false // Whether this is the last chunk
+ )
+
+ // Stream additional animation keyframes (after play() is called)
+ avatarView.avatarController.sendKeyframes(
+ keyframes: any[] // Additional animation keyframes (obtained from your service)
+ )
+ ```
+
+ #### Common Methods (Both Modes)
+
+ ```typescript
+ // Interrupt current playback (stops and clears data)
  avatarView.avatarController.interrupt()
  
- //
+ // Clear all data and resources
+ avatarView.avatarController.clear()
+
+ // Get connection state (network mode only)
+ const isConnected = avatarView.avatarController.connected
+
+ // Start service (network mode only)
+ await avatarView.avatarController.start()
+
+ // Close service (network mode only)
  avatarView.avatarController.close()
  
- //
- avatarView.avatarController.
+ // Get current avatar state
+ const state = avatarView.avatarController.state
+
+ // Set event callbacks
+ avatarView.avatarController.onConnectionState = (state: ConnectionState) => {} // Network mode only
  avatarView.avatarController.onAvatarState = (state: AvatarState) => {}
  avatarView.avatarController.onError = (error: Error) => {}
-
- // Note: the sendText() method is not supported; calling it throws an error
  ```
  
-
+ **Important Notes:**
+ - `start()` and `close()` are only available in network mode
+ - `play()`, `sendAudioChunk()`, and `sendKeyframes()` are only available in external data mode
+ - `interrupt()` and `clear()` are available in both modes
+ - The playback mode is determined when creating `AvatarView` and cannot be changed
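Editor's note (not part of the package README): a small sketch of wiring the callbacks documented above to application state; the status element and string formatting are assumptions, and the members of `ConnectionState` are not enumerated in this diff.

```typescript
// Hypothetical UI wiring for the documented callbacks.
const statusEl = document.getElementById('avatar-status')! // assumed element in your page

avatarView.avatarController.onConnectionState = (state) => {
  statusEl.textContent = `connection: ${String(state)}` // network mode only
}
avatarView.avatarController.onAvatarState = (state) => {
  statusEl.dataset.avatarState = String(state) // 'idle' | 'active' | 'playing'
}
avatarView.avatarController.onError = (error) => {
  console.error('Avatar error:', error)
}
```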
+
+ ## 🔧 Configuration
  
  ### Configuration
  
@@ -206,14 +389,37 @@ interface Configuration {
  }
  ```
  
-
- - `environment`:
- - `sessionToken`:
+ **Description:**
+ - `environment`: Specifies the environment (cn/us/test); the SDK automatically uses the corresponding API address and WebSocket address for that environment
+ - `sessionToken`: Set separately via `AvatarKit.setSessionToken()`, not in Configuration
  
+ ```typescript
  enum Environment {
- cn = 'cn', //
- us = 'us', //
- test = 'test' //
+ cn = 'cn', // China region
+ us = 'us', // US region
+ test = 'test' // Test environment
+ }
+ ```
+
+ ### AvatarViewOptions
+
+ ```typescript
+ interface AvatarViewOptions {
+ playbackMode?: AvatarPlaybackMode // Playback mode, default is 'network'
+ container?: HTMLElement // Canvas container element
+ }
+ ```
+
+ **Description:**
+ - `playbackMode`: Specifies the playback mode (`'network'` or `'external'`), default is `'network'`
+ - `'network'`: SDK handles WebSocket communication, send audio via `send()`
+ - `'external'`: External components provide audio and animation data, SDK handles synchronized playback
+ - `container`: Optional container element for the Canvas; if not provided, the Canvas is created but not added to the DOM
+
+ ```typescript
+ enum AvatarPlaybackMode {
+ network = 'network', // Network mode: SDK handles WebSocket communication
+ external = 'external' // External data mode: External provides data, SDK handles playback
  }
  ```
  
@@ -221,17 +427,17 @@ enum Environment {
  
  ```typescript
  interface CameraConfig {
- position: [number, number, number] //
- target: [number, number, number] //
- fov: number //
- near: number //
- far: number //
- up?: [number, number, number] //
- aspect?: number //
+ position: [number, number, number] // Camera position
+ target: [number, number, number] // Camera target
+ fov: number // Field of view angle
+ near: number // Near clipping plane
+ far: number // Far clipping plane
+ up?: [number, number, number] // Up direction
+ aspect?: number // Aspect ratio
  }
  ```
  
- ## 📊
+ ## 📊 State Management
  
  ### ConnectionState
  
@@ -248,77 +454,77 @@ enum ConnectionState {
  
  ```typescript
  enum AvatarState {
- idle = 'idle', //
- active = 'active', //
- playing = 'playing' //
+ idle = 'idle', // Idle state, showing breathing animation
+ active = 'active', // Active, waiting for playable content
+ playing = 'playing' // Playing
  }
  ```
  
- ## 🎨
+ ## 🎨 Rendering System
  
- SDK
+ The SDK supports two rendering backends:
  
- - **WebGPU** -
- - **WebGL** -
+ - **WebGPU** - High-performance rendering for modern browsers
+ - **WebGL** - Traditional rendering with broader compatibility
  
-
+ The rendering system automatically selects the best backend; no manual configuration is needed.
  
- ## 🔍
+ ## 🔍 Debugging and Monitoring
  
- ###
+ ### Logging System
  
- SDK
+ The SDK includes a built-in logging system with multiple output levels:
  
  ```typescript
  import { logger } from '@spatialwalk/avatarkit'
  
- //
+ // Set log level
  logger.setLevel('verbose') // 'basic' | 'verbose'
  
- //
+ // Manual log output
  logger.log('Info message')
  logger.warn('Warning message')
  logger.error('Error message')
  ```
  
- ###
+ ### Performance Monitoring
  
- SDK
+ The SDK provides performance monitoring interfaces for observing rendering performance:
  
  ```typescript
- //
+ // Get rendering performance statistics
  const stats = avatarView.getPerformanceStats()
  
  if (stats) {
- console.log(
- console.log(
- console.log(
+ console.log(`Render time: ${stats.renderTime.toFixed(2)}ms`)
+ console.log(`Sort time: ${stats.sortTime.toFixed(2)}ms`)
+ console.log(`Rendering backend: ${stats.backend}`)
  
- //
+ // Calculate frame rate
  const fps = 1000 / stats.renderTime
- console.log(
+ console.log(`Frame rate: ${fps.toFixed(2)} FPS`)
  }
  
- //
+ // Regular performance monitoring
  setInterval(() => {
  const stats = avatarView.getPerformanceStats()
  if (stats) {
- //
+ // Send to monitoring service or display on UI
  console.log('Performance:', stats)
  }
  }, 1000)
  ```
  
-
- - `renderTime`:
- - `sortTime`:
- - `backend`:
+ **Performance Statistics Description:**
+ - `renderTime`: Total rendering time (milliseconds), including sorting and GPU rendering
+ - `sortTime`: Sorting time (milliseconds); a radix sort is used to depth-sort the point cloud
+ - `backend`: Currently used rendering backend (`'webgpu'` | `'webgl'` | `null`)
  
- ## 🚨
+ ## 🚨 Error Handling
  
  ### SPAvatarError
  
- SDK
+ The SDK uses custom error types that provide more detailed error information:
  
  ```typescript
  import { SPAvatarError } from '@spatialwalk/avatarkit'
@@ -334,70 +540,135 @@ try {
  }
  ```
  
- ###
+ ### Error Callbacks
  
  ```typescript
  avatarView.avatarController.onError = (error: Error) => {
  console.error('AvatarController error:', error)
- //
+ // Handle error, such as reconnection, user notification, etc.
  }
  ```
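Editor's note (not part of the package README): one way an application in network mode might react to an error by re-establishing the connection, using only the documented `close()`/`start()` calls; the retry delay is an assumption.

```typescript
// Hypothetical recovery strategy for network mode: close and restart the
// WebSocket service after a short delay when an error is reported.
avatarView.avatarController.onError = (error: Error) => {
  console.error('AvatarController error:', error)
  avatarView.avatarController.close()
  setTimeout(() => {
    avatarView.avatarController.start().catch((e) => console.error('Reconnect failed:', e))
  }, 1000) // assumed retry delay
}
```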
  
- ## 🔄
+ ## 🔄 Resource Management
+
+ ### Lifecycle Management
  
-
+ #### Network Mode Lifecycle
  
  ```typescript
- //
- const
+ // Initialize
+ const container = document.getElementById('avatar-container')
+ const avatarView = new AvatarView(avatar, {
+ container: container,
+ playbackMode: AvatarPlaybackMode.network
+ })
  await avatarView.avatarController.start()
  
- //
+ // Use
  avatarView.avatarController.send(audioData, false)
  
- //
- avatarView.
+ // Cleanup
+ avatarView.avatarController.close()
+ avatarView.dispose() // Automatically cleans up all resources
  ```
  
-
- - The SDK currently supports only one AvatarView instance at a time
- - When switching characters, you must first call `dispose()` to clean up the old AvatarView before creating a new instance
- - Improper cleanup may cause resource leaks and rendering errors
+ #### External Data Mode Lifecycle
  
-
+ ```typescript
+ // Initialize
+ const container = document.getElementById('avatar-container')
+ const avatarView = new AvatarView(avatar, {
+ container: container,
+ playbackMode: AvatarPlaybackMode.external
+ })
+
+ // Use
+ const initialAudioChunks = [{ data: audioData1, isLast: false }]
+ await avatarView.avatarController.play(initialAudioChunks, initialKeyframes)
+ avatarView.avatarController.sendAudioChunk(audioChunk, false)
+ avatarView.avatarController.sendKeyframes(keyframes)
+
+ // Cleanup
+ avatarView.avatarController.clear() // Clear all data and resources
+ avatarView.dispose() // Automatically cleans up all resources
+ ```
  
-
- -
- -
+ **⚠️ Important Notes:**
+ - The SDK currently supports only one AvatarView instance at a time
+ - When switching characters, you must first call `dispose()` to clean up the old AvatarView, then create a new instance
+ - Failing to clean up properly may cause resource leaks and rendering errors
+ - In network mode, call `close()` before `dispose()` to properly close WebSocket connections
+ - In external data mode, call `clear()` before `dispose()` to clear all playback data
  
- ###
+ ### Memory Optimization
  
-
+ - SDK automatically manages WASM memory allocation
+ - Supports dynamic loading/unloading of character and animation resources
+ - Provides memory usage monitoring interface
+
+ ### Audio Data Sending
+
+ #### Network Mode
+
+ The `send()` method receives audio data in `ArrayBuffer` format:
+
+ **Audio Format Requirements:**
+ - **Sample Rate**: 16kHz (16000 Hz) - **Backend requirement, must be exactly 16kHz**
+ - **Format**: PCM16 (16-bit signed integer, little-endian)
+ - **Channels**: Mono (single channel)
+ - **Data Size**: Each sample is 2 bytes, so 1 second of audio = 16000 samples × 2 bytes = 32000 bytes
+
+ **Usage:**
+ - `audioData`: Audio data (ArrayBuffer format, must be 16kHz mono PCM16)
+ - `end=false` (default) - normal streaming; the server accumulates audio and, once enough has arrived, returns animation data and starts synchronized playback of animation and audio
+ - `end=true` - return animation data immediately without further accumulation, used to end the current conversation or when an immediate response is needed
+ - **Important**: There is no need to wait for `end=true` before playback starts; playback begins automatically once enough audio data has accumulated
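Editor's note (not part of the package README): a possible streaming pattern for `send()` that follows the accumulation behaviour described above; the 100 ms chunk size is an assumption.

```typescript
// Hypothetical network-mode streaming loop: send 100 ms PCM16 chunks
// (16000 Hz × 2 bytes × 0.1 s = 3200 bytes) and set end=true on the final chunk.
const BYTES_PER_CHUNK = 3200

function streamUtterance(pcm16: ArrayBuffer) {
  for (let offset = 0; offset < pcm16.byteLength; offset += BYTES_PER_CHUNK) {
    const chunk = pcm16.slice(offset, offset + BYTES_PER_CHUNK)
    const isLast = offset + BYTES_PER_CHUNK >= pcm16.byteLength
    avatarView.avatarController.send(chunk, isLast) // playback starts once enough audio has accumulated
  }
}
```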
+
+ #### External Data Mode
+
+ The `play()` method starts playback with initial data; use `sendAudioChunk()` afterwards to stream additional audio:
+
+ **Audio Format Requirements:**
+ - Same as network mode: 16kHz mono PCM16 format
+ - Audio data should be provided as `Uint8Array` chunks with an `isLast` flag
+
+ **Usage:**
+ ```typescript
+ // Start playback with initial audio and animation data
+ // Note: Audio and animation data should be obtained from your backend service
+ const initialAudioChunks = [
+ { data: audioData1, isLast: false },
+ { data: audioData2, isLast: false }
+ ]
+ await avatarController.play(initialAudioChunks, initialKeyframes)
+
+ // Stream additional audio chunks
+ avatarController.sendAudioChunk(audioChunk, isLast)
+ ```
  
-
- -
- - `
- - `
- - **Important**: There is no need to wait for `end=true` before playback starts; playback begins automatically once enough audio data has accumulated
+ **Resampling (Both Modes):**
+ - If your audio source is at a different sample rate (e.g., 24kHz, 48kHz), you **must** resample it to 16kHz before sending
+ - For high-quality resampling, use Web Audio API's `OfflineAudioContext` with anti-aliasing filtering
+ - See example projects (`vanilla`, `react`, `vue`) for complete resampling implementation
  
- ## 🌐
+ ## 🌐 Browser Compatibility
  
- - **Chrome/Edge** 90+ (
+ - **Chrome/Edge** 90+ (WebGPU recommended)
  - **Firefox** 90+ (WebGL)
  - **Safari** 14+ (WebGL)
- -
+ - **Mobile** iOS 14+, Android 8+
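Editor's note (not part of the package README): the SDK chooses the backend automatically, but an application can check up front whether WebGPU is even available, for example to show a capability hint in the UI; the probe below is an assumption, not an SDK API.

```typescript
// Hypothetical capability probe; the SDK still selects WebGPU or WebGL itself.
async function detectLikelyBackend(): Promise<'webgpu' | 'webgl'> {
  const gpu = (navigator as unknown as { gpu?: { requestAdapter(): Promise<unknown | null> } }).gpu
  if (gpu) {
    try {
      if (await gpu.requestAdapter()) return 'webgpu'
    } catch {
      // ignore and fall back to WebGL
    }
  }
  return 'webgl'
}
```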
  
- ## 📝
+ ## 📝 License
  
  MIT License
  
- ## 🤝
+ ## 🤝 Contributing
  
-
+ Issues and Pull Requests are welcome!
  
- ## 📞
+ ## 📞 Support
  
-
- -
- -
- - GitHub
+ For questions, please contact:
+ - Email: support@spavatar.com
+ - Documentation: https://docs.spavatar.com
+ - GitHub: https://github.com/spavatar/sdk