@spatialwalk/avatarkit 1.0.0-beta.70 → 1.0.0-beta.72
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/README.md +128 -49
- package/dist/{StreamingAudioPlayer-Bi2685bX.js → StreamingAudioPlayer-fV-zTOrl.js} +24 -19
- package/dist/animation/AnimationWebSocketClient.d.ts +6 -61
- package/dist/animation/utils/eventEmitter.d.ts +1 -8
- package/dist/animation/utils/flameConverter.d.ts +3 -23
- package/dist/audio/AnimationPlayer.d.ts +6 -75
- package/dist/audio/StreamingAudioPlayer.d.ts +2 -150
- package/dist/{index-CvW_c7G-.js → index-BginsqFH.js} +75 -32
- package/dist/index.js +1 -1
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
@@ -5,6 +5,18 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

+## [1.0.0-beta.72] - 2026-01-17
+
+### 📚 Documentation
+- **README Optimization** - Enhanced README with authentication section and improved API documentation
+
+## [1.0.0-beta.71] - 2026-01-17
+
+### 🔧 Improvements
+- **Internal API** - Marked all audio and animation internal classes with `@internal` to prevent public API exposure
+- **Type Safety** - Hidden `AnimationPlayer`, `StreamingAudioPlayer`, `AnimationWebSocketClient`, and related utilities from public API
+- **Internal Documentation** - Fixed missing English translations in `AnimationWebSocketClient.ts`
+
 ## [1.0.0-beta.70] - 2026-01-17

 ### 🔧 Improvements
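The beta.71 entries above hide internal classes behind the `@internal` TSDoc tag. A minimal sketch of how such a marker keeps a declaration out of published typings, assuming the build uses TypeScript's `stripInternal` compiler option (or an equivalent API-extraction step); the class below is illustrative, not the SDK's actual source:

```typescript
/**
 * Low-level helper that should not appear in the public API surface.
 * @internal
 */
export class InternalAudioScheduler {
  schedule(chunk: Uint8Array): void {
    // ...implementation detail, never documented publicly
  }
}

// tsconfig.json (relevant options, assumption):
// {
//   "compilerOptions": {
//     "declaration": true,
//     "stripInternal": true  // omits @internal declarations from the emitted .d.ts
//   }
// }
```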
package/README.md
CHANGED
@@ -18,13 +18,34 @@ Real-time virtual avatar rendering SDK based on 3D Gaussian Splatting, supportin
 npm install @spatialwalk/avatarkit
 ```

-##
+## 🚀 Demo Repository

-
+<div align="center">

-###
+### 📌 **Quick Start: Check Out Our Demo Repository**

-
+We provide complete example code and best practices to help you quickly integrate the SDK.
+
+**The demo repository includes:**
+- ✅ Complete integration examples
+- ✅ Usage examples for both SDK mode and Host mode
+- ✅ Audio processing examples (PCM16, WAV, MP3, etc.)
+- ✅ Vite configuration examples
+- ✅ Best practices for common scenarios
+
+**[👉 View Demo Repository](https://github.com/spatialwalk/avatarkit-demo)** | *If not yet created, please contact the team*
+
+</div>
+
+---
+
+## 🔧 Vite Configuration (Recommended)
+
+If you are using Vite as your build tool, we strongly recommend using our Vite plugin to automatically handle WASM file configuration. The plugin automatically handles all necessary configurations, so you don't need to set them up manually.
+
+### Using the Plugin
+
+Add the plugin to `vite.config.ts`:

 ```typescript
 import { defineConfig } from 'vite'
@@ -32,27 +53,27 @@ import { avatarkitVitePlugin } from '@spatialwalk/avatarkit/vite'

 export default defineConfig({
 plugins: [
-avatarkitVitePlugin(), //
+avatarkitVitePlugin(), // Just add this line
 ],
 })
 ```

-###
+### Plugin Features

-
+The plugin automatically handles:

-- ✅
-- ✅
--
--
--
-- ✅ **WASM JS Glue
-- ✅ **Cloudflare Pages
-- ✅ **Vite
+- ✅ **Development Server**: Automatically sets the correct MIME type (`application/wasm`) for WASM files
+- ✅ **Build Time**: Automatically copies WASM files to `dist/assets/` directory
+  - Smart Detection: Extracts referenced WASM file names (including hash) from JS glue files
+  - Auto Matching: Ensures copied WASM files match references in JS glue files
+  - Hash Support: Correctly handles hashed WASM files (e.g., `avatar_core_wasm-{hash}.wasm`)
+- ✅ **WASM JS Glue**: Automatically copies WASM JS glue files to `dist/assets/` directory
+- ✅ **Cloudflare Pages**: Automatically generates `_headers` file to ensure WASM files use the correct MIME type
+- ✅ **Vite Configuration**: Automatically configures `optimizeDeps`, `assetsInclude`, `assetsInlineLimit`, and other options

-###
+### Manual Configuration (Without Plugin)

-
+If you don't use the Vite plugin, you need to manually configure the following:

 ```typescript
 // vite.config.ts
@@ -74,7 +95,7 @@ export default defineConfig({
 },
 },
 },
-//
+// Development server needs to manually configure middleware to set WASM MIME type
 configureServer(server) {
 server.middlewares.use((req, res, next) => {
 if (req.url?.endsWith('.wasm')) {
@@ -86,6 +107,53 @@ export default defineConfig({
 })
 ```

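For reference, a minimal self-contained sketch of the manual setup the README describes, assuming a standard Vite project; only the WASM-related pieces are shown, and the other `build`/`optimizeDeps` values from the README still apply:

```typescript
// vite.config.ts (manual setup, sketch only)
import { defineConfig, type Plugin } from 'vite'

const wasmMimePlugin = (): Plugin => ({
  name: 'wasm-mime-type',
  configureServer(server) {
    server.middlewares.use((req, res, next) => {
      if (req.url?.endsWith('.wasm')) {
        // Serve the SDK's WASM files with the correct MIME type during development
        res.setHeader('Content-Type', 'application/wasm')
      }
      next()
    })
  },
})

export default defineConfig({
  plugins: [wasmMimePlugin()],
  // Treat .wasm files as assets so they are copied as-is into dist/assets/
  assetsInclude: ['**/*.wasm'],
})
```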
+## 🔐 Authentication
+
+All environments require an **App ID** and **Session Token** for authentication.
+
+### App ID
+
+The App ID is used to identify your application. You can obtain your App ID by:
+
+1. **For Testing**: Use the default test App ID provided in demo repositories (paired with test Session Token, only works with publicly available test avatars like Rohan, Dr.Kellan, Priya, Josh, etc.)
+2. **For Production**: Visit the [Developer Platform](https://dash.spatialreal.ai) to create your own App and avatars. You will receive your own App ID after creating an App.
+
+### Session Token
+
+The Session Token is required for WebSocket authentication and must be obtained from your SDK provider.
+
+**⚠️ Important Notes:**
+- The Session Token must be valid and not expired
+- In production applications, you **must** manually inject a valid Session Token obtained from your SDK provider
+- The default Session Token provided in demo repositories is **only for demonstration purposes** and can only be used with test avatars
+- If you want to create your own avatars and test them, please visit the [Developer Platform](https://dash.spatialreal.ai) to create your own App and generate Session Tokens
+
+**How to Set Session Token:**
+
+```typescript
+// Initialize SDK with App ID
+await AvatarSDK.initialize('your-app-id', configuration)
+
+// Set Session Token (can be called before or after initialization)
+// If called before initialization, the token will be automatically set when you initialize the SDK
+AvatarSDK.setSessionToken('your-session-token')
+
+// Get current Session Token
+const sessionToken = AvatarSDK.sessionToken
+```
+
+**Token Management:**
+- The Session Token can be set at any time using `AvatarSDK.setSessionToken(token)`
+- If you set the token before initializing the SDK, it will be automatically applied during initialization
+- If you set the token after initialization, it will be applied immediately
+- Handle token refresh logic in your application as needed (e.g., when token expires)
+
+**For Production Integration:**
+- Obtain a valid Session Token from your SDK provider
+- Store the token securely (never expose it in client-side code if possible)
+- Implement token refresh logic to handle token expiration
+- Use `AvatarSDK.setSessionToken(token)` to inject the token programmatically
+
 ## 🎯 Quick Start

 ### ⚠️ Important: Audio Context Initialization
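A minimal sketch of the token flow described in the Authentication section above. Only `AvatarSDK.initialize` and `AvatarSDK.setSessionToken` come from the SDK; `fetchSessionToken()` is a hypothetical placeholder for however your backend issues tokens, and the refresh timing is illustrative:

```typescript
import { AvatarSDK, type Configuration } from '@spatialwalk/avatarkit'

// Hypothetical backend endpoint returning { token, expiresInMs }
async function fetchSessionToken(): Promise<{ token: string; expiresInMs: number }> {
  const res = await fetch('/api/avatar-session-token')
  return res.json()
}

export async function initAvatarAuth(appId: string, configuration: Configuration) {
  const { token, expiresInMs } = await fetchSessionToken()

  // Setting the token before initialize() is fine; it is applied during initialization
  AvatarSDK.setSessionToken(token)
  await AvatarSDK.initialize(appId, configuration)

  // Naive refresh loop; replace with your own expiry handling
  setTimeout(async () => {
    const refreshed = await fetchSessionToken()
    AvatarSDK.setSessionToken(refreshed.token)
  }, Math.max(expiresInMs - 60_000, 10_000))
}
```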
@@ -126,8 +194,10 @@ const configuration: Configuration = {

 await AvatarSDK.initialize('your-app-id', configuration)

-// Set
-//
+// Set Session Token (required for authentication)
+// You must obtain a valid Session Token from your SDK provider
+// See Authentication section above for more details
+AvatarSDK.setSessionToken('your-session-token')

 // 2. Load avatar
 const avatarManager = AvatarManager.shared
@@ -408,7 +478,9 @@ const appId = AvatarSDK.appId
 // Get configuration
 const config = AvatarSDK.configuration

-// Set
+// Set Session Token (required for authentication)
+// You must obtain a valid Session Token from your SDK provider
+// See Authentication section for more details
 AvatarSDK.setSessionToken('your-session-token')

 // Set userId (optional, for telemetry)
@@ -437,7 +509,7 @@ const manager = AvatarManager.shared

 // Load avatar
 const avatar = await manager.load(
-
+id: string,
 onProgress?: (progress: LoadProgressInfo) => void
 )
@@ -505,7 +577,7 @@ const newAvatar = await avatarManager.load('new-character-id')
 currentAvatarView = new AvatarView(newAvatar, container)

 // SDK mode: start connection (will throw error if not in SDK mode)
-
+await currentAvatarView.controller.start()
 ```

 ### AvatarController
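Building on the avatar-switching hunk above, a hedged end-to-end sketch using only the documented `AvatarManager.shared.load`, `AvatarView`, `controller.start()` and `dispose()` calls; `container` and the character ID are placeholders and error handling is omitted:

```typescript
import { AvatarManager, AvatarView } from '@spatialwalk/avatarkit'

let currentAvatarView: AvatarView | null = null

async function switchAvatar(characterId: string, container: HTMLElement) {
  // Release the previous view before creating a new one to avoid resource leaks
  currentAvatarView?.dispose()

  const avatar = await AvatarManager.shared.load(characterId, (progress) => {
    console.log('loading avatar', progress)
  })

  currentAvatarView = new AvatarView(avatar, container)

  // SDK mode only: open the realtime connection
  await currentAvatarView.controller.start()
  return currentAvatarView
}
```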
@@ -581,24 +653,30 @@ button.addEventListener('click', async () => {

 ```typescript

+// Pause playback (from playing state)
+avatarView.controller.pause()
+
+// Resume playback (from paused state)
+await avatarView.controller.resume()
+
 // Interrupt current playback (stops and clears data)
-avatarView.
+avatarView.controller.interrupt()

 // Clear all data and resources
-avatarView.
+avatarView.controller.clear()

 // Get current conversation ID (for Host mode)
-const conversationId = avatarView.
+const conversationId = avatarView.controller.getCurrentConversationId()
 // Returns: Current conversationId for the active audio session, or null if no active session

 // Volume control (affects only avatar audio player, not system volume)
-avatarView.
-const currentVolume = avatarView.
+avatarView.controller.setVolume(0.5) // Set volume to 50% (0.0 to 1.0)
+const currentVolume = avatarView.controller.getVolume() // Get current volume (0.0 to 1.0)

 // Set event callbacks
-avatarView.
-avatarView.
-avatarView.
+avatarView.controller.onConnectionState = (state: ConnectionState) => {} // SDK mode only
+avatarView.controller.onConversationState = (state: ConversationState) => {}
+avatarView.controller.onError = (error: Error) => {}
 ```

 #### Avatar Transform Methods
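A short sketch that wires the callbacks and volume controls from the hunk above into one helper, assuming `AvatarView`, `ConnectionState` and `ConversationState` are exported from the package entry point; the mute logic is illustrative only:

```typescript
import type { AvatarView, ConnectionState, ConversationState } from '@spatialwalk/avatarkit'

export function attachControllerHandlers(avatarView: AvatarView) {
  const controller = avatarView.controller

  controller.onConnectionState = (state: ConnectionState) => {
    console.log('connection:', state) // SDK mode only
  }
  controller.onConversationState = (state: ConversationState) => {
    console.log('conversation:', state)
  }
  controller.onError = (error: Error) => {
    console.error('controller error:', error)
  }

  // Simple mute toggle built on the documented volume API (0.0 to 1.0)
  let lastVolume = controller.getVolume()
  return {
    toggleMute() {
      if (controller.getVolume() > 0) {
        lastVolume = controller.getVolume()
        controller.setVolume(0)
      } else {
        controller.setVolume(lastVolume || 1)
      }
    },
  }
}
```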
@@ -674,7 +752,7 @@ enum LogLevel {
 - Supported values: 8000, 16000, 22050, 24000, 32000, 44100, 48000
 - The configured sample rate will be used for both audio recording and playback
 - `characterApiBaseUrl`: Internal debug config, can be ignored
-- `sessionToken`: Set separately via `AvatarSDK.setSessionToken()`, not in Configuration
+- `sessionToken`: **Required for authentication**. Set separately via `AvatarSDK.setSessionToken()`, not in Configuration. See [Authentication](#-authentication) section for details

 ```typescript
 enum Environment {
@@ -734,7 +812,7 @@ enum ConversationState {
 The SDK supports two rendering backends:

 - **WebGPU** - High-performance rendering for modern browsers
-- **WebGL** - Better compatibility traditional rendering
+- **WebGL** - Better compatibility for traditional rendering

 The rendering system automatically selects the best backend, no manual configuration needed.

@@ -748,7 +826,7 @@ The SDK uses custom error types, providing more detailed error information:
 import { AvatarError } from '@spatialwalk/avatarkit'

 try {
-await avatarView.
+await avatarView.controller.start()
 } catch (error) {
 if (error instanceof AvatarError) {
 console.error('SDK Error:', error.message, error.code)
@@ -761,7 +839,7 @@ try {
 ### Error Callbacks

 ```typescript
-avatarView.
+avatarView.controller.onError = (error: Error) => {
 console.error('AvatarController error:', error)
 // Handle error, such as reconnection, user notification, etc.
 }
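Combining the two error-handling patterns above (typed `AvatarError` checks plus the `onError` callback) into one hedged sketch; the `AvatarView` type import is assumed to be available from the package root:

```typescript
import { AvatarError, type AvatarView } from '@spatialwalk/avatarkit'

async function startWithErrorHandling(avatarView: AvatarView) {
  avatarView.controller.onError = (error: Error) => {
    // Runtime errors surfaced after start(), e.g. connection drops
    console.error('AvatarController error:', error)
  }

  try {
    await avatarView.controller.start()
  } catch (error) {
    if (error instanceof AvatarError) {
      // Structured SDK error with a machine-readable code
      console.error('SDK Error:', error.message, error.code)
    } else {
      throw error
    }
  }
}
```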
@@ -777,14 +855,13 @@ avatarView.avatarController.onError = (error: Error) => {
 // Initialize
 const container = document.getElementById('avatar-container')
 const avatarView = new AvatarView(avatar, container)
-await avatarView.
+await avatarView.controller.start()

 // Use
-avatarView.
+avatarView.controller.send(audioData, false)

-// Cleanup
-avatarView.
-avatarView.dispose() // Automatically cleans up all resources
+// Cleanup - dispose() automatically cleans up all resources including WebSocket connections
+avatarView.dispose()
 ```

 #### Host Mode Lifecycle
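A compact sketch of the SDK-mode lifecycle shown above. The boolean passed to `controller.send()` is treated here as the end-of-turn flag, mirroring the `end` parameter of the internal `sendAudioData`; that reading, and the PCM chunk source, are assumptions:

```typescript
import type { AvatarView } from '@spatialwalk/avatarkit'

async function runSdkModeSession(avatarView: AvatarView, pcmChunks: Uint8Array[]) {
  await avatarView.controller.start()

  // Stream audio to the avatar; (assumption) the second argument marks the final chunk of a turn
  pcmChunks.forEach((chunk, i) => {
    avatarView.controller.send(chunk, i === pcmChunks.length - 1)
  })

  // dispose() tears down the WebSocket connection and all other resources when you are done
  avatarView.dispose()
}
```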
@@ -795,19 +872,21 @@ const container = document.getElementById('avatar-container')
 const avatarView = new AvatarView(avatar, container)

 // Use
-const conversationId = avatarView.
-avatarView.
+const conversationId = avatarView.controller.yieldAudioData(audioChunk, false)
+avatarView.controller.yieldFramesData(keyframesDataArray, conversationId) // keyframesDataArray: (Uint8Array | ArrayBuffer)[]

-// Cleanup
-avatarView.
-avatarView.dispose() // Automatically cleans up all resources
+// Cleanup - dispose() automatically cleans up all resources including playback data
+avatarView.dispose()
 ```

 **⚠️ Important Notes:**
--
--
--
--
+- `dispose()` automatically cleans up all resources, including:
+  - WebSocket connections (SDK mode)
+  - Playback data and animation resources (both modes)
+  - Render system and canvas elements
+  - All event listeners and callbacks
+- Not properly calling `dispose()` may cause resource leaks and rendering errors
+- If you need to manually close WebSocket connections or clear playback data before disposing, you can call `avatarView.controller.close()` (SDK mode) or `avatarView.controller.clear()` (both modes) first, but it's not required as `dispose()` handles this automatically

 ### Memory Optimization

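And the Host-mode counterpart as a hedged sketch: here the host application supplies both the audio and the animation keyframe payloads it received from its own backend, using only the `yieldAudioData`/`yieldFramesData` calls documented above:

```typescript
import type { AvatarView } from '@spatialwalk/avatarkit'

function runHostModeTurn(
  avatarView: AvatarView,
  audioChunk: Uint8Array,
  keyframesDataArray: (Uint8Array | ArrayBuffer)[]
) {
  // Feed audio first; the returned conversationId ties the frames to this turn
  const conversationId = avatarView.controller.yieldAudioData(audioChunk, false)

  // Then feed the matching animation keyframes for the same conversation
  avatarView.controller.yieldFramesData(keyframesDataArray, conversationId)

  return conversationId
}
```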
package/dist/StreamingAudioPlayer-fV-zTOrl.js
CHANGED

@@ -1,9 +1,9 @@
 var __defProp = Object.defineProperty;
 var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
 var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
-import { A as APP_CONFIG, l as logger, e as errorToMessage, a as logEvent } from "./index-
+import { A as APP_CONFIG, l as logger, e as errorToMessage, a as logEvent } from "./index-BginsqFH.js";
 class StreamingAudioPlayer {
-//
+// Mark if AudioContext is being resumed, avoid concurrent resume requests
 constructor(options) {
 // AudioContext is managed internally
 __publicField(this, "audioContext", null);
@@ -28,17 +28,17 @@ class StreamingAudioPlayer {
 __publicField(this, "autoStartEnabled", true);
 // Control whether to auto-start when buffer is ready
 __publicField(this, "autoContinue", false);
-//
+// Mark if should auto-continue playback (used after auto-pause when end=false and no data)
 // Audio buffer queue
 __publicField(this, "audioChunks", []);
 __publicField(this, "scheduledChunks", 0);
 // Number of chunks already scheduled
 __publicField(this, "activeSources", /* @__PURE__ */ new Set());
 __publicField(this, "lastScheduledChunkEndTime", 0);
-//
+// End time of last scheduled chunk (relative time)
 __publicField(this, "lastGetCurrentTimeLog", 0);
-//
-//
+// Timestamp of last getCurrentTime log (for throttling)
+// Track start time (absolute time) and duration of each scheduled chunk for accurate current playback time calculation
 __publicField(this, "scheduledChunkInfo", []);
 // Volume control
 __publicField(this, "gainNode", null);
@@ -96,13 +96,14 @@ class StreamingAudioPlayer {
 }
 }
 /**
-*
-*
+* Ensure AudioContext is running (auto-resume if suspended)
+* Only auto-resume when playing and not paused, avoid interfering with normal pause/resume logic
 *
-*
-* -
-* -
-* -
+* Optimizations:
+* - Fast path: if already in running state, return directly
+* - Avoid concurrent resume: use isResuming flag to prevent duplicate resume requests
+* - Handle closed state: if AudioContext is closed, cannot resume
+* @internal
 */
 async ensureAudioContextRunning() {
 if (!this.audioContext) {
@@ -372,7 +373,8 @@ class StreamingAudioPlayer {
 }
 /**
 * Get current playback time (seconds)
-*
+* Returns total actual playback duration
+* @internal
 */
 getCurrentTime() {
 if (!this.audioContext || !this.isPlaying) {
@@ -403,7 +405,8 @@ class StreamingAudioPlayer {
 }
 /**
 * Get total duration of buffered audio (seconds)
-*
+* Calculate total duration of all buffered chunks
+* @internal
 */
 getBufferedDuration() {
 if (!this.audioContext) {
@@ -598,9 +601,10 @@ class StreamingAudioPlayer {
 this.log("Flushed (soft)", { remainingScheduled: this.scheduledChunks });
 }
 /**
-*
-*
-* @param volume
+* Set volume (0.0 - 1.0)
+* Note: This only controls avatar audio player volume, does not affect system volume
+* @param volume Volume value, range 0.0 to 1.0 (0.0 is mute, 1.0 is max volume)
+* @internal
 */
 setVolume(volume) {
 if (volume < 0 || volume > 1) {
@@ -613,8 +617,9 @@ class StreamingAudioPlayer {
 }
 }
 /**
-*
-* @returns
+* Get current volume
+* @returns Current volume value (0.0 - 1.0)
+* @internal
 */
 getVolume() {
 return this.volume;
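The restored comments above describe a guard against concurrent `AudioContext.resume()` calls. A standalone sketch of that pattern (not the SDK's code), using only the standard Web Audio API:

```typescript
class ResumableContext {
  private audioContext: AudioContext | null = null
  private isResuming = false

  async ensureRunning(): Promise<void> {
    if (!this.audioContext) return

    // Fast path: nothing to do when already running
    if (this.audioContext.state === 'running') return

    // A closed context can never be resumed
    if (this.audioContext.state === 'closed') return

    // Avoid overlapping resume() calls from concurrent callers
    if (this.isResuming) return
    this.isResuming = true
    try {
      await this.audioContext.resume()
    } finally {
      this.isResuming = false
    }
  }
}
```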
package/dist/animation/AnimationWebSocketClient.d.ts
CHANGED

@@ -1,61 +1,6 @@
-
-
-
-
-
-
-clientId?: string;
-}
-export declare class AnimationWebSocketClient extends EventEmitter {
-private wsUrl;
-private reconnectAttempts;
-private jwtToken?;
-private appId?;
-private clientId?;
-private ws;
-private currentCharacterId;
-private currentRetryCount;
-private isConnecting;
-private isManuallyDisconnected;
-private reconnectTimer;
-private sessionConfigured;
-constructor(options: AnimationWebSocketClientOptions);
-/**
-* 连接WebSocket
-*/
-connect(characterId: string): Promise<void>;
-/**
-* 断开连接
-*/
-disconnect(): void;
-/**
-* 发送音频数据
-* @param conversationId - 会话ID(在 protobuf 协议中映射为 reqId 字段)
-*/
-sendAudioData(conversationId: string, audioData: ArrayBuffer, end: boolean): boolean;
-/**
-* 生成会话ID
-* 使用统一的会话ID生成规则:YYYYMMDDHHmmss_nanoid
-*/
-generateConversationId(): string;
-/**
-* 获取连接状态
-*/
-isConnected(): boolean;
-/**
-* 获取当前角色ID
-*/
-getCurrentCharacterId(): string;
-private buildWebSocketUrl;
-private connectWebSocket;
-/**
-* 清理 URL 用于日志记录(隐藏敏感信息)
-*/
-private sanitizeUrlForLog;
-/**
-* v2 协议:配置会话(发送采样率等参数)
-*/
-private configureSession;
-private handleMessage;
-private scheduleReconnect;
-}
+/**
+* AnimationWebSocketClient: Animation-specific WebSocket client
+* Uses driveningress/v2 protocol, only handles audio input and animation output
+* @internal
+*/
+export {};
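Since `AnimationWebSocketClient` is now `@internal`, application code should go through the documented controller surface instead; a minimal sketch based on the README API above:

```typescript
import { AvatarManager, AvatarView } from '@spatialwalk/avatarkit'

// Internal classes (AnimationWebSocketClient, StreamingAudioPlayer, AnimationPlayer)
// are no longer part of the public API; the controller on an AvatarView covers the same needs.
async function createAvatar(container: HTMLElement) {
  const avatar = await AvatarManager.shared.load('your-character-id')
  const view = new AvatarView(avatar, container)

  await view.controller.start()   // instead of driving AnimationWebSocketClient directly
  view.controller.setVolume(0.8)  // instead of touching StreamingAudioPlayer directly
  return view
}
```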
package/dist/animation/utils/eventEmitter.d.ts
CHANGED

@@ -1,12 +1,5 @@
 /**
 * Simple Event Emitter
+* @internal
 */
-type EventHandler = (...args: any[]) => void;
-export declare class EventEmitter {
-private events;
-on(event: string, handler: EventHandler): void;
-off(event: string, handler: EventHandler): void;
-emit(event: string, ...args: any[]): void;
-removeAllListeners(event?: string): void;
-}
 export {};
package/dist/animation/utils/flameConverter.d.ts
CHANGED

@@ -1,25 +1,5 @@
-import { Flame } from '../../generated/driveningress/v1/driveningress';
-export interface FlameParams {
-shape_params?: number[];
-expr_params?: number[];
-rotation?: number[];
-translation?: number[];
-neck_pose?: number[];
-jaw_pose?: number[];
-eyes_pose?: number[];
-eyelid?: number[];
-has_eyelid?: boolean;
-}
 /**
-* Convert proto Flame to WASM FlameParams
+* Convert proto Flame to WASM FlameParams
+* @internal
 */
-export
-/**
-* Convert WASM FlameParams to proto Flame format
-* Used for transition animation from idle to speaking
-*/
-export declare function convertWasmParamsToProtoFlame(wasmParams: FlameParams): Flame;
-/**
-* Create a neutral proto Flame (zero pose)
-*/
-export declare function createNeutralFlameProto(): Flame;
+export {};
package/dist/audio/AnimationPlayer.d.ts
CHANGED

@@ -1,75 +1,6 @@
-
-
-
-
-
-
-private onEndedCallback?;
-private static audioUnlocked;
-private useStreaming;
-/**
-* 解锁音频上下文(Safari 自动播放策略)
-* 必须在用户交互事件(如 click)中调用
-*/
-static unlockAudioContext(): Promise<void>;
-/**
-* Initialize with HTMLAudioElement (traditional way)
-*/
-initialize(audioUrl: string, onEnded?: () => void): Promise<void>;
-/**
-* Initialize with StreamingAudioPlayer (streaming way)
-* @deprecated 使用 prepareStreamingPlayer() 代替
-*/
-initializeStreaming(streamingPlayer: StreamingAudioPlayer, onEnded?: () => void): Promise<void>;
-/**
-* 检查流式播放器是否已准备好
-*/
-isStreamingReady(): boolean;
-/**
-* 获取流式播放器实例
-*/
-getStreamingPlayer(): StreamingAudioPlayer | null;
-/**
-* 创建并初始化流式播放器
-* 在服务连接建立时调用
-*/
-createAndInitializeStreamingPlayer(): Promise<void>;
-/**
-* 准备流式播放器(如果未创建则创建并初始化)
-* 停止之前的播放并更新结束回调
-*/
-prepareStreamingPlayer(onEnded?: () => void): Promise<void>;
-private setupEventListeners;
-play(): Promise<void>;
-stop(): void;
-isPlaying(): boolean;
-getCurrentFrameIndex(): number;
-/**
-* Get current playback time
-*/
-getCurrentTime(): number;
-/**
-* 添加音频块(仅用于流式播放)
-*/
-addAudioChunk(audio: Uint8Array, isLast?: boolean): void;
-/**
-* 暂停播放
-*/
-pause(): void;
-/**
-* 继续播放
-*/
-resume(): Promise<void>;
-/**
-* 设置音量 (0.0 - 1.0)
-* 注意:这仅控制数字人音频播放器的音量,不影响系统音量
-* @param volume 音量值,范围 0.0 到 1.0(0.0 为静音,1.0 为最大音量)
-*/
-setVolume(volume: number): void;
-/**
-* 获取当前音量
-* @returns 当前音量值 (0.0 - 1.0)
-*/
-getVolume(): number;
-dispose(): void;
-}
+/**
+* Audio-animation synchronized player
+* Supports HTMLAudioElement (traditional) and StreamingAudioPlayer (streaming)
+* @internal
+*/
+export {};
package/dist/audio/StreamingAudioPlayer.d.ts
CHANGED

@@ -2,154 +2,6 @@
 * Streaming Audio Player
 * Implements real-time audio playback using Web Audio API
 * Supports dynamic PCM chunk addition without Workers
+* @internal
 */
-export
-sampleRate?: number;
-channelCount?: number;
-debug?: boolean;
-}
-export declare class StreamingAudioPlayer {
-private audioContext;
-private sampleRate;
-private channelCount;
-private debug;
-private sessionId;
-private sessionStartTime;
-private pausedTimeOffset;
-private pausedAt;
-private pausedAudioContextTime;
-private scheduledTime;
-private isPlaying;
-private isPaused;
-private autoStartEnabled;
-private autoContinue;
-private audioChunks;
-private scheduledChunks;
-private activeSources;
-private lastScheduledChunkEndTime;
-private lastGetCurrentTimeLog;
-private scheduledChunkInfo;
-private gainNode;
-private volume;
-private onEndedCallback?;
-private stateChangeHandler?;
-private isResuming;
-constructor(options?: StreamingAudioPlayerOptions);
-/**
-* Initialize audio context (create and ensure it's ready)
-*/
-initialize(): Promise<void>;
-/**
-* 确保 AudioContext 正在运行(如果被暂停则自动恢复)
-* 只在正在播放且未暂停时自动恢复,避免干扰正常的暂停/恢复逻辑
-*
-* 优化:
-* - 快速路径:如果已经是 running 状态,直接返回
-* - 避免并发恢复:使用 isResuming 标志防止重复恢复请求
-* - 处理 closed 状态:如果 AudioContext 已关闭,无法恢复
-*/
-private ensureAudioContextRunning;
-/**
-* Add audio chunk (16-bit PCM)
-*/
-addChunk(pcmData: Uint8Array, isLast?: boolean): void;
-/**
-* Start new session (stop current and start fresh)
-*/
-startNewSession(audioChunks: Array<{
-data: Uint8Array;
-isLast: boolean;
-}>): Promise<void>;
-/**
-* Start playback
-*/
-private startPlayback;
-/**
-* Schedule all pending chunks
-*/
-private scheduleAllChunks;
-/**
-* Schedule next audio chunk
-*/
-private scheduleNextChunk;
-/**
-* Convert PCM data to AudioBuffer
-* Input: 16-bit PCM (int16), Output: AudioBuffer (float32 [-1, 1])
-*/
-private pcmToAudioBuffer;
-/**
-* Get current playback time (seconds)
-* 返回实际播放的音频总时长
-*/
-getCurrentTime(): number;
-/**
-* Get total duration of buffered audio (seconds)
-* 计算所有已缓冲 chunk 的总时长
-*/
-getBufferedDuration(): number;
-/**
-* Get current AudioContext time
-* @returns Current AudioContext time in seconds, or 0 if AudioContext is not initialized
-*/
-getAudioContextTime(): number;
-/**
-* Pause playback
-*/
-pause(): void;
-/**
-* Resume playback
-*/
-resume(): Promise<void>;
-/**
-* Stop playback
-*/
-stop(): void;
-/**
-* Enable or disable auto-start (for delayed start scenarios)
-*/
-setAutoStart(enabled: boolean): void;
-/**
-* Start playback manually (for delayed start scenarios)
-* This allows starting playback after transition animation completes
-*/
-play(): void;
-/**
-* Mark playback as ended
-*/
-markEnded(): void;
-/**
-* Set ended callback
-*/
-onEnded(callback: () => void): void;
-/**
-* Check if playing
-*/
-isPlayingNow(): boolean;
-/**
-* Dispose and cleanup
-*/
-dispose(): void;
-/**
-* Flush buffered audio
-* - hard: stops all playing sources and clears all chunks
-* - soft (default): clears UNSCHEDULED chunks only
-*/
-flush(options?: {
-hard?: boolean;
-}): void;
-/**
-* 设置音量 (0.0 - 1.0)
-* 注意:这仅控制数字人音频播放器的音量,不影响系统音量
-* @param volume 音量值,范围 0.0 到 1.0(0.0 为静音,1.0 为最大音量)
-*/
-setVolume(volume: number): void;
-/**
-* 获取当前音量
-* @returns 当前音量值 (0.0 - 1.0)
-*/
-getVolume(): number;
-/**
-* Debug logging
-*/
-private log;
-}
+export {};
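The removed declarations above mention `pcmToAudioBuffer`, which converts 16-bit PCM into a float32 `AudioBuffer`. A generic sketch of that conversion (not the SDK's implementation):

```typescript
function pcm16ToAudioBuffer(ctx: AudioContext, pcm: Uint8Array, sampleRate: number, channelCount = 1): AudioBuffer {
  // Interpret the bytes as little-endian signed 16-bit samples
  const samples = new Int16Array(pcm.buffer, pcm.byteOffset, pcm.byteLength / 2)
  const frames = samples.length / channelCount
  const buffer = ctx.createBuffer(channelCount, frames, sampleRate)

  for (let ch = 0; ch < channelCount; ch++) {
    const channel = buffer.getChannelData(ch)
    for (let i = 0; i < frames; i++) {
      // Scale int16 [-32768, 32767] into float32 [-1, 1]
      channel[i] = samples[i * channelCount + ch] / 32768
    }
  }
  return buffer
}
```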
package/dist/index-BginsqFH.js
CHANGED

@@ -7780,8 +7780,9 @@ const _AnimationPlayer = class _AnimationPlayer {
 __publicField(this, "useStreaming", false);
 }
 /**
-*
-*
+* Unlock audio context (Safari autoplay policy)
+* Must be called in user interaction event (e.g., click)
+* @internal
 */
 static async unlockAudioContext() {
 if (_AnimationPlayer.audioUnlocked) {
@@ -7802,6 +7803,9 @@ const _AnimationPlayer = class _AnimationPlayer {
 }
 /**
 * Initialize with HTMLAudioElement (traditional way)
+* @param audioUrl Audio file URL
+* @param onEnded Optional callback when playback ends
+* @internal
 */
 async initialize(audioUrl, onEnded) {
 this.onEndedCallback = onEnded;
@@ -7820,7 +7824,8 @@ const _AnimationPlayer = class _AnimationPlayer {
 }
 /**
 * Initialize with StreamingAudioPlayer (streaming way)
-* @deprecated
+* @deprecated Use prepareStreamingPlayer() instead
+* @internal
 */
 async initializeStreaming(streamingPlayer, onEnded) {
 this.streamingPlayer = streamingPlayer;
@@ -7833,26 +7838,29 @@ const _AnimationPlayer = class _AnimationPlayer {
 });
 }
 /**
-*
+* Check if streaming player is ready
+* @internal
 */
 isStreamingReady() {
 return this.useStreaming && this.streamingPlayer !== null;
 }
 /**
-*
+* Get streaming player instance
+* @internal
 */
 getStreamingPlayer() {
 return this.streamingPlayer;
 }
 /**
-*
-*
+* Create and initialize streaming player
+* Called when service connection is established
+* @internal
 */
 async createAndInitializeStreamingPlayer() {
 if (this.streamingPlayer) {
 return;
 }
-const { StreamingAudioPlayer } = await import("./StreamingAudioPlayer-
+const { StreamingAudioPlayer } = await import("./StreamingAudioPlayer-fV-zTOrl.js");
 const { AvatarSDK: AvatarSDK2 } = await Promise.resolve().then(() => AvatarSDK$1);
 const audioFormat = AvatarSDK2.getAudioFormat();
 this.streamingPlayer = new StreamingAudioPlayer({
@@ -7874,8 +7882,9 @@ const _AnimationPlayer = class _AnimationPlayer {
 this.useStreaming = true;
 }
 /**
-*
-*
+* Prepare streaming player (create and initialize if not created)
+* Stop previous playback and update end callback
+* @internal
 */
 async prepareStreamingPlayer(onEnded) {
 if (!this.streamingPlayer) {
@@ -7904,6 +7913,10 @@ const _AnimationPlayer = class _AnimationPlayer {
 (_a = this.onEndedCallback) == null ? void 0 : _a.call(this);
 });
 }
+/**
+* Start playback
+* @internal
+*/
 async play() {
 if (this.useStreaming) {
 this._isPlaying = true;
@@ -7914,6 +7927,10 @@ const _AnimationPlayer = class _AnimationPlayer {
 await this.audio.play();
 }
 }
+/**
+* Stop playback
+* @internal
+*/
 stop() {
 var _a;
 this._isPlaying = false;
@@ -7926,6 +7943,10 @@ const _AnimationPlayer = class _AnimationPlayer {
 }
 }
 }
+/**
+* Check if currently playing
+* @internal
+*/
 isPlaying() {
 var _a;
 if (this.useStreaming) {
@@ -7933,12 +7954,17 @@ const _AnimationPlayer = class _AnimationPlayer {
 }
 return this._isPlaying;
 }
+/**
+* Get current frame index
+* @internal
+*/
 getCurrentFrameIndex() {
 const currentTime = this.getCurrentTime();
 return Math.floor(currentTime * this.fps);
 }
 /**
 * Get current playback time
+* @internal
 */
 getCurrentTime() {
 var _a, _b;
@@ -7948,7 +7974,8 @@ const _AnimationPlayer = class _AnimationPlayer {
 return ((_b = this.audio) == null ? void 0 : _b.currentTime) ?? 0;
 }
 /**
-*
+* Add audio chunk (only for streaming playback)
+* @internal
 */
 addAudioChunk(audio, isLast = false) {
 if (this.useStreaming && this.streamingPlayer) {
@@ -7958,7 +7985,8 @@ const _AnimationPlayer = class _AnimationPlayer {
 }
 }
 /**
-*
+* Pause playback
+* @internal
 */
 pause() {
 var _a;
@@ -7971,7 +7999,8 @@ const _AnimationPlayer = class _AnimationPlayer {
 }
 }
 /**
-*
+* Resume playback
+* @internal
 */
 async resume() {
 var _a;
@@ -7984,9 +8013,10 @@ const _AnimationPlayer = class _AnimationPlayer {
 }
 }
 /**
-*
-*
-* @param volume
+* Set volume (0.0 - 1.0)
+* Note: This only controls avatar audio player volume, does not affect system volume
+* @param volume Volume value, range 0.0 to 1.0 (0.0 is mute, 1.0 is max volume)
+* @internal
 */
 setVolume(volume) {
 if (this.useStreaming && this.streamingPlayer) {
@@ -7998,8 +8028,9 @@ const _AnimationPlayer = class _AnimationPlayer {
 }
 }
 /**
-*
-* @returns
+* Get current volume
+* @returns Current volume value (0.0 - 1.0)
+* @internal
 */
 getVolume() {
 var _a;
@@ -8009,6 +8040,10 @@ const _AnimationPlayer = class _AnimationPlayer {
 return ((_a = this.audio) == null ? void 0 : _a.volume) ?? 1;
 }
 }
+/**
+* Dispose and cleanup
+* @internal
+*/
 dispose() {
 this.stop();
 if (this.audio) {
@@ -9496,7 +9531,7 @@ class AvatarSDK {
 }
 __publicField(AvatarSDK, "_isInitialized", false);
 __publicField(AvatarSDK, "_configuration", null);
-__publicField(AvatarSDK, "_version", "1.0.0-beta.
+__publicField(AvatarSDK, "_version", "1.0.0-beta.72");
 __publicField(AvatarSDK, "_avatarCore", null);
 __publicField(AvatarSDK, "_dynamicSdkConfig", null);
 const AvatarSDK$1 = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.defineProperty({
@@ -10487,7 +10522,7 @@ class EventEmitter {
 }
 }
 class AnimationWebSocketClient extends EventEmitter {
-// v2
+// v2 protocol: mark if session is configured
 constructor(options) {
 super();
 __publicField(this, "wsUrl");
@@ -10509,7 +10544,8 @@ class AnimationWebSocketClient extends EventEmitter {
 this.clientId = options.clientId;
 }
 /**
-*
+* Connect WebSocket
+* @internal
 */
 async connect(characterId) {
 if (this.ws && this.ws.readyState === WebSocket.OPEN) {
@@ -10537,7 +10573,8 @@ class AnimationWebSocketClient extends EventEmitter {
 }
 }
 /**
-*
+* Disconnect
+* @internal
 */
 disconnect() {
 if (this.ws) {
@@ -10557,8 +10594,9 @@ class AnimationWebSocketClient extends EventEmitter {
 logger.log("[AnimationWebSocketClient] Disconnected");
 }
 /**
-*
-* @param conversationId -
+* Send audio data
+* @param conversationId - Conversation ID (mapped to reqId field in protobuf protocol)
+* @internal
 */
 sendAudioData(conversationId, audioData, end) {
 if (!this.ws || this.ws.readyState !== WebSocket.OPEN) {
@@ -10596,25 +10634,28 @@ class AnimationWebSocketClient extends EventEmitter {
 }
 }
 /**
-*
-*
+* Generate conversation ID
+* Uses unified conversation ID generation rule: YYYYMMDDHHmmss_nanoid
+* @internal
 */
 generateConversationId() {
 return idManager.generateNewConversationId();
 }
 /**
-*
+* Get connection state
+* @internal
 */
 isConnected() {
 return this.ws !== null && this.ws.readyState === WebSocket.OPEN;
 }
 /**
-*
+* Get current character ID
+* @internal
 */
 getCurrentCharacterId() {
 return this.currentCharacterId;
 }
-// ==========
+// ========== Private Methods ==========
 buildWebSocketUrl(characterId) {
 const url = new URL(this.wsUrl);
 url.searchParams.set("id", characterId);
@@ -10742,7 +10783,8 @@ class AnimationWebSocketClient extends EventEmitter {
 });
 }
 /**
-*
+* Sanitize URL for logging (hide sensitive information)
+* @internal
 */
 sanitizeUrlForLog(url) {
 try {
@@ -10761,7 +10803,8 @@ class AnimationWebSocketClient extends EventEmitter {
 }
 }
 /**
-* v2
+* v2 protocol: configure session (send sample rate and other parameters)
+* @internal
 */
 configureSession() {
 if (!this.ws || this.ws.readyState !== WebSocket.OPEN) {
@@ -10775,7 +10818,7 @@ class AnimationWebSocketClient extends EventEmitter {
 clientConfigureSession: {
 sampleRate: audioFormatConfig.sampleRate,
 bitrate: 0,
-//
+// Set according to actual requirements
 audioFormat: AudioFormat.AUDIO_FORMAT_PCM_S16LE,
 transportCompression: TransportCompression.TRANSPORT_COMPRESSION_NONE
 }
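The restored comment above documents the conversation ID rule `YYYYMMDDHHmmss_nanoid`. A hedged sketch of a generator following that rule, assuming the `nanoid` package for the random suffix (the SDK's own `idManager` may differ):

```typescript
import { nanoid } from 'nanoid'

function generateConversationId(date = new Date()): string {
  const pad = (n: number) => String(n).padStart(2, '0')
  const stamp =
    `${date.getFullYear()}${pad(date.getMonth() + 1)}${pad(date.getDate())}` +
    `${pad(date.getHours())}${pad(date.getMinutes())}${pad(date.getSeconds())}`
  return `${stamp}_${nanoid()}`
}

// e.g. "20260117093045_V1StGXR8_Z5jdHi6B-myT"
```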
package/dist/index.js
CHANGED
package/package.json
CHANGED