@spatialwalk/avatarkit 1.0.0-beta.7 → 1.0.0-beta.71
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +602 -10
- package/README.md +475 -312
- package/dist/StreamingAudioPlayer-D8Q8WiEg.js +638 -0
- package/dist/animation/AnimationWebSocketClient.d.ts +6 -50
- package/dist/animation/utils/eventEmitter.d.ts +1 -9
- package/dist/animation/utils/flameConverter.d.ts +3 -24
- package/dist/audio/AnimationPlayer.d.ts +6 -57
- package/dist/audio/StreamingAudioPlayer.d.ts +2 -118
- package/dist/avatar_core_wasm-Dv943JJl.js +2696 -0
- package/dist/{avatar_core_wasm.wasm → avatar_core_wasm-e68766db.wasm} +0 -0
- package/dist/config/app-config.d.ts +3 -4
- package/dist/config/constants.d.ts +10 -18
- package/dist/config/sdk-config-loader.d.ts +4 -10
- package/dist/core/Avatar.d.ts +2 -14
- package/dist/core/AvatarController.d.ts +95 -85
- package/dist/core/AvatarDownloader.d.ts +7 -92
- package/dist/core/AvatarManager.d.ts +22 -12
- package/dist/core/AvatarSDK.d.ts +35 -0
- package/dist/core/AvatarView.d.ts +55 -140
- package/dist/core/NetworkLayer.d.ts +7 -59
- package/dist/generated/common/v1/models.d.ts +36 -0
- package/dist/generated/driveningress/v1/driveningress.d.ts +0 -1
- package/dist/generated/driveningress/v2/driveningress.d.ts +82 -1
- package/dist/generated/google/protobuf/struct.d.ts +0 -1
- package/dist/generated/google/protobuf/timestamp.d.ts +0 -1
- package/dist/index-U8QcNdma.js +16477 -0
- package/dist/index.d.ts +2 -4
- package/dist/index.js +17 -18
- package/dist/renderer/RenderSystem.d.ts +9 -79
- package/dist/renderer/covariance.d.ts +3 -11
- package/dist/renderer/renderer.d.ts +6 -2
- package/dist/renderer/sortSplats.d.ts +3 -10
- package/dist/renderer/webgl/reorderData.d.ts +4 -11
- package/dist/renderer/webgl/webglRenderer.d.ts +34 -4
- package/dist/renderer/webgpu/webgpuRenderer.d.ts +30 -5
- package/dist/types/character-settings.d.ts +1 -1
- package/dist/types/character.d.ts +3 -15
- package/dist/types/index.d.ts +123 -43
- package/dist/utils/animation-interpolation.d.ts +4 -15
- package/dist/utils/client-id.d.ts +6 -0
- package/dist/utils/conversationId.d.ts +10 -0
- package/dist/utils/error-utils.d.ts +0 -1
- package/dist/utils/id-manager.d.ts +34 -0
- package/dist/utils/logger.d.ts +2 -11
- package/dist/utils/posthog-tracker.d.ts +8 -0
- package/dist/utils/pwa-cache-manager.d.ts +17 -0
- package/dist/utils/usage-tracker.d.ts +6 -0
- package/dist/vanilla/vite.config.d.ts +2 -0
- package/dist/vite.d.ts +19 -0
- package/dist/wasm/avatarCoreAdapter.d.ts +15 -126
- package/dist/wasm/avatarCoreMemory.d.ts +5 -2
- package/package.json +19 -8
- package/vite.d.ts +20 -0
- package/vite.js +126 -0
- package/dist/StreamingAudioPlayer-D7s8q5h0.js +0 -319
- package/dist/StreamingAudioPlayer-D7s8q5h0.js.map +0 -1
- package/dist/animation/AnimationWebSocketClient.d.ts.map +0 -1
- package/dist/animation/utils/eventEmitter.d.ts.map +0 -1
- package/dist/animation/utils/flameConverter.d.ts.map +0 -1
- package/dist/audio/AnimationPlayer.d.ts.map +0 -1
- package/dist/audio/StreamingAudioPlayer.d.ts.map +0 -1
- package/dist/avatar_core_wasm-D4eEi7Eh.js +0 -1666
- package/dist/avatar_core_wasm-D4eEi7Eh.js.map +0 -1
- package/dist/config/app-config.d.ts.map +0 -1
- package/dist/config/constants.d.ts.map +0 -1
- package/dist/config/sdk-config-loader.d.ts.map +0 -1
- package/dist/core/Avatar.d.ts.map +0 -1
- package/dist/core/AvatarController.d.ts.map +0 -1
- package/dist/core/AvatarDownloader.d.ts.map +0 -1
- package/dist/core/AvatarKit.d.ts +0 -66
- package/dist/core/AvatarKit.d.ts.map +0 -1
- package/dist/core/AvatarManager.d.ts.map +0 -1
- package/dist/core/AvatarView.d.ts.map +0 -1
- package/dist/core/NetworkLayer.d.ts.map +0 -1
- package/dist/generated/driveningress/v1/driveningress.d.ts.map +0 -1
- package/dist/generated/driveningress/v2/driveningress.d.ts.map +0 -1
- package/dist/generated/google/protobuf/struct.d.ts.map +0 -1
- package/dist/generated/google/protobuf/timestamp.d.ts.map +0 -1
- package/dist/index-CpSvWi6A.js +0 -6026
- package/dist/index-CpSvWi6A.js.map +0 -1
- package/dist/index.d.ts.map +0 -1
- package/dist/index.js.map +0 -1
- package/dist/renderer/RenderSystem.d.ts.map +0 -1
- package/dist/renderer/covariance.d.ts.map +0 -1
- package/dist/renderer/renderer.d.ts.map +0 -1
- package/dist/renderer/sortSplats.d.ts.map +0 -1
- package/dist/renderer/webgl/reorderData.d.ts.map +0 -1
- package/dist/renderer/webgl/webglRenderer.d.ts.map +0 -1
- package/dist/renderer/webgpu/webgpuRenderer.d.ts.map +0 -1
- package/dist/types/character-settings.d.ts.map +0 -1
- package/dist/types/character.d.ts.map +0 -1
- package/dist/types/index.d.ts.map +0 -1
- package/dist/utils/animation-interpolation.d.ts.map +0 -1
- package/dist/utils/cls-tracker.d.ts +0 -17
- package/dist/utils/cls-tracker.d.ts.map +0 -1
- package/dist/utils/error-utils.d.ts.map +0 -1
- package/dist/utils/logger.d.ts.map +0 -1
- package/dist/utils/reqId.d.ts +0 -20
- package/dist/utils/reqId.d.ts.map +0 -1
- package/dist/wasm/avatarCoreAdapter.d.ts.map +0 -1
- package/dist/wasm/avatarCoreMemory.d.ts.map +0 -1

package/dist/StreamingAudioPlayer-D8Q8WiEg.js

@@ -0,0 +1,638 @@
+var __defProp = Object.defineProperty;
+var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
+var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
+import { A as APP_CONFIG, l as logger, e as errorToMessage, a as logEvent } from "./index-U8QcNdma.js";
+class StreamingAudioPlayer {
+  // Mark if AudioContext is being resumed, avoid concurrent resume requests
+  constructor(options) {
+    // AudioContext is managed internally
+    __publicField(this, "audioContext", null);
+    __publicField(this, "sampleRate");
+    __publicField(this, "channelCount");
+    __publicField(this, "debug");
+    // Session-level state
+    __publicField(this, "sessionId");
+    __publicField(this, "sessionStartTime", 0);
+    // AudioContext time when session started
+    __publicField(this, "pausedTimeOffset", 0);
+    // Accumulated paused time
+    __publicField(this, "pausedAt", 0);
+    // Time when paused
+    __publicField(this, "pausedAudioContextTime", 0);
+    // audioContext.currentTime when paused (for resume calculation)
+    __publicField(this, "scheduledTime", 0);
+    // Next chunk schedule time in AudioContext time
+    // Playback state
+    __publicField(this, "isPlaying", false);
+    __publicField(this, "isPaused", false);
+    __publicField(this, "autoStartEnabled", true);
+    // Control whether to auto-start when buffer is ready
+    __publicField(this, "autoContinue", false);
+    // Mark if should auto-continue playback (used after auto-pause when end=false and no data)
+    // Audio buffer queue
+    __publicField(this, "audioChunks", []);
+    __publicField(this, "scheduledChunks", 0);
+    // Number of chunks already scheduled
+    __publicField(this, "activeSources", /* @__PURE__ */ new Set());
+    __publicField(this, "lastScheduledChunkEndTime", 0);
+    // End time of last scheduled chunk (relative time)
+    __publicField(this, "lastGetCurrentTimeLog", 0);
+    // Timestamp of last getCurrentTime log (for throttling)
+    // Track start time (absolute time) and duration of each scheduled chunk for accurate current playback time calculation
+    __publicField(this, "scheduledChunkInfo", []);
+    // Volume control
+    __publicField(this, "gainNode", null);
+    __publicField(this, "volume", 1);
+    // Default volume 1.0 (0.0 - 1.0)
+    // Event callbacks
+    __publicField(this, "onEndedCallback");
+    // AudioContext state management
+    __publicField(this, "stateChangeHandler");
+    __publicField(this, "isResuming", false);
+    this.sessionId = `session_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
+    this.sampleRate = (options == null ? void 0 : options.sampleRate) ?? APP_CONFIG.audio.sampleRate;
+    this.channelCount = (options == null ? void 0 : options.channelCount) ?? 1;
+    this.debug = (options == null ? void 0 : options.debug) ?? false;
+  }
+  /**
+   * Initialize audio context (create and ensure it's ready)
+   */
+  async initialize() {
+    if (this.audioContext) {
+      return;
+    }
+    try {
+      this.audioContext = new AudioContext({
+        sampleRate: this.sampleRate
+      });
+      this.gainNode = this.audioContext.createGain();
+      this.gainNode.gain.value = this.volume;
+      this.gainNode.connect(this.audioContext.destination);
+      if (this.audioContext.state === "suspended") {
+        await this.audioContext.resume();
+      }
+      this.stateChangeHandler = (event) => {
+        const context = event.target;
+        if (context.state === "suspended" && this.isPlaying && !this.isPaused) {
+          this.ensureAudioContextRunning().catch((err) => {
+            logger.errorWithError("[StreamingAudioPlayer] Failed to auto-resume AudioContext after external suspend:", err);
+          });
+        }
+      };
+      this.audioContext.addEventListener("statechange", this.stateChangeHandler);
+      this.log("AudioContext initialized", {
+        sessionId: this.sessionId,
+        sampleRate: this.audioContext.sampleRate,
+        state: this.audioContext.state
+      });
+    } catch (error) {
+      const message = errorToMessage(error);
+      logEvent("activeAudioSessionFailed", "warning", {
+        sessionId: this.sessionId,
+        reason: message
+      });
+      logger.error("Failed to initialize AudioContext:", message);
+      throw error instanceof Error ? error : new Error(message);
+    }
+  }
+  /**
+   * Ensure AudioContext is running (auto-resume if suspended)
+   * Only auto-resume when playing and not paused, avoid interfering with normal pause/resume logic
+   *
+   * Optimizations:
+   * - Fast path: if already in running state, return directly
+   * - Avoid concurrent resume: use isResuming flag to prevent duplicate resume requests
+   * - Handle closed state: if AudioContext is closed, cannot resume
+   * @internal
+   */
+  async ensureAudioContextRunning() {
+    if (!this.audioContext) {
+      return;
+    }
+    const state = this.audioContext.state;
+    if (state === "running") {
+      return;
+    }
+    if (state === "closed") {
+      this.log("AudioContext is closed, cannot resume", {
+        sessionId: this.sessionId,
+        state
+      });
+      return;
+    }
+    if (state === "suspended" && this.isPlaying && !this.isPaused) {
+      if (this.isResuming) {
+        this.log("AudioContext resume already in progress, skipping duplicate request", {
+          sessionId: this.sessionId,
+          state
+        });
+        return;
+      }
+      this.isResuming = true;
+      try {
+        this.log("AudioContext is suspended during playback, resuming...", {
+          sessionId: this.sessionId,
+          state,
+          isPlaying: this.isPlaying,
+          isPaused: this.isPaused
+        });
+        await this.audioContext.resume();
+        this.log("AudioContext resumed successfully", {
+          sessionId: this.sessionId,
+          state: this.audioContext.state
+        });
+      } catch (err) {
+        logger.errorWithError("[StreamingAudioPlayer] Failed to resume AudioContext:", err);
+        logEvent("character_player", "error", {
+          sessionId: this.sessionId,
+          event: "audio_context_resume_failed",
+          reason: err instanceof Error ? err.message : String(err)
+        });
+      } finally {
+        this.isResuming = false;
+      }
+    }
+  }
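
Because browsers only allow an AudioContext to start (or resume) after a user gesture, the `initialize()` path above is most naturally called from a click or tap handler. A minimal consumption sketch follows; the import path, button selector, and option values are assumptions (the class ships in this internal chunk and may not be re-exported by the package's public entry point):

```ts
// Hypothetical wiring — StreamingAudioPlayer is bundled as an internal chunk of
// @spatialwalk/avatarkit; whether (and where) it is re-exported is an assumption here.
import { StreamingAudioPlayer } from "@spatialwalk/avatarkit";

const player = new StreamingAudioPlayer({
  sampleRate: 16000,   // illustrative; falls back to APP_CONFIG.audio.sampleRate when omitted
  channelCount: 1,     // mono is the default
  debug: true,         // routes the player's log() output through the SDK logger
});

// AudioContext creation/resume generally requires a user gesture.
document.querySelector("#start")?.addEventListener("click", () => {
  player.initialize().catch((err) => console.error("init failed", err));
});
```
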
+  /**
+   * Add audio chunk (16-bit PCM)
+   */
+  addChunk(pcmData, isLast = false) {
+    if (!this.audioContext) {
+      logger.error("AudioContext not initialized");
+      return;
+    }
+    if (this.isPlaying && !this.isPaused && this.audioContext.state === "suspended") {
+      this.ensureAudioContextRunning().catch((err) => {
+        logger.errorWithError("[StreamingAudioPlayer] Failed to ensure AudioContext running in addChunk:", err);
+      });
+    }
+    this.audioChunks.push({ data: pcmData, isLast });
+    this.log(`Added chunk ${this.audioChunks.length}`, {
+      size: pcmData.length,
+      totalChunks: this.audioChunks.length,
+      isLast,
+      isPlaying: this.isPlaying,
+      scheduledChunks: this.scheduledChunks
+    });
+    if (this.autoContinue && this.isPaused) {
+      this.log("[StreamingAudioPlayer] autoContinue=true, auto-resuming playback");
+      this.autoContinue = false;
+      this.resume().catch((err) => {
+        logger.errorWithError("Failed to auto-resume playback:", err);
+      });
+    }
+    if (!this.isPlaying && this.autoStartEnabled && this.audioChunks.length > 0) {
+      this.log("[StreamingAudioPlayer] Auto-starting playback from addChunk");
+      this.startPlayback().catch((err) => {
+        logger.errorWithError("[StreamingAudioPlayer] Failed to start playback from addChunk:", err);
+      });
+    } else if (this.isPlaying && !this.isPaused) {
+      this.log("[StreamingAudioPlayer] Already playing, scheduling next chunk");
+      this.scheduleNextChunk();
+    } else {
+      this.log("[StreamingAudioPlayer] Not playing and no chunks, waiting for more chunks");
+    }
+  }
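
`addChunk()` buffers a 16-bit PCM chunk, auto-starts playback once data is available (unless auto-start is disabled), and treats the chunk flagged `isLast` as the end of the utterance. A hedged feeding sketch, reusing `player` from the previous snippet; the WebSocket transport and the `parseFrame` helper are placeholders, not SDK APIs:

```ts
// Placeholders for whatever transport delivers the PCM stream in your app.
declare const ws: WebSocket;
declare function parseFrame(buf: ArrayBuffer): { pcm: ArrayBuffer; end: boolean };

// "player" is the StreamingAudioPlayer instance created in the earlier sketch.
ws.binaryType = "arraybuffer";
ws.addEventListener("message", (event: MessageEvent<ArrayBuffer>) => {
  const { pcm, end } = parseFrame(event.data);
  player.addChunk(new Uint8Array(pcm), end); // playback auto-starts once chunks are buffered
});

player.onEnded(() => {
  // Fires after the chunk flagged isLast has finished playing.
  console.log("utterance finished");
});
```
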
+  /**
+   * Start new session (stop current and start fresh)
+   */
+  async startNewSession(audioChunks) {
+    this.stop();
+    this.sessionId = `session_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
+    this.audioChunks = [];
+    this.scheduledChunks = 0;
+    this.pausedTimeOffset = 0;
+    this.pausedAt = 0;
+    this.pausedAudioContextTime = 0;
+    this.autoContinue = false;
+    this.log("Starting new session", {
+      chunks: audioChunks.length
+    });
+    for (const chunk of audioChunks) {
+      this.addChunk(chunk.data, chunk.isLast);
+    }
+  }
+  /**
+   * Start playback
+   */
+  async startPlayback() {
+    if (!this.audioContext) {
+      this.log("[StreamingAudioPlayer] Cannot start playback: AudioContext not initialized");
+      return;
+    }
+    if (this.isPlaying) {
+      this.log("[StreamingAudioPlayer] Cannot start playback: Already playing");
+      return;
+    }
+    await this.ensureAudioContextRunning();
+    this.isPlaying = true;
+    this.sessionStartTime = this.audioContext.currentTime;
+    this.scheduledTime = this.sessionStartTime;
+    this.lastScheduledChunkEndTime = 0;
+    this.scheduledChunkInfo = [];
+    this.autoContinue = false;
+    this.log("[StreamingAudioPlayer] Starting playback", {
+      sessionStartTime: this.sessionStartTime,
+      bufferedChunks: this.audioChunks.length,
+      scheduledChunks: this.scheduledChunks,
+      activeSources: this.activeSources.size,
+      audioContextState: this.audioContext.state
+    });
+    this.scheduleAllChunks();
+  }
+  /**
+   * Schedule all pending chunks
+   */
+  scheduleAllChunks() {
+    while (this.scheduledChunks < this.audioChunks.length) {
+      this.scheduleNextChunk();
+    }
+  }
+  /**
+   * Schedule next audio chunk
+   */
+  scheduleNextChunk() {
+    if (!this.audioContext) {
+      this.log("[StreamingAudioPlayer] Cannot schedule chunk: AudioContext not initialized");
+      return;
+    }
+    if (!this.isPlaying || this.isPaused) {
+      this.log("[StreamingAudioPlayer] Cannot schedule chunk: Not playing or paused");
+      return;
+    }
+    if (this.audioContext.state === "suspended") {
+      this.ensureAudioContextRunning().catch((err) => {
+        logger.errorWithError("[StreamingAudioPlayer] Failed to ensure AudioContext running in scheduleNextChunk:", err);
+      });
+    }
+    const chunkIndex = this.scheduledChunks;
+    if (chunkIndex >= this.audioChunks.length) {
+      this.log(`[StreamingAudioPlayer] No more chunks to schedule (chunkIndex: ${chunkIndex}, totalChunks: ${this.audioChunks.length})`);
+      return;
+    }
+    const chunk = this.audioChunks[chunkIndex];
+    if (chunk.data.length === 0 && !chunk.isLast) {
+      this.scheduledChunks++;
+      return;
+    }
+    const pcmData = chunk.data;
+    const isLast = chunk.isLast;
+    const audioBuffer = this.pcmToAudioBuffer(pcmData);
+    if (!audioBuffer) {
+      const errorMessage = "Failed to create AudioBuffer from PCM data";
+      logger.error(errorMessage);
+      logEvent("character_player", "error", {
+        sessionId: this.sessionId,
+        event: "audio_buffer_creation_failed"
+      });
+      return;
+    }
+    try {
+      const source = this.audioContext.createBufferSource();
+      source.buffer = audioBuffer;
+      source.connect(this.gainNode);
+      const chunkStartTime = this.scheduledTime;
+      source.start(chunkStartTime);
+      const actualStartTime = Math.max(chunkStartTime, this.audioContext.currentTime);
+      this.scheduledChunkInfo.push({
+        startTime: actualStartTime,
+        duration: audioBuffer.duration
+      });
+      this.activeSources.add(source);
+      source.onended = () => {
+        this.activeSources.delete(source);
+        if (this.activeSources.size === 0) {
+          const lastChunk = this.audioChunks[this.scheduledChunks - 1];
+          if (lastChunk && !lastChunk.isLast) {
+            this.log("All audio chunks ended but end=false, pausing and setting autoContinue");
+            this.autoContinue = true;
+            this.pause();
+          } else if (isLast) {
+            this.log("Last audio chunk ended, marking playback as ended");
+            this.markEnded();
+          }
+        }
+      };
+      this.scheduledTime += audioBuffer.duration;
+      this.lastScheduledChunkEndTime = this.scheduledTime - this.sessionStartTime - this.pausedTimeOffset;
+      this.scheduledChunks++;
+      this.log(`[StreamingAudioPlayer] Scheduled chunk ${chunkIndex + 1}/${this.audioChunks.length}`, {
+        startTime: this.scheduledTime - audioBuffer.duration,
+        duration: audioBuffer.duration,
+        nextScheduleTime: this.scheduledTime,
+        isLast,
+        activeSources: this.activeSources.size
+      });
+    } catch (err) {
+      logger.errorWithError("Failed to schedule audio chunk:", err);
+      logEvent("character_player", "error", {
+        sessionId: this.sessionId,
+        event: "schedule_chunk_failed",
+        reason: err instanceof Error ? err.message : String(err)
+      });
+    }
+  }
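
The scheduler butts chunks end to end on the AudioContext timeline: `scheduledTime` starts at `sessionStartTime` and each scheduled buffer advances it by its own duration, so chunk k starts exactly where chunk k-1 ends. A small worked example of that arithmetic; the 16 kHz mono figures are illustrative only (the real default comes from `APP_CONFIG.audio.sampleRate`):

```ts
// Illustrative numbers only: 16 kHz, mono, 16-bit PCM (2 bytes per sample).
const sampleRate = 16000;
const channelCount = 1;
const chunkBytes = 6400;

// Same formula the player uses for buffered duration: bytes / (rate * channels * 2).
const chunkSeconds = chunkBytes / (sampleRate * channelCount * 2); // 0.2 s

// If playback starts at audioContext.currentTime = t0, the source.start() times are:
//   chunk 0 -> t0, chunk 1 -> t0 + 0.2, chunk 2 -> t0 + 0.4, ...
```
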
+  /**
+   * Convert PCM data to AudioBuffer
+   * Input: 16-bit PCM (int16), Output: AudioBuffer (float32 [-1, 1])
+   */
+  pcmToAudioBuffer(pcmData) {
+    if (!this.audioContext) {
+      return null;
+    }
+    if (pcmData.length === 0) {
+      const silenceDuration = 0.01;
+      const numSamples2 = Math.floor(this.sampleRate * silenceDuration);
+      const audioBuffer2 = this.audioContext.createBuffer(
+        this.channelCount,
+        numSamples2,
+        this.sampleRate
+      );
+      for (let channel = 0; channel < this.channelCount; channel++) {
+        const channelData = audioBuffer2.getChannelData(channel);
+        channelData.fill(0);
+      }
+      return audioBuffer2;
+    }
+    const alignedData = new Uint8Array(pcmData);
+    const int16Array = new Int16Array(alignedData.buffer, 0, alignedData.length / 2);
+    const numSamples = int16Array.length / this.channelCount;
+    const audioBuffer = this.audioContext.createBuffer(
+      this.channelCount,
+      numSamples,
+      this.sampleRate
+    );
+    for (let channel = 0; channel < this.channelCount; channel++) {
+      const channelData = audioBuffer.getChannelData(channel);
+      for (let i = 0; i < numSamples; i++) {
+        const sampleIndex = i * this.channelCount + channel;
+        channelData[i] = int16Array[sampleIndex] / 32768;
+      }
+    }
+    return audioBuffer;
+  }
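
The conversion above treats the bytes as interleaved int16 samples and normalizes them by 32768 into the [-1, 1) range the Web Audio API expects. For reference, a standalone sketch of the same transform outside the class; the function name is mine, not part of the SDK:

```ts
// Standalone sketch of the int16 -> float32 de-interleave, mirroring pcmToAudioBuffer().
function pcm16ToFloat32(pcm: Uint8Array, channelCount: number): Float32Array[] {
  const copy = new Uint8Array(pcm);                            // copy to get an aligned buffer
  const int16 = new Int16Array(copy.buffer, 0, copy.length >> 1);
  const frames = Math.floor(int16.length / channelCount);
  const channels = Array.from({ length: channelCount }, () => new Float32Array(frames));
  for (let ch = 0; ch < channelCount; ch++) {
    for (let i = 0; i < frames; i++) {
      channels[ch][i] = int16[i * channelCount + ch] / 32768;   // [-32768, 32767] -> [-1, 1)
    }
  }
  return channels;
}
```
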
+  /**
+   * Get current playback time (seconds)
+   * Returns total actual playback duration
+   * @internal
+   */
+  getCurrentTime() {
+    if (!this.audioContext || !this.isPlaying) {
+      return 0;
+    }
+    if (this.isPaused) {
+      return this.pausedAt;
+    }
+    const currentAudioTime = this.audioContext.currentTime;
+    if (this.activeSources.size === 0 && this.scheduledChunks > 0) {
+      return Math.max(0, this.lastScheduledChunkEndTime);
+    }
+    let totalPlayedDuration = 0;
+    for (let i = 0; i < this.scheduledChunkInfo.length; i++) {
+      const chunkInfo = this.scheduledChunkInfo[i];
+      const chunkEndTime = chunkInfo.startTime + chunkInfo.duration;
+      if (currentAudioTime < chunkInfo.startTime) {
+        break;
+      } else if (chunkEndTime <= currentAudioTime) {
+        totalPlayedDuration += chunkInfo.duration;
+      } else {
+        const playedTime = currentAudioTime - chunkInfo.startTime;
+        totalPlayedDuration += playedTime;
+        break;
+      }
+    }
+    return Math.max(0, totalPlayedDuration);
+  }
+  /**
+   * Get total duration of buffered audio (seconds)
+   * Calculate total duration of all buffered chunks
+   * @internal
+   */
+  getBufferedDuration() {
+    if (!this.audioContext) {
+      return 0;
+    }
+    let totalDuration = 0;
+    for (const chunk of this.audioChunks) {
+      const chunkDuration = chunk.data.length / (this.sampleRate * this.channelCount * 2);
+      totalDuration += chunkDuration;
+    }
+    return totalDuration;
+  }
+  /**
+   * Get current AudioContext time
+   * @returns Current AudioContext time in seconds, or 0 if AudioContext is not initialized
+   */
+  getAudioContextTime() {
+    var _a;
+    return ((_a = this.audioContext) == null ? void 0 : _a.currentTime) ?? 0;
+  }
+  /**
+   * Pause playback
+   */
+  pause() {
+    if (!this.isPlaying || this.isPaused || !this.audioContext) {
+      return;
+    }
+    this.pausedAt = this.getCurrentTime();
+    this.pausedAudioContextTime = this.audioContext.currentTime;
+    this.isPaused = true;
+    if (this.audioContext.state === "running") {
+      this.audioContext.suspend().catch((err) => {
+        logger.errorWithError("Failed to suspend AudioContext:", err);
+        this.isPaused = false;
+      });
+    }
+    this.log("Playback paused", {
+      pausedAt: this.pausedAt,
+      pausedAudioContextTime: this.pausedAudioContextTime,
+      audioContextState: this.audioContext.state
+    });
+  }
+  /**
+   * Resume playback
+   */
+  async resume() {
+    if (!this.isPaused || !this.audioContext || !this.isPlaying) {
+      return;
+    }
+    this.autoContinue = false;
+    if (this.audioContext.state === "suspended") {
+      try {
+        await this.audioContext.resume();
+      } catch (err) {
+        logger.errorWithError("Failed to resume AudioContext:", err);
+        throw err;
+      }
+    }
+    const currentAudioTime = this.audioContext.currentTime;
+    this.sessionStartTime = this.pausedAudioContextTime - this.pausedAt - this.pausedTimeOffset;
+    this.isPaused = false;
+    if (this.scheduledChunks < this.audioChunks.length) {
+      this.scheduleAllChunks();
+    }
+    this.log("Playback resumed", {
+      pausedAt: this.pausedAt,
+      pausedAudioContextTime: this.pausedAudioContextTime,
+      currentAudioContextTime: currentAudioTime,
+      adjustedSessionStartTime: this.sessionStartTime,
+      audioContextState: this.audioContext.state
+    });
+  }
+  /**
+   * Stop playback
+   */
+  stop() {
+    if (!this.audioContext) {
+      return;
+    }
+    if (this.isPaused && this.audioContext.state === "suspended") {
+      this.audioContext.resume().catch(() => {
+      });
+      this.isPaused = false;
+    }
+    this.isPlaying = false;
+    this.isPaused = false;
+    this.isResuming = false;
+    this.sessionStartTime = 0;
+    this.scheduledTime = 0;
+    for (const source of this.activeSources) {
+      source.onended = null;
+      try {
+        source.stop(0);
+      } catch {
+      }
+      try {
+        source.disconnect();
+      } catch {
+      }
+    }
+    this.activeSources.clear();
+    this.audioChunks = [];
+    this.scheduledChunks = 0;
+    this.autoContinue = false;
+    this.log("[StreamingAudioPlayer] Playback stopped, state reset");
+  }
+  /**
+   * Enable or disable auto-start (for delayed start scenarios)
+   */
+  setAutoStart(enabled) {
+    this.autoStartEnabled = enabled;
+    this.log(`Auto-start ${enabled ? "enabled" : "disabled"}`);
+  }
+  /**
+   * Start playback manually (for delayed start scenarios)
+   * This allows starting playback after transition animation completes
+   */
+  play() {
+    if (this.isPlaying) {
+      return;
+    }
+    this.autoStartEnabled = true;
+    this.startPlayback().catch((err) => {
+      logger.errorWithError("[StreamingAudioPlayer] Failed to start playback from play():", err);
+    });
+  }
+  /**
+   * Mark playback as ended
+   */
+  markEnded() {
+    var _a;
+    this.log("Playback ended");
+    this.isPlaying = false;
+    (_a = this.onEndedCallback) == null ? void 0 : _a.call(this);
+  }
+  /**
+   * Set ended callback
+   */
+  onEnded(callback) {
+    this.onEndedCallback = callback;
+  }
+  /**
+   * Check if playing
+   */
+  isPlayingNow() {
+    return this.isPlaying && !this.isPaused;
+  }
+  /**
+   * Dispose and cleanup
+   */
+  dispose() {
+    this.stop();
+    if (this.audioContext && this.stateChangeHandler) {
+      this.audioContext.removeEventListener("statechange", this.stateChangeHandler);
+      this.stateChangeHandler = void 0;
+    }
+    if (this.audioContext) {
+      this.audioContext.close();
+      this.audioContext = null;
+      this.gainNode = null;
+    }
+    this.audioChunks = [];
+    this.scheduledChunks = 0;
+    this.sessionStartTime = 0;
+    this.pausedTimeOffset = 0;
+    this.pausedAt = 0;
+    this.pausedAudioContextTime = 0;
+    this.scheduledTime = 0;
+    this.onEndedCallback = void 0;
+    this.log("StreamingAudioPlayer disposed");
+  }
+  /**
+   * Flush buffered audio
+   * - hard: stops all playing sources and clears all chunks
+   * - soft (default): clears UNSCHEDULED chunks only
+   */
+  flush(options) {
+    const hard = (options == null ? void 0 : options.hard) === true;
+    if (hard) {
+      this.stop();
+      this.audioChunks = [];
+      this.scheduledChunks = 0;
+      this.sessionStartTime = 0;
+      this.pausedAt = 0;
+      this.scheduledTime = 0;
+      this.log("Flushed (hard)");
+      return;
+    }
+    if (this.scheduledChunks < this.audioChunks.length) {
+      this.audioChunks.splice(this.scheduledChunks);
+    }
+    this.log("Flushed (soft)", { remainingScheduled: this.scheduledChunks });
+  }
+  /**
+   * Set volume (0.0 - 1.0)
+   * Note: This only controls avatar audio player volume, does not affect system volume
+   * @param volume Volume value, range 0.0 to 1.0 (0.0 is mute, 1.0 is max volume)
+   * @internal
+   */
+  setVolume(volume) {
+    if (volume < 0 || volume > 1) {
+      logger.warn(`[StreamingAudioPlayer] Volume out of range: ${volume}, clamping to [0, 1]`);
+      volume = Math.max(0, Math.min(1, volume));
+    }
+    this.volume = volume;
+    if (this.gainNode) {
+      this.gainNode.gain.value = volume;
+    }
+  }
+  /**
+   * Get current volume
+   * @returns Current volume value (0.0 - 1.0)
+   * @internal
+   */
+  getVolume() {
+    return this.volume;
+  }
+  /**
+   * Debug logging
+   */
+  log(message, data) {
+    if (this.debug) {
+      logger.log(`[StreamingAudioPlayer] ${message}`, data || "");
+    }
+  }
+}
+export {
+  StreamingAudioPlayer
+};
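
Beyond feeding chunks, the player exposes the usual lifecycle controls. A hedged sketch of how they fit together, reusing the `player` instance from the earlier snippets; note that several of these methods are tagged `@internal` in the typings, so treating them as supported public API is an assumption:

```ts
// "player" is the StreamingAudioPlayer instance from the first sketch.
async function controlExample() {
  player.pause();               // remembers the position and suspends the AudioContext
  await player.resume();        // resumes and schedules any chunks buffered while paused

  player.flush();               // soft flush: drops only chunks not yet scheduled
  player.flush({ hard: true }); // hard flush: stops active sources and clears everything

  player.setVolume(0.5);        // drives the internal GainNode, clamped to [0, 1]
  console.log(player.getVolume(), player.isPlayingNow(), player.getBufferedDuration());

  player.dispose();             // closes the AudioContext and releases all state
}
```
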

package/dist/animation/AnimationWebSocketClient.d.ts

@@ -1,50 +1,6 @@
-
-
-
-
-
-
-}
-export declare class AnimationWebSocketClient extends EventEmitter {
-    private wsUrl;
-    private reconnectAttempts;
-    private debug;
-    private jwtToken?;
-    private ws;
-    private currentCharacterId;
-    private currentRetryCount;
-    private isConnecting;
-    private isManuallyDisconnected;
-    private reconnectTimer;
-    constructor(options: AnimationWebSocketClientOptions);
-    /**
-     * Connect to the WebSocket
-     */
-    connect(characterId: string): Promise<void>;
-    /**
-     * Disconnect
-     */
-    disconnect(): void;
-    /**
-     * Send audio data
-     */
-    sendAudioData(reqId: string, audioData: ArrayBuffer, end: boolean): boolean;
-    /**
-     * Generate a request ID
-     * Uses the unified ReqID generation rule: YYYYMMDDHHmmss_nanoid
-     */
-    generateReqId(): string;
-    /**
-     * Get the connection status
-     */
-    isConnected(): boolean;
-    /**
-     * Get the current character ID
-     */
-    getCurrentCharacterId(): string;
-    private buildWebSocketUrl;
-    private connectWebSocket;
-    private handleMessage;
-    private scheduleReconnect;
-}
-//# sourceMappingURL=AnimationWebSocketClient.d.ts.map
+/**
+ * AnimationWebSocketClient: Animation-specific WebSocket client
+ * Uses driveningress/v2 protocol, only handles audio input and animation output
+ * @internal
+ */
+export {};
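
The removed declaration documents the request-ID convention used on this channel: a `YYYYMMDDHHmmss` timestamp followed by a nanoid. Purely for illustration, a sketch of an ID in that shape; the SDK generates these internally via `generateReqId()`, and the nanoid length used here is an assumption:

```ts
import { nanoid } from "nanoid";

// Builds an ID shaped like "20250131093045_Ab3dEf7hQx" (shape only — not the SDK's generator).
function exampleReqId(now = new Date()): string {
  const pad = (n: number) => String(n).padStart(2, "0");
  const stamp =
    `${now.getFullYear()}${pad(now.getMonth() + 1)}${pad(now.getDate())}` +
    `${pad(now.getHours())}${pad(now.getMinutes())}${pad(now.getSeconds())}`;
  return `${stamp}_${nanoid(10)}`;
}
```
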

package/dist/animation/utils/eventEmitter.d.ts

@@ -1,13 +1,5 @@
 /**
  * Simple Event Emitter
+ * @internal
  */
-type EventHandler = (...args: any[]) => void;
-export declare class EventEmitter {
-    private events;
-    on(event: string, handler: EventHandler): void;
-    off(event: string, handler: EventHandler): void;
-    emit(event: string, ...args: any[]): void;
-    removeAllListeners(event?: string): void;
-}
 export {};
-//# sourceMappingURL=eventEmitter.d.ts.map
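
The emitter's contract is still visible in the removed lines: `on`/`off` subscribe and unsubscribe a handler, `emit` fans arguments out to the registered handlers, and `removeAllListeners` clears one event or all of them. A hedged usage sketch against that removed surface; the event name is invented for illustration, and since the class is no longer exported this only describes the internal contract:

```ts
// Shape of the removed declaration, restated locally for the sketch.
declare class EventEmitter {
  on(event: string, handler: (...args: any[]) => void): void;
  off(event: string, handler: (...args: any[]) => void): void;
  emit(event: string, ...args: any[]): void;
  removeAllListeners(event?: string): void;
}

declare const emitter: EventEmitter;

const onFrame = (frame: unknown) => console.log("frame", frame);
emitter.on("frame", onFrame);       // subscribe ("frame" is a made-up event name)
emitter.emit("frame", { t: 0 });    // notify every handler registered for "frame"
emitter.off("frame", onFrame);      // unsubscribe this handler only
emitter.removeAllListeners();       // or drop every handler for every event
```
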