@spatialwalk/avatarkit 1.0.0-beta.4 → 1.0.0-beta.41
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +378 -2
- package/README.md +261 -283
- package/dist/StreamingAudioPlayer-BXytpr5T.js +506 -0
- package/dist/animation/AnimationWebSocketClient.d.ts +9 -24
- package/dist/animation/utils/eventEmitter.d.ts +0 -4
- package/dist/animation/utils/flameConverter.d.ts +3 -11
- package/dist/audio/AnimationPlayer.d.ts +4 -32
- package/dist/audio/StreamingAudioPlayer.d.ts +12 -75
- package/dist/avatar_core_wasm-i0Ocpx6q.js +2693 -0
- package/dist/avatar_core_wasm.wasm +0 -0
- package/dist/config/app-config.d.ts +1 -6
- package/dist/config/constants.d.ts +5 -27
- package/dist/config/sdk-config-loader.d.ts +2 -9
- package/dist/core/Avatar.d.ts +0 -15
- package/dist/core/AvatarController.d.ts +35 -116
- package/dist/core/AvatarDownloader.d.ts +0 -95
- package/dist/core/AvatarManager.d.ts +10 -18
- package/dist/core/AvatarSDK.d.ts +21 -0
- package/dist/core/AvatarView.d.ts +24 -110
- package/dist/core/NetworkLayer.d.ts +1 -59
- package/dist/generated/common/v1/models.d.ts +29 -0
- package/dist/generated/driveningress/v1/driveningress.d.ts +1 -12
- package/dist/generated/driveningress/v2/driveningress.d.ts +81 -3
- package/dist/generated/google/protobuf/struct.d.ts +5 -39
- package/dist/generated/google/protobuf/timestamp.d.ts +1 -103
- package/dist/index-CRKYjlwp.js +14267 -0
- package/dist/index.d.ts +1 -6
- package/dist/index.js +17 -18
- package/dist/renderer/RenderSystem.d.ts +1 -79
- package/dist/renderer/covariance.d.ts +0 -12
- package/dist/renderer/renderer.d.ts +6 -2
- package/dist/renderer/sortSplats.d.ts +0 -11
- package/dist/renderer/webgl/reorderData.d.ts +0 -13
- package/dist/renderer/webgl/webglRenderer.d.ts +19 -42
- package/dist/renderer/webgpu/webgpuRenderer.d.ts +18 -31
- package/dist/types/character-settings.d.ts +0 -5
- package/dist/types/character.d.ts +3 -21
- package/dist/types/index.d.ts +72 -36
- package/dist/utils/animation-interpolation.d.ts +3 -13
- package/dist/utils/client-id.d.ts +1 -0
- package/dist/utils/conversationId.d.ts +1 -0
- package/dist/utils/error-utils.d.ts +1 -25
- package/dist/utils/heartbeat-manager.d.ts +18 -0
- package/dist/utils/id-manager.d.ts +38 -0
- package/dist/utils/logger.d.ts +5 -11
- package/dist/utils/posthog-tracker.d.ts +11 -0
- package/dist/utils/usage-tracker.d.ts +5 -0
- package/dist/vanilla/vite.config.d.ts +2 -0
- package/dist/wasm/avatarCoreAdapter.d.ts +11 -97
- package/dist/wasm/avatarCoreMemory.d.ts +5 -54
- package/package.json +15 -13
- package/dist/StreamingAudioPlayer-L87IFoao.js +0 -319
- package/dist/StreamingAudioPlayer-L87IFoao.js.map +0 -1
- package/dist/animation/AnimationWebSocketClient.d.ts.map +0 -1
- package/dist/animation/utils/eventEmitter.d.ts.map +0 -1
- package/dist/animation/utils/flameConverter.d.ts.map +0 -1
- package/dist/audio/AnimationPlayer.d.ts.map +0 -1
- package/dist/audio/StreamingAudioPlayer.d.ts.map +0 -1
- package/dist/avatar_core_wasm-D4eEi7Eh.js +0 -1666
- package/dist/avatar_core_wasm-D4eEi7Eh.js.map +0 -1
- package/dist/config/app-config.d.ts.map +0 -1
- package/dist/config/constants.d.ts.map +0 -1
- package/dist/config/sdk-config-loader.d.ts.map +0 -1
- package/dist/core/Avatar.d.ts.map +0 -1
- package/dist/core/AvatarController.d.ts.map +0 -1
- package/dist/core/AvatarDownloader.d.ts.map +0 -1
- package/dist/core/AvatarKit.d.ts +0 -66
- package/dist/core/AvatarKit.d.ts.map +0 -1
- package/dist/core/AvatarManager.d.ts.map +0 -1
- package/dist/core/AvatarView.d.ts.map +0 -1
- package/dist/core/NetworkLayer.d.ts.map +0 -1
- package/dist/generated/driveningress/v1/driveningress.d.ts.map +0 -1
- package/dist/generated/driveningress/v2/driveningress.d.ts.map +0 -1
- package/dist/generated/google/protobuf/struct.d.ts.map +0 -1
- package/dist/generated/google/protobuf/timestamp.d.ts.map +0 -1
- package/dist/index-BDxVrKwm.js +0 -5942
- package/dist/index-BDxVrKwm.js.map +0 -1
- package/dist/index.d.ts.map +0 -1
- package/dist/index.js.map +0 -1
- package/dist/renderer/RenderSystem.d.ts.map +0 -1
- package/dist/renderer/covariance.d.ts.map +0 -1
- package/dist/renderer/renderer.d.ts.map +0 -1
- package/dist/renderer/sortSplats.d.ts.map +0 -1
- package/dist/renderer/webgl/reorderData.d.ts.map +0 -1
- package/dist/renderer/webgl/webglRenderer.d.ts.map +0 -1
- package/dist/renderer/webgpu/webgpuRenderer.d.ts.map +0 -1
- package/dist/types/character-settings.d.ts.map +0 -1
- package/dist/types/character.d.ts.map +0 -1
- package/dist/types/index.d.ts.map +0 -1
- package/dist/utils/animation-interpolation.d.ts.map +0 -1
- package/dist/utils/cls-tracker.d.ts +0 -17
- package/dist/utils/cls-tracker.d.ts.map +0 -1
- package/dist/utils/error-utils.d.ts.map +0 -1
- package/dist/utils/logger.d.ts.map +0 -1
- package/dist/utils/reqId.d.ts +0 -20
- package/dist/utils/reqId.d.ts.map +0 -1
- package/dist/wasm/avatarCoreAdapter.d.ts.map +0 -1
- package/dist/wasm/avatarCoreMemory.d.ts.map +0 -1

package/dist/StreamingAudioPlayer-BXytpr5T.js
@@ -0,0 +1,506 @@
+var __defProp = Object.defineProperty;
+var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
+var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
+import { A as APP_CONFIG, e as errorToMessage, l as logEvent, a as logger } from "./index-CRKYjlwp.js";
+class StreamingAudioPlayer {
+  constructor(options) {
+    __publicField(this, "audioContext", null);
+    __publicField(this, "sampleRate");
+    __publicField(this, "channelCount");
+    __publicField(this, "debug");
+    __publicField(this, "sessionId");
+    __publicField(this, "sessionStartTime", 0);
+    __publicField(this, "pausedTimeOffset", 0);
+    __publicField(this, "pausedAt", 0);
+    __publicField(this, "pausedAudioContextTime", 0);
+    __publicField(this, "scheduledTime", 0);
+    __publicField(this, "isPlaying", false);
+    __publicField(this, "isPaused", false);
+    __publicField(this, "autoStartEnabled", true);
+    __publicField(this, "autoContinue", false);
+    __publicField(this, "audioChunks", []);
+    __publicField(this, "scheduledChunks", 0);
+    __publicField(this, "activeSources", /* @__PURE__ */ new Set());
+    __publicField(this, "lastScheduledChunkEndTime", 0);
+    __publicField(this, "lastGetCurrentTimeLog", 0);
+    __publicField(this, "scheduledChunkInfo", []);
+    __publicField(this, "gainNode", null);
+    __publicField(this, "volume", 1);
+    __publicField(this, "onEndedCallback");
+    this.sessionId = `session_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
+    this.sampleRate = (options == null ? void 0 : options.sampleRate) ?? APP_CONFIG.audio.sampleRate;
+    this.channelCount = (options == null ? void 0 : options.channelCount) ?? 1;
+    this.debug = (options == null ? void 0 : options.debug) ?? false;
+  }
+  async initialize() {
+    if (this.audioContext) {
+      return;
+    }
+    try {
+      this.audioContext = new AudioContext({
+        sampleRate: this.sampleRate
+      });
+      this.gainNode = this.audioContext.createGain();
+      this.gainNode.gain.value = this.volume;
+      this.gainNode.connect(this.audioContext.destination);
+      if (this.audioContext.state === "suspended") {
+        await this.audioContext.resume();
+      }
+      this.log("AudioContext initialized", {
+        sessionId: this.sessionId,
+        sampleRate: this.audioContext.sampleRate,
+        state: this.audioContext.state
+      });
+    } catch (error) {
+      const message = errorToMessage(error);
+      logEvent("activeAudioSessionFailed", "warning", {
+        sessionId: this.sessionId,
+        reason: message
+      });
+      logger.error("Failed to initialize AudioContext:", message);
+      throw error instanceof Error ? error : new Error(message);
+    }
+  }
+  addChunk(pcmData, isLast = false) {
+    if (!this.audioContext) {
+      logger.error("AudioContext not initialized");
+      return;
+    }
+    this.audioChunks.push({ data: pcmData, isLast });
+    this.log(`Added chunk ${this.audioChunks.length}`, {
+      size: pcmData.length,
+      totalChunks: this.audioChunks.length,
+      isLast,
+      isPlaying: this.isPlaying,
+      scheduledChunks: this.scheduledChunks
+    });
+    if (this.autoContinue && this.isPaused) {
+      this.log("[StreamingAudioPlayer] autoContinue=true, auto-resuming playback");
+      this.autoContinue = false;
+      this.resume().catch((err) => {
+        logger.errorWithError("Failed to auto-resume playback:", err);
+      });
+    }
+    if (!this.isPlaying && this.autoStartEnabled && this.audioChunks.length > 0) {
+      this.log("[StreamingAudioPlayer] Auto-starting playback from addChunk");
+      this.startPlayback();
+    } else if (this.isPlaying && !this.isPaused) {
+      this.log("[StreamingAudioPlayer] Already playing, scheduling next chunk");
+      this.scheduleNextChunk();
+    } else {
+      this.log("[StreamingAudioPlayer] Not playing and no chunks, waiting for more chunks");
+    }
+  }
+  async startNewSession(audioChunks) {
+    this.stop();
+    this.sessionId = `session_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
+    this.audioChunks = [];
+    this.scheduledChunks = 0;
+    this.pausedTimeOffset = 0;
+    this.pausedAt = 0;
+    this.pausedAudioContextTime = 0;
+    this.autoContinue = false;
+    this.log("Starting new session", {
+      chunks: audioChunks.length
+    });
+    for (const chunk of audioChunks) {
+      this.addChunk(chunk.data, chunk.isLast);
+    }
+  }
+  startPlayback() {
+    if (!this.audioContext) {
+      this.log("[StreamingAudioPlayer] Cannot start playback: AudioContext not initialized");
+      return;
+    }
+    if (this.isPlaying) {
+      this.log("[StreamingAudioPlayer] Cannot start playback: Already playing");
+      return;
+    }
+    this.isPlaying = true;
+    this.sessionStartTime = this.audioContext.currentTime;
+    this.scheduledTime = this.sessionStartTime;
+    this.lastScheduledChunkEndTime = 0;
+    this.scheduledChunkInfo = [];
+    this.autoContinue = false;
+    this.log("[StreamingAudioPlayer] Starting playback", {
+      sessionStartTime: this.sessionStartTime,
+      bufferedChunks: this.audioChunks.length,
+      scheduledChunks: this.scheduledChunks,
+      activeSources: this.activeSources.size
+    });
+    this.scheduleAllChunks();
+  }
+  scheduleAllChunks() {
+    while (this.scheduledChunks < this.audioChunks.length) {
+      this.scheduleNextChunk();
+    }
+  }
+  scheduleNextChunk() {
+    if (!this.audioContext) {
+      this.log("[StreamingAudioPlayer] Cannot schedule chunk: AudioContext not initialized");
+      return;
+    }
+    if (!this.isPlaying || this.isPaused) {
+      this.log("[StreamingAudioPlayer] Cannot schedule chunk: Not playing or paused");
+      return;
+    }
+    const chunkIndex = this.scheduledChunks;
+    if (chunkIndex >= this.audioChunks.length) {
+      this.log(`[StreamingAudioPlayer] No more chunks to schedule (chunkIndex: ${chunkIndex}, totalChunks: ${this.audioChunks.length})`);
+      return;
+    }
+    const chunk = this.audioChunks[chunkIndex];
+    if (chunk.data.length === 0 && !chunk.isLast) {
+      this.scheduledChunks++;
+      return;
+    }
+    const pcmData = chunk.data;
+    const isLast = chunk.isLast;
+    const audioBuffer = this.pcmToAudioBuffer(pcmData);
+    if (!audioBuffer) {
+      const errorMessage = "Failed to create AudioBuffer from PCM data";
+      logger.error(errorMessage);
+      logEvent("character_player", "error", {
+        sessionId: this.sessionId,
+        event: "audio_buffer_creation_failed"
+      });
+      return;
+    }
+    try {
+      const source = this.audioContext.createBufferSource();
+      source.buffer = audioBuffer;
+      source.connect(this.gainNode);
+      const chunkStartTime = this.scheduledTime;
+      source.start(chunkStartTime);
+      const actualStartTime = Math.max(chunkStartTime, this.audioContext.currentTime);
+      this.scheduledChunkInfo.push({
+        startTime: actualStartTime,
+        duration: audioBuffer.duration
+      });
+      this.activeSources.add(source);
+      source.onended = () => {
+        this.activeSources.delete(source);
+        if (this.activeSources.size === 0) {
+          const lastChunk = this.audioChunks[this.scheduledChunks - 1];
+          if (lastChunk && !lastChunk.isLast) {
+            this.log("All audio chunks ended but end=false, pausing and setting autoContinue");
+            this.autoContinue = true;
+            this.pause();
+          } else if (isLast) {
+            this.log("Last audio chunk ended, marking playback as ended");
+            this.markEnded();
+          }
+        }
+      };
+      this.scheduledTime += audioBuffer.duration;
+      this.lastScheduledChunkEndTime = this.scheduledTime - this.sessionStartTime - this.pausedTimeOffset;
+      this.scheduledChunks++;
+      this.log(`[StreamingAudioPlayer] Scheduled chunk ${chunkIndex + 1}/${this.audioChunks.length}`, {
+        startTime: this.scheduledTime - audioBuffer.duration,
+        duration: audioBuffer.duration,
+        nextScheduleTime: this.scheduledTime,
+        isLast,
+        activeSources: this.activeSources.size
+      });
+    } catch (err) {
+      logger.errorWithError("Failed to schedule audio chunk:", err);
+      logEvent("character_player", "error", {
+        sessionId: this.sessionId,
+        event: "schedule_chunk_failed",
+        reason: err instanceof Error ? err.message : String(err)
+      });
+    }
+  }
+  pcmToAudioBuffer(pcmData) {
+    if (!this.audioContext) {
+      return null;
+    }
+    if (pcmData.length === 0) {
+      const silenceDuration = 0.01;
+      const numSamples2 = Math.floor(this.sampleRate * silenceDuration);
+      const audioBuffer2 = this.audioContext.createBuffer(
+        this.channelCount,
+        numSamples2,
+        this.sampleRate
+      );
+      for (let channel = 0; channel < this.channelCount; channel++) {
+        const channelData = audioBuffer2.getChannelData(channel);
+        channelData.fill(0);
+      }
+      return audioBuffer2;
+    }
+    const alignedData = new Uint8Array(pcmData);
+    const int16Array = new Int16Array(alignedData.buffer, 0, alignedData.length / 2);
+    const numSamples = int16Array.length / this.channelCount;
+    const audioBuffer = this.audioContext.createBuffer(
+      this.channelCount,
+      numSamples,
+      this.sampleRate
+    );
+    for (let channel = 0; channel < this.channelCount; channel++) {
+      const channelData = audioBuffer.getChannelData(channel);
+      for (let i = 0; i < numSamples; i++) {
+        const sampleIndex = i * this.channelCount + channel;
+        channelData[i] = int16Array[sampleIndex] / 32768;
+      }
+    }
+    return audioBuffer;
+  }
+  getCurrentTime() {
+    if (!this.audioContext || !this.isPlaying) {
+      return 0;
+    }
+    if (this.isPaused) {
+      return this.pausedAt;
+    }
+    const currentAudioTime = this.audioContext.currentTime;
+    if (this.activeSources.size === 0 && this.scheduledChunks > 0) {
+      return Math.max(0, this.lastScheduledChunkEndTime);
+    }
+    let totalPlayedDuration = 0;
+    for (let i = 0; i < this.scheduledChunkInfo.length; i++) {
+      const chunkInfo = this.scheduledChunkInfo[i];
+      const chunkEndTime = chunkInfo.startTime + chunkInfo.duration;
+      if (currentAudioTime < chunkInfo.startTime) {
+        break;
+      } else if (chunkEndTime <= currentAudioTime) {
+        totalPlayedDuration += chunkInfo.duration;
+      } else {
+        const playedTime = currentAudioTime - chunkInfo.startTime;
+        totalPlayedDuration += playedTime;
+        break;
+      }
+    }
+    return Math.max(0, totalPlayedDuration);
+  }
+  getBufferedDuration() {
+    if (!this.audioContext) {
+      return 0;
+    }
+    let totalDuration = 0;
+    for (const chunk of this.audioChunks) {
+      const chunkDuration = chunk.data.length / (this.sampleRate * this.channelCount * 2);
+      totalDuration += chunkDuration;
+    }
+    return totalDuration;
+  }
+  getAudioContextTime() {
+    var _a;
+    return ((_a = this.audioContext) == null ? void 0 : _a.currentTime) ?? 0;
+  }
+  pause() {
+    if (!this.isPlaying || this.isPaused || !this.audioContext) {
+      return;
+    }
+    this.pausedAt = this.getCurrentTime();
+    this.pausedAudioContextTime = this.audioContext.currentTime;
+    this.isPaused = true;
+    if (this.audioContext.state === "running") {
+      this.audioContext.suspend().catch((err) => {
+        logger.errorWithError("Failed to suspend AudioContext:", err);
+        this.isPaused = false;
+      });
+    }
+    this.log("Playback paused", {
+      pausedAt: this.pausedAt,
+      pausedAudioContextTime: this.pausedAudioContextTime,
+      audioContextState: this.audioContext.state
+    });
+  }
+  async resume() {
+    if (!this.isPaused || !this.audioContext || !this.isPlaying) {
+      return;
+    }
+    this.autoContinue = false;
+    if (this.audioContext.state === "suspended") {
+      try {
+        await this.audioContext.resume();
+      } catch (err) {
+        logger.errorWithError("Failed to resume AudioContext:", err);
+        throw err;
+      }
+    }
+    const currentAudioTime = this.audioContext.currentTime;
+    this.sessionStartTime = this.pausedAudioContextTime - this.pausedAt - this.pausedTimeOffset;
+    this.isPaused = false;
+    if (this.scheduledChunks < this.audioChunks.length) {
+      this.scheduleAllChunks();
+    }
+    this.log("Playback resumed", {
+      pausedAt: this.pausedAt,
+      pausedAudioContextTime: this.pausedAudioContextTime,
+      currentAudioContextTime: currentAudioTime,
+      adjustedSessionStartTime: this.sessionStartTime,
+      audioContextState: this.audioContext.state
+    });
+  }
+  seek(targetTime, referenceAudioContextTime) {
+    if (!this.audioContext) {
+      logger.warn("[StreamingAudioPlayer] Cannot seek: AudioContext not initialized");
+      return;
+    }
+    if (this.isPaused && this.audioContext.state === "suspended") {
+      this.audioContext.resume().catch(() => {
+      });
+      this.isPaused = false;
+    }
+    for (const source of this.activeSources) {
+      source.onended = null;
+      try {
+        source.stop(0);
+      } catch {
+      }
+      try {
+        source.disconnect();
+      } catch {
+      }
+    }
+    this.activeSources.clear();
+    let accumulatedDuration = 0;
+    let targetChunkIndex = 0;
+    let targetChunkOffset = 0;
+    for (let i = 0; i < this.audioChunks.length; i++) {
+      const chunk = this.audioChunks[i];
+      const chunkDuration = chunk.data.length / (this.sampleRate * this.channelCount * 2);
+      if (accumulatedDuration + chunkDuration >= targetTime) {
+        targetChunkIndex = i;
+        targetChunkOffset = targetTime - accumulatedDuration;
+        break;
+      }
+      accumulatedDuration += chunkDuration;
+    }
+    if (targetTime >= accumulatedDuration) {
+      targetChunkIndex = this.audioChunks.length;
+      targetChunkOffset = 0;
+    }
+    this.scheduledChunks = targetChunkIndex;
+    this.scheduledChunkInfo = [];
+    const currentAudioTime = referenceAudioContextTime ?? this.audioContext.currentTime;
+    this.sessionStartTime = currentAudioTime - targetTime;
+    this.scheduledTime = currentAudioTime;
+    if (targetChunkOffset > 0 && targetChunkIndex < this.audioChunks.length) {
+      const offsetSamples = Math.floor(targetChunkOffset * this.sampleRate * this.channelCount * 2);
+      if (offsetSamples > 0 && offsetSamples < this.audioChunks[targetChunkIndex].data.length) {
+        this.audioChunks[targetChunkIndex].data = this.audioChunks[targetChunkIndex].data.slice(offsetSamples);
+      }
+    }
+    if (this.isPlaying && !this.isPaused) {
+      this.scheduleAllChunks();
+    }
+    this.log("Seeked to position", {
+      targetTime,
+      targetChunkIndex,
+      targetChunkOffset,
+      scheduledChunks: this.scheduledChunks,
+      sessionStartTime: this.sessionStartTime
+    });
+  }
+  stop() {
+    if (!this.audioContext) {
+      return;
+    }
+    if (this.isPaused && this.audioContext.state === "suspended") {
+      this.audioContext.resume().catch(() => {
+      });
+      this.isPaused = false;
+    }
+    this.isPlaying = false;
+    this.isPaused = false;
+    this.sessionStartTime = 0;
+    this.scheduledTime = 0;
+    for (const source of this.activeSources) {
+      source.onended = null;
+      try {
+        source.stop(0);
+      } catch {
+      }
+      try {
+        source.disconnect();
+      } catch {
+      }
+    }
+    this.activeSources.clear();
+    this.audioChunks = [];
+    this.scheduledChunks = 0;
+    this.autoContinue = false;
+    this.log("[StreamingAudioPlayer] Playback stopped, state reset");
+  }
+  setAutoStart(enabled) {
+    this.autoStartEnabled = enabled;
+    this.log(`Auto-start ${enabled ? "enabled" : "disabled"}`);
+  }
+  play() {
+    if (this.isPlaying) {
+      return;
+    }
+    this.autoStartEnabled = true;
+    this.startPlayback();
+  }
+  markEnded() {
+    var _a;
+    this.log("Playback ended");
+    this.isPlaying = false;
+    (_a = this.onEndedCallback) == null ? void 0 : _a.call(this);
+  }
+  onEnded(callback) {
+    this.onEndedCallback = callback;
+  }
+  isPlayingNow() {
+    return this.isPlaying && !this.isPaused;
+  }
+  dispose() {
+    this.stop();
+    if (this.audioContext) {
+      this.audioContext.close();
+      this.audioContext = null;
+      this.gainNode = null;
+    }
+    this.audioChunks = [];
+    this.scheduledChunks = 0;
+    this.sessionStartTime = 0;
+    this.pausedTimeOffset = 0;
+    this.pausedAt = 0;
+    this.pausedAudioContextTime = 0;
+    this.scheduledTime = 0;
+    this.onEndedCallback = void 0;
+    this.log("StreamingAudioPlayer disposed");
+  }
+  flush(options) {
+    const hard = (options == null ? void 0 : options.hard) === true;
+    if (hard) {
+      this.stop();
+      this.audioChunks = [];
+      this.scheduledChunks = 0;
+      this.sessionStartTime = 0;
+      this.pausedAt = 0;
+      this.scheduledTime = 0;
+      this.log("Flushed (hard)");
+      return;
+    }
+    if (this.scheduledChunks < this.audioChunks.length) {
+      this.audioChunks.splice(this.scheduledChunks);
+    }
+    this.log("Flushed (soft)", { remainingScheduled: this.scheduledChunks });
+  }
+  setVolume(volume) {
+    if (volume < 0 || volume > 1) {
+      logger.warn(`[StreamingAudioPlayer] Volume out of range: ${volume}, clamping to [0, 1]`);
+      volume = Math.max(0, Math.min(1, volume));
+    }
+    this.volume = volume;
+    if (this.gainNode) {
+      this.gainNode.gain.value = volume;
+    }
+  }
+  getVolume() {
+    return this.volume;
+  }
+  log(message, data) {
+    if (this.debug) {
+      logger.log(`[StreamingAudioPlayer] ${message}`, data || "");
+    }
+  }
+}
+export {
+  StreamingAudioPlayer
+};
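
For orientation, the sketch below shows how a consumer might drive the StreamingAudioPlayer surface visible in the added chunk above (initialize, addChunk, onEnded, setVolume, dispose). It is a minimal sketch only: the import path, sample rate, and helper names are assumptions, not documented API.

// Sketch only: import path and surrounding names are illustrative assumptions.
import { StreamingAudioPlayer } from "@spatialwalk/avatarkit";

async function playPcmStream(pcmChunks: Uint8Array[]): Promise<void> {
  // Options mirror the constructor fields read above (sampleRate, channelCount, debug).
  const player = new StreamingAudioPlayer({ sampleRate: 16000, channelCount: 1, debug: true });
  await player.initialize();              // creates the AudioContext and GainNode
  player.onEnded(() => player.dispose()); // fires after the chunk flagged isLast finishes playing
  player.setVolume(0.8);                  // clamped to [0, 1] and applied to the GainNode

  pcmChunks.forEach((chunk, index) => {
    // Chunks are 16-bit PCM bytes; playback auto-starts on the first chunk (autoStartEnabled defaults to true).
    player.addChunk(chunk, index === pcmChunks.length - 1);
  });
}

Each addChunk call schedules its buffer back-to-back on the AudioContext timeline (scheduledTime advances by the buffer duration), so the caller only needs to deliver chunks in order.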

package/dist/animation/AnimationWebSocketClient.d.ts
@@ -2,49 +2,34 @@ import { EventEmitter } from './utils/eventEmitter';
 export interface AnimationWebSocketClientOptions {
     wsUrl: string;
     reconnectAttempts?: number;
-    debug?: boolean;
     jwtToken?: string;
+    appId?: string;
+    clientId?: string;
 }
 export declare class AnimationWebSocketClient extends EventEmitter {
     private wsUrl;
     private reconnectAttempts;
-    private debug;
     private jwtToken?;
+    private appId?;
+    private clientId?;
     private ws;
     private currentCharacterId;
     private currentRetryCount;
     private isConnecting;
     private isManuallyDisconnected;
     private reconnectTimer;
+    private sessionConfigured;
     constructor(options: AnimationWebSocketClientOptions);
-    /**
-     * Connect the WebSocket
-     */
     connect(characterId: string): Promise<void>;
-    /**
-     * Disconnect
-     */
     disconnect(): void;
-    /**
-
-     */
-    sendAudioData(reqId: string, audioData: ArrayBuffer, end: boolean): boolean;
-    /**
-     * Generate a request ID
-     * Uses the unified ReqID generation rule: YYYYMMDDHHmmss_nanoid
-     */
-    generateReqId(): string;
-    /**
-     * Get the connection state
-     */
+    sendAudioData(conversationId: string, audioData: ArrayBuffer, end: boolean): boolean;
+    generateConversationId(): string;
     isConnected(): boolean;
-    /**
-     * Get the current character ID
-     */
     getCurrentCharacterId(): string;
     private buildWebSocketUrl;
     private connectWebSocket;
+    private sanitizeUrlForLog;
+    private configureSession;
     private handleMessage;
     private scheduleReconnect;
 }
-//# sourceMappingURL=AnimationWebSocketClient.d.ts.map
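
The declaration changes above swap the per-request reqId for a conversationId, add appId/clientId options, and introduce a session-configuration step (sessionConfigured, configureSession). A rough sketch of how the new surface might be called; endpoint, token values, and the import path are placeholders rather than documented values:

import { AnimationWebSocketClient } from "@spatialwalk/avatarkit"; // assumed export path

const client = new AnimationWebSocketClient({
  wsUrl: "wss://example.invalid/driven-ingress", // placeholder endpoint
  reconnectAttempts: 3,
  jwtToken: "<jwt>",
  appId: "<app-id>",       // new option in this release
  clientId: "<client-id>", // new option in this release
});

void client.connect("character-id").then(() => {
  const conversationId = client.generateConversationId(); // replaces generateReqId()
  const pcm = new ArrayBuffer(320);                       // placeholder audio payload
  client.sendAudioData(conversationId, pcm, false);       // first argument is now a conversation ID
});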

package/dist/animation/utils/eventEmitter.d.ts
@@ -1,6 +1,3 @@
-/**
- * Simple Event Emitter
- */
 type EventHandler = (...args: any[]) => void;
 export declare class EventEmitter {
     private events;
@@ -10,4 +7,3 @@ export declare class EventEmitter {
     removeAllListeners(event?: string): void;
 }
 export {};
-//# sourceMappingURL=eventEmitter.d.ts.map

package/dist/animation/utils/flameConverter.d.ts
@@ -10,17 +10,9 @@ export interface FlameParams {
     eyelid?: number[];
     has_eyelid?: boolean;
 }
-/**
- * Convert proto Flame to WASM FlameParams format
- */
+
 export declare function convertProtoFlameToWasmParams(protoFlame: Flame): FlameParams;
-/**
- * Convert WASM FlameParams to proto Flame format
- * Used for transition animation from idle to speaking
- */
+
 export declare function convertWasmParamsToProtoFlame(wasmParams: FlameParams): Flame;
-/**
- * Create a neutral proto Flame (zero pose)
- */
+
 export declare function createNeutralFlameProto(): Flame;
-//# sourceMappingURL=flameConverter.d.ts.map
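
The three converter signatures are unchanged; only their doc comments were dropped. As a hedged illustration of how they compose (the import path is assumed, not documented):

import {
  createNeutralFlameProto,
  convertProtoFlameToWasmParams,
  convertWasmParamsToProtoFlame,
} from "@spatialwalk/avatarkit"; // assumed export path

const neutral = createNeutralFlameProto();                      // zero-pose proto Flame
const wasmParams = convertProtoFlameToWasmParams(neutral);      // FlameParams for the WASM core
const roundTripped = convertWasmParamsToProtoFlame(wasmParams); // back to proto Flame, e.g. for idle-to-speaking transitions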

package/dist/audio/AnimationPlayer.d.ts
@@ -7,51 +7,23 @@ export declare class AnimationPlayer {
     private onEndedCallback?;
     private static audioUnlocked;
     private useStreaming;
-    /**
-     * Unlock the audio context (Safari autoplay policy)
-     * Must be called from a user-interaction event (e.g. click)
-     */
     static unlockAudioContext(): Promise<void>;
-    /**
-     * Initialize with HTMLAudioElement (traditional way)
-     */
     initialize(audioUrl: string, onEnded?: () => void): Promise<void>;
-    /**
-     * Initialize with StreamingAudioPlayer (streaming way)
-     * @deprecated Use prepareStreamingPlayer() instead
-     */
     initializeStreaming(streamingPlayer: StreamingAudioPlayer, onEnded?: () => void): Promise<void>;
-    /**
-     * Check whether the streaming player is ready
-     */
     isStreamingReady(): boolean;
-    /**
-     * Get the streaming player instance
-     */
     getStreamingPlayer(): StreamingAudioPlayer | null;
-    /**
-     * Create and initialize the streaming player
-     * Called when the service connection is established
-     */
     createAndInitializeStreamingPlayer(): Promise<void>;
-    /**
-     * Prepare the streaming player (create and initialize it if not yet created)
-     * Stops any previous playback and updates the ended callback
-     */
     prepareStreamingPlayer(onEnded?: () => void): Promise<void>;
     private setupEventListeners;
     play(): Promise<void>;
     stop(): void;
     isPlaying(): boolean;
     getCurrentFrameIndex(): number;
-    /**
-     * Get current playback time
-     */
     getCurrentTime(): number;
-    /**
-     * Add an audio chunk (streaming playback only)
-     */
     addAudioChunk(audio: Uint8Array, isLast?: boolean): void;
+    pause(): void;
+    resume(): Promise<void>;
+    setVolume(volume: number): void;
+    getVolume(): number;
     dispose(): void;
 }
-//# sourceMappingURL=AnimationPlayer.d.ts.map