@m4trix/core 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +72 -0
- package/dist/api/index.cjs +83 -0
- package/dist/api/index.cjs.map +1 -0
- package/dist/api/index.d.cts +74 -0
- package/dist/api/index.d.ts +74 -0
- package/dist/api/index.js +81 -0
- package/dist/api/index.js.map +1 -0
- package/dist/helper/index.cjs +253 -0
- package/dist/helper/index.cjs.map +1 -0
- package/dist/helper/index.d.cts +92 -0
- package/dist/helper/index.d.ts +92 -0
- package/dist/helper/index.js +251 -0
- package/dist/helper/index.js.map +1 -0
- package/dist/index.cjs +2670 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +8 -0
- package/dist/index.d.ts +8 -0
- package/dist/index.js +2656 -0
- package/dist/index.js.map +1 -0
- package/dist/react/index.cjs +1324 -0
- package/dist/react/index.cjs.map +1 -0
- package/dist/react/index.d.cts +213 -0
- package/dist/react/index.d.ts +213 -0
- package/dist/react/index.js +1316 -0
- package/dist/react/index.js.map +1 -0
- package/dist/stream/index.cjs +716 -0
- package/dist/stream/index.cjs.map +1 -0
- package/dist/stream/index.d.cts +304 -0
- package/dist/stream/index.d.ts +304 -0
- package/dist/stream/index.js +712 -0
- package/dist/stream/index.js.map +1 -0
- package/dist/ui/index.cjs +316 -0
- package/dist/ui/index.cjs.map +1 -0
- package/dist/ui/index.d.cts +30 -0
- package/dist/ui/index.d.ts +30 -0
- package/dist/ui/index.js +314 -0
- package/dist/ui/index.js.map +1 -0
- package/package.json +123 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,2656 @@
|
|
|
1
|
+
import { useRef, useState, useCallback, useEffect } from 'react';
|
|
2
|
+
import { io } from 'socket.io-client';
|
|
3
|
+
import { css, LitElement, html } from 'lit';
|
|
4
|
+
import { property, state, customElement } from 'lit/decorators.js';
|
|
5
|
+
import { createRef, ref } from 'lit/directives/ref.js';
|
|
6
|
+
import { createTimeline } from 'animejs';
|
|
7
|
+
import { ToolMessage, AIMessage, HumanMessage } from '@langchain/core/messages';
|
|
8
|
+
import { Effect, pipe } from 'effect';
|
|
9
|
+
|
|
10
|
+
// esbuild-generated helper that applies an array of decorators to a class or
// class member, mirroring TypeScript's experimental-decorator emit. Kept
// byte-identical; documented for readers of the bundle.
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
// decorators: decorator functions; applied in reverse declaration order.
// target:     the class itself (kind 0) or the prototype/constructor that owns
//             the decorated member.
// key:        member name (unused for class decorations).
// kind:       0 = class, 1 = parameter/field-style, >1 = method/accessor
//             (decorated via its property descriptor).
var __decorateClass = (decorators, target, key, kind) => {
  // Seed: descriptor for members, the class for class decorations, undefined
  // for kind > 1 until the loop produces one.
  var result = kind > 1 ? void 0 : kind ? __getOwnPropDesc(target, key) : target;
  for (var i = decorators.length - 1, decorator; i >= 0; i--)
    if (decorator = decorators[i])
      // A decorator may return a replacement; otherwise keep the current value.
      result = (kind ? decorator(target, key, result) : decorator(result)) || result;
  // For member decorations, install the (possibly replaced) descriptor.
  if (kind && result)
    __defProp(target, key, result);
  return result;
};
|
|
21
|
+
|
|
22
|
+
// src/utility/Logger.ts
// Namespaced console logger. A single static switch (`globalEnabled`) turns
// output on or off for every Logger instance at once; each instance only
// contributes an optional "[namespace]" prefix.
var _Logger = class _Logger {
  /**
   * @param namespace Optional label prepended (in brackets) to every message.
   */
  constructor(namespace = "") {
    this.namespace = namespace;
  }
  /** Turn on console output for all Logger instances. */
  static enableGlobalLogging() {
    _Logger.globalEnabled = true;
  }
  /** Turn off console output for all Logger instances. */
  static disableGlobalLogging() {
    _Logger.globalEnabled = false;
  }
  /** Bracketed namespace prefix, or the empty string when unnamespaced. */
  formatPrefix() {
    return this.namespace ? `[${this.namespace}]` : "";
  }
  /**
   * Forward to the matching console method, but only while global logging is
   * enabled. The prefix is passed as a separate console argument (not
   * concatenated) so object arguments keep their interactive rendering.
   */
  logIfEnabled(level, ...args) {
    if (!_Logger.globalEnabled)
      return;
    const prefix = this.formatPrefix();
    const output = prefix ? [prefix, ...args] : args;
    console[level](...output);
  }
  log(...args) {
    this.logIfEnabled("log", ...args);
  }
  debug(...args) {
    this.logIfEnabled("debug", ...args);
  }
  info(...args) {
    this.logIfEnabled("info", ...args);
  }
  warn(...args) {
    this.logIfEnabled("warn", ...args);
  }
  error(...args) {
    this.logIfEnabled("error", ...args);
  }
};
// Logging is off by default; consumers opt in via enableGlobalLogging().
_Logger.globalEnabled = false;
var Logger = _Logger;
|
|
64
|
+
|
|
65
|
+
// src/react/adapter/VoiceEndpointAdapter.ts
// Abstract base for voice endpoint adapters: stores the endpoint
// configuration (baseUrl, endpoint path, headers) and a namespaced logger
// shared by concrete implementations.
var VoiceEndpointAdapter = class {
  /**
   * @param config Endpoint configuration ({ baseUrl?, endpoint, headers? }).
   */
  constructor(config) {
    // Fixed: namespace previously read "SuTr > EndpointAdapter", which was
    // inconsistent with the "@m4trix/core > ..." convention used by every
    // other logger in this bundle.
    this.logger = new Logger("@m4trix/core > EndpointAdapter");
    this.config = config;
  }
};
|
|
72
|
+
// Default HTTP adapter: POSTs the recorded audio as multipart/form-data to
// the configured endpoint and hands back the raw fetch Response so callers
// can stream or download the synthesized reply.
var BaseVoiceEndpointAdapter = class extends VoiceEndpointAdapter {
  constructor(config) {
    super(config);
  }
  /**
   * Send a voice file to the API endpoint and return a Pump stream of audio chunks
   */
  async sendVoiceFile({
    blob,
    metadata
  }) {
    // Assemble the multipart payload: the audio blob plus optional
    // JSON-encoded metadata.
    const payload = new FormData();
    payload.append("audio", blob);
    if (metadata) {
      payload.append("metadata", JSON.stringify(metadata));
    }
    this.logger.debug("Sending voice file to", this.config.endpoint, payload);
    const requestUrl = `${this.config.baseUrl || ""}${this.config.endpoint}`;
    const response = await fetch(requestUrl, {
      method: "POST",
      headers: this.config.headers,
      body: payload
    });
    if (!response.ok) {
      throw new Error(`API error: ${response.status} ${await response.text()}`);
    }
    if (!response.body) {
      throw new Error("No response body");
    }
    return response;
  }
};
|
|
106
|
+
|
|
107
|
+
// src/react/utility/audio/InputAudioController.ts
// Abstract base for microphone-capture controllers; only supplies the shared
// namespaced logger. Concrete behavior lives in subclasses (see
// WebAudioInputAudioController).
var InputAudioController = class {
  constructor() {
    this.logger = new Logger("@m4trix/core > InputAudioController");
  }
};
|
|
113
|
+
|
|
114
|
+
// src/react/utility/audio/WebAudioInputAudioController.ts
// Interval (ms) at which MediaRecorder emits partial data chunks.
var DEFAULT_SLICING_INTERVAL = 3e3;
// Browser microphone capture built on getUserMedia + MediaRecorder.
// Records Opus-in-WebM chunks and assembles them into a single Blob when
// recording stops. Browser-only: relies on AudioContext, MediaRecorder and
// navigator.mediaDevices globals.
var WebAudioInputAudioController = class extends InputAudioController {
  constructor(audioConfig = {}) {
    super();
    this.audioConfig = audioConfig;
    // ─── Recording state ─────────────────────────────────────────────────────
    this.audioContextState = {
      context: null,
      source: null,
      analyser: null
    };
    this.mediaRecorder = null;
    this.recordedChunks = [];
    this.recordingStream = null;
  }
  // The active AudioContext, or null when no recording session exists.
  get audioContext() {
    return this.audioContextState.context;
  }
  // Create a fresh AudioContext (16 kHz unless overridden) plus an analyser
  // node (fftSize 2048). The source node is left null here — presumably wired
  // up elsewhere or reserved for future use; TODO confirm.
  async createAudioContext() {
    const context = new AudioContext({
      sampleRate: this.audioConfig.sampleRate || 16e3,
      latencyHint: "interactive"
    });
    const analyser = context.createAnalyser();
    analyser.fftSize = 2048;
    return { context, source: null, analyser };
  }
  // Disconnect the source node, close the context, and reset all state.
  async cleanupAudioContext() {
    this.logger.debug("Cleaning up audio context");
    const { source, context } = this.audioContextState;
    if (source)
      source.disconnect();
    if (context)
      await context.close();
    this.audioContextState = { context: null, source: null, analyser: null };
  }
  // Request microphone access and start a MediaRecorder that slices data
  // every DEFAULT_SLICING_INTERVAL ms.
  //   onRecordedChunk: invoked with each non-empty Blob chunk as it arrives.
  //   onError:         invoked (instead of throwing) if setup fails.
  async startRecording({
    onRecordedChunk,
    onError
  } = {}) {
    try {
      this.logger.debug("Starting recording");
      this.recordedChunks = [];
      const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
      this.recordingStream = stream;
      // Lazily create the AudioContext on first recording.
      if (!this.audioContextState.context) {
        this.audioContextState = await this.createAudioContext();
      }
      this.mediaRecorder = new MediaRecorder(stream, {
        mimeType: "audio/webm;codecs=opus"
      });
      this.mediaRecorder.ondataavailable = (e) => {
        // Skip empty slices; accumulate the rest for the final Blob.
        if (e.data.size > 0) {
          this.recordedChunks.push(e.data);
          onRecordedChunk?.(e.data);
          this.logger.debug("Recorded chunk", e.data.size);
        }
      };
      this.mediaRecorder.start(DEFAULT_SLICING_INTERVAL);
      this.logger.debug("MediaRecorder started");
    } catch (err) {
      // Errors are reported through the callback, never thrown.
      const error = err instanceof Error ? err : new Error("Failed to start recording");
      this.logger.error(error);
      onError?.(error);
    }
  }
  // Stop the recorder; once its onstop fires, hand the concatenated WebM Blob
  // to onRecordingCompleted, then release the mic tracks and AudioContext.
  // Resolves only after cleanup has finished.
  async stopRecording({
    onRecordingCompleted
  } = {}) {
    this.logger.debug("Stopping recording");
    if (!this.mediaRecorder || this.mediaRecorder.state === "inactive")
      return;
    await new Promise((resolve) => {
      this.mediaRecorder.onstop = async () => {
        // Only emit a result when at least one chunk was captured.
        if (this.recordedChunks.length) {
          const blob = new Blob(this.recordedChunks, { type: "audio/webm" });
          onRecordingCompleted?.(blob);
          this.logger.debug("Recording completed", blob.size);
        }
        this.recordingStream?.getTracks().forEach((t) => t.stop());
        this.recordingStream = null;
        await this.cleanupAudioContext();
        resolve();
      };
      this.mediaRecorder.stop();
    });
  }
  /**
   * Cleans up all audio recording resources.
   */
  cleanup() {
    // NOTE(review): cleanupAudioContext() is async but not awaited here, so
    // the AudioContext close is fire-and-forget — confirm this is intentional.
    this.cleanupAudioContext();
    if (this.mediaRecorder && this.mediaRecorder.state !== "inactive") {
      this.mediaRecorder.stop();
    }
    if (this.recordingStream) {
      this.recordingStream.getTracks().forEach((t) => t.stop());
      this.recordingStream = null;
    }
  }
};
|
|
216
|
+
|
|
217
|
+
// src/react/utility/audio/OutputAudioController.ts
// Abstract base for audio-playback controllers; holds only a namespaced
// logger, with the namespace chosen by the subclass.
var OutputAudioController = class {
  constructor(loggerName) {
    this.logger = new Logger(loggerName);
  }
};
|
|
223
|
+
|
|
224
|
+
// src/react/utility/audio/AudioElementOutputAudioController.ts
// Audio playback via HTMLAudioElement, supporting three modes:
//   1. one-shot playback of a Blob or URL (playAudio),
//   2. streaming a fetch Response through MediaSource (playAudioStream),
//   3. incremental chunk-based streaming (initializeChunkStream).
// Browser-only: relies on Audio, MediaSource, URL.createObjectURL, document.
var AudioElementOutputAudioController = class extends OutputAudioController {
  constructor() {
    super("@m4trix/core > WebApiOutputAudioController");
    // ─── Playback state ──────────────────────────────────────────────────────
    this.currentHtmlAudio = null;
    this.currentAudioUrl = null;
  }
  // ─── One-shot playback ────────────────────────────────────────────────────
  /**
   * Play either a Blob or a URL string.
   * Uses <audio> under the hood for maximum browser compatibility.
   *
   * NOTE(review): `onComplete` is only wired up in the Blob branch below; for
   * URL-string sources it is never invoked. Confirm whether that is intended.
   */
  async playAudio({
    source,
    onComplete
  }) {
    // Replace any audio that is already playing.
    if (this.currentHtmlAudio) {
      this.currentHtmlAudio.pause();
      this.currentHtmlAudio.src = "";
      if (this.currentAudioUrl && source instanceof Blob) {
        URL.revokeObjectURL(this.currentAudioUrl);
      }
    }
    const audio = new Audio();
    this.currentHtmlAudio = audio;
    let url;
    if (source instanceof Blob) {
      // Blob sources get a temporary object URL, revoked once playback ends.
      url = URL.createObjectURL(source);
      this.currentAudioUrl = url;
      audio.onended = () => {
        URL.revokeObjectURL(url);
        onComplete?.();
      };
    } else {
      url = source;
    }
    audio.src = url;
    try {
      await audio.play();
    } catch (err) {
      // Autoplay restrictions: play() can reject until a user gesture occurs.
      this.logger.error("Playback failed, user gesture may be required", err);
    }
  }
  // ─── Streaming playback ──────────────────────────────────────────────────
  /**
   * Stream audio from a Response via MediaSource Extensions.
   * @param params.response The fetch Response whose body is an audio stream
   * @param params.mimeCodec MIME type+codec string, e.g. 'audio/mpeg'
   * @param params.onComplete Optional callback once the stream ends
   */
  async playAudioStream({
    response,
    mimeCodec = "audio/mpeg",
    onComplete
  }) {
    if (!response.ok || !response.body) {
      throw new Error(`Invalid response (${response.status})`);
    }
    if (typeof MediaSource === "undefined" || !MediaSource.isTypeSupported(mimeCodec)) {
      throw new Error(`Unsupported MIME type or codec: ${mimeCodec}`);
    }
    // Tear down any prior playback before wiring up the new MediaSource.
    await this.stopPlayback();
    const mediaSource = new MediaSource();
    const url = URL.createObjectURL(mediaSource);
    this.currentAudioUrl = url;
    const audio = new Audio(url);
    this.currentHtmlAudio = audio;
    audio.autoplay = true;
    audio.onended = () => {
      URL.revokeObjectURL(url);
      this.currentAudioUrl = null;
      onComplete?.();
    };
    mediaSource.addEventListener(
      "sourceopen",
      () => {
        const sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
        const reader = response.body.getReader();
        // Pump the response body into the SourceBuffer, waiting for
        // 'updateend' whenever the buffer is still busy appending.
        const pump = async () => {
          const { done, value } = await reader.read();
          if (done) {
            mediaSource.endOfStream();
            return;
          }
          if (value) {
            sourceBuffer.appendBuffer(value);
          }
          if (sourceBuffer.updating) {
            sourceBuffer.addEventListener("updateend", pump, { once: true });
          } else {
            pump();
          }
        };
        pump();
      },
      { once: true }
    );
    try {
      await audio.play();
    } catch (err) {
      this.logger.error(
        "Streaming playback failed, user gesture may be required",
        err
      );
    }
  }
  // ─── Chunk-based streaming playback ─────────────────────────────────────
  /**
   * Initialize a streaming audio context for chunk-based playback.
   * This creates the necessary MediaSource and SourceBuffer for subsequent chunk additions.
   * Returns functions to add chunks and end the stream, encapsulated in a closure.
   *
   * @param mimeCodec MIME type+codec string, e.g. 'audio/mpeg'
   * @param onComplete Optional callback once the stream ends
   * @returns Object containing functions to add chunks and end the stream
   */
  async initializeChunkStream({
    onComplete,
    mimeCodec = "audio/mpeg"
  }) {
    this.logger.debug(`Initializing chunk stream with codec: ${mimeCodec}`);
    if (typeof MediaSource === "undefined") {
      throw new Error("MediaSource API is not supported in this browser");
    }
    // Fall back to plain audio/mpeg when the requested codec is unsupported.
    if (!MediaSource.isTypeSupported(mimeCodec)) {
      this.logger.warn(
        `Codec ${mimeCodec} not supported, falling back to standard audio/mpeg`
      );
      mimeCodec = "audio/mpeg";
      if (!MediaSource.isTypeSupported(mimeCodec)) {
        throw new Error(
          "Neither the specified codec nor the fallback codec are supported"
        );
      }
    }
    await this.stopPlayback();
    const mediaSource = new MediaSource();
    let sourceBuffer = null;
    const url = URL.createObjectURL(mediaSource);
    this.currentAudioUrl = url;
    // A hidden <audio> element is attached to the DOM; some browsers require
    // the element to be in the document for MediaSource playback.
    const audio = new Audio(url);
    this.currentHtmlAudio = audio;
    audio.autoplay = false;
    audio.controls = true;
    audio.style.display = "none";
    document.body.appendChild(audio);
    // Closure state shared by the helpers below.
    let playbackStarted = false;
    let hasReceivedFirstChunk = false;
    let receivedChunksCount = 0;
    const pendingChunks = [];
    let isProcessingQueue = false;
    this.logger.debug("Waiting for MediaSource to open...");
    // Block until 'sourceopen' fires (max 5 s) and the SourceBuffer exists.
    await new Promise((resolve, reject) => {
      const timeout = setTimeout(() => {
        reject(new Error("MediaSource failed to open (timeout)"));
      }, 5e3);
      mediaSource.addEventListener(
        "sourceopen",
        () => {
          clearTimeout(timeout);
          this.logger.debug("MediaSource open event received");
          try {
            sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
            // Give the stream a finite placeholder duration; presumably to
            // keep seeking/buffered reporting sane — TODO confirm.
            if (mediaSource.duration === Infinity || isNaN(mediaSource.duration)) {
              mediaSource.duration = 1e3;
            }
            this.logger.debug("SourceBuffer created successfully");
            resolve();
          } catch (err) {
            reject(new Error(`Failed to create SourceBuffer: ${err}`));
          }
        },
        { once: true }
      );
    });
    const logger = this.logger;
    // Drain pendingChunks into the SourceBuffer one at a time, guarded by
    // isProcessingQueue so concurrent addChunkToStream calls don't interleave.
    const processQueue = async () => {
      if (!sourceBuffer || pendingChunks.length === 0 || isProcessingQueue) {
        return;
      }
      isProcessingQueue = true;
      try {
        while (pendingChunks.length > 0) {
          // Wait out any append already in flight.
          if (sourceBuffer.updating) {
            await new Promise((resolve) => {
              sourceBuffer.addEventListener("updateend", () => resolve(), {
                once: true
              });
            });
          }
          const nextChunk = pendingChunks.shift();
          if (!nextChunk)
            continue;
          try {
            sourceBuffer.appendBuffer(nextChunk);
            logger.debug(
              `Processed queued chunk of size ${nextChunk.byteLength}`
            );
            if (!playbackStarted && hasReceivedFirstChunk) {
              await tryStartPlayback();
            }
            // Wait for this append to finish before the next iteration.
            await new Promise((resolve) => {
              sourceBuffer.addEventListener("updateend", () => resolve(), {
                once: true
              });
            });
          } catch (err) {
            logger.error("Error appending queued chunk to source buffer", err);
          }
        }
      } finally {
        isProcessingQueue = false;
      }
    };
    // Start <audio> playback once; falls back to a one-time click handler if
    // autoplay policy blocks play() until a user gesture.
    const tryStartPlayback = async () => {
      if (playbackStarted)
        return;
      playbackStarted = true;
      logger.debug("Attempting to start audio playback...");
      // With fewer than 3 chunks and <0.5 s buffered, hold off. Note
      // playbackStarted stays true here, so no later retry occurs from this
      // path — NOTE(review): confirm that is intended.
      if (receivedChunksCount < 3 && audio.buffered.length > 0 && audio.buffered.end(0) < 0.5) {
        logger.debug("Not enough data buffered yet, delaying playback");
        return;
      }
      try {
        if (audio.readyState === 0) {
          logger.debug(
            "Audio element not ready yet, waiting for canplay event"
          );
          await new Promise((resolve) => {
            audio.addEventListener("canplay", () => resolve(), { once: true });
          });
        }
        await audio.play();
        logger.debug("Successfully started audio playback");
      } catch (err) {
        logger.error("Failed to start playback", err);
        document.addEventListener(
          "click",
          async () => {
            try {
              await audio.play();
              logger.debug("Started playback after user interaction");
            } catch (innerErr) {
              logger.error(
                "Still failed to play after user interaction",
                innerErr
              );
            }
          },
          { once: true }
        );
      }
    };
    // Queue a chunk (Blob or ArrayBuffer) for appending; kicks playback once
    // 3 chunks have arrived.
    const addChunkToStream = async (chunk) => {
      if (!sourceBuffer) {
        throw new Error(
          "Streaming context was closed or not properly initialized."
        );
      }
      let arrayBufferChunk;
      if (chunk instanceof Blob) {
        logger.debug("Converting Blob to ArrayBuffer");
        arrayBufferChunk = await chunk.arrayBuffer();
      } else {
        arrayBufferChunk = chunk;
      }
      if (!arrayBufferChunk || arrayBufferChunk.byteLength === 0) {
        logger.warn("Received empty chunk, skipping");
        return;
      }
      if (!hasReceivedFirstChunk) {
        hasReceivedFirstChunk = true;
        logger.debug(
          `First chunk received, size: ${arrayBufferChunk.byteLength} bytes`
        );
      }
      receivedChunksCount++;
      pendingChunks.push(arrayBufferChunk);
      logger.debug(
        `Added chunk #${receivedChunksCount} to queue (size: ${arrayBufferChunk.byteLength} bytes)`
      );
      await processQueue();
      if (!playbackStarted && hasReceivedFirstChunk && receivedChunksCount >= 3) {
        await tryStartPlayback();
      }
    };
    // Finish the stream: once the queue is drained, call endOfStream(), then
    // detach and release the <audio> element and object URL. Re-schedules
    // itself every 200 ms while chunks are still pending.
    const endChunkStream = () => {
      if (mediaSource && mediaSource.readyState === "open") {
        try {
          if (pendingChunks.length > 0 || sourceBuffer && sourceBuffer.updating) {
            logger.debug("Waiting for pending chunks before ending stream");
            setTimeout(() => endChunkStream(), 200);
            return;
          }
          if (hasReceivedFirstChunk) {
            mediaSource.endOfStream();
            logger.debug("MediaSource stream ended successfully");
          } else {
            logger.warn("Stream ended without receiving any chunks");
          }
        } catch (err) {
          logger.error("Error ending MediaSource stream", err);
        }
      }
      audio.onended = null;
      if (audio.parentNode) {
        audio.parentNode.removeChild(audio);
      }
      if (this.currentAudioUrl === url) {
        this.currentAudioUrl = null;
        URL.revokeObjectURL(url);
      }
      sourceBuffer = null;
    };
    audio.onended = () => {
      logger.debug("Audio playback completed");
      endChunkStream();
      onComplete?.();
    };
    return {
      addChunkToStream,
      endChunkStream
    };
  }
  /**
   * Stop any ongoing HTMLAudioElement playback.
   */
  async stopPlayback() {
    if (this.currentHtmlAudio) {
      try {
        this.currentHtmlAudio.pause();
        this.currentHtmlAudio.src = "";
      } catch (err) {
        this.logger.error("Error stopping playback", err);
      }
      this.currentHtmlAudio = null;
    }
    if (this.currentAudioUrl) {
      URL.revokeObjectURL(this.currentAudioUrl);
      this.currentAudioUrl = null;
    }
  }
  /**
   * Cleans up all audio playback resources.
   */
  cleanup() {
    // NOTE(review): stopPlayback() is async but not awaited — cleanup is
    // fire-and-forget. Confirm callers do not rely on completion.
    this.stopPlayback();
  }
};
|
|
574
|
+
|
|
575
|
+
// src/react/hooks/use-conversation/useConversation.ts
// NOTE(review): this force-enables global debug logging for every consumer of
// the library at module load — likely a development leftover; confirm before
// shipping.
Logger.enableGlobalLogging();
// React hook wiring together microphone capture, the HTTP voice endpoint and
// audio playback into a record -> send -> play round trip.
//   endpoint:       API path the recorded audio is POSTed to.
//   autoPlay:       play the server response automatically (default true).
//   downstreamMode: "STREAM" (MediaSource streaming) or "DOWNLOAD" (buffer
//                   the whole response Blob first).
// Returns { startRecording, stopRecording, voiceAgentState, error, audioContext };
// voiceAgentState cycles READY -> RECORDING -> PROCESSING -> RESPONDING -> READY.
function useConversation(endpoint, {
  onStartRecording,
  onStopRecording,
  onReceive,
  autoPlay = true,
  downstreamMode = "STREAM",
  onError,
  audioConfig = {},
  requestData = {},
  endpointConfig = {}
}) {
  // Logger kept in a ref so it is created once per component instance.
  const { current: logger } = useRef(
    new Logger("@m4trix/core > useConversation")
  );
  const inputAudioControllerRef = useRef(void 0);
  const outputAudioControllerRef = useRef(
    void 0
  );
  const endpointAdapterRef = useRef(
    void 0
  );
  const [voiceAgentState, setVoiceAgentState] = useState("READY");
  const [error, setError] = useState(null);
  // Centralized error path: store, log, and forward to the consumer.
  const handleError = useCallback(
    (state2, err) => {
      setError(err);
      logger.error(`Error during ${state2}:`, err);
      onError?.(state2, err);
    },
    [onError]
  );
  // Begin microphone capture. NOTE(review): startRecording() on the
  // controller is async but not awaited; failures surface via onError.
  const startRecording = useCallback(() => {
    if (inputAudioControllerRef.current) {
      try {
        logger.debug("Starting recording");
        setVoiceAgentState("RECORDING");
        inputAudioControllerRef.current.startRecording({
          onError: (err) => {
            handleError("RECORDING", err);
          }
        });
        onStartRecording?.();
      } catch (err) {
        if (err instanceof Error) {
          handleError("RECORDING", err);
        }
      }
    }
  }, [onStartRecording, handleError]);
  // Stop capture; once the full recording Blob is available, send it to the
  // endpoint and (optionally) play the response.
  const stopRecording = useCallback(async () => {
    if (inputAudioControllerRef.current) {
      try {
        logger.debug("Stopping recording");
        await inputAudioControllerRef.current.stopRecording({
          onRecordingCompleted: async (allData) => {
            setVoiceAgentState("PROCESSING");
            try {
              const response = await endpointAdapterRef.current?.sendVoiceFile({
                blob: allData,
                metadata: requestData
              });
              if (!response) {
                throw new Error("No response received from endpoint");
              }
              setVoiceAgentState("RESPONDING");
              if (autoPlay) {
                if (downstreamMode === "STREAM") {
                  await outputAudioControllerRef.current?.playAudioStream({
                    response,
                    onComplete: () => {
                      setVoiceAgentState("READY");
                    }
                  });
                } else if (downstreamMode === "DOWNLOAD") {
                  const responseBlob = await response.blob();
                  await outputAudioControllerRef.current?.playAudio({
                    source: responseBlob,
                    onComplete: () => {
                      setVoiceAgentState("READY");
                    }
                  });
                }
              } else {
                setVoiceAgentState("READY");
              }
              // Hand the consumer the raw recording plus play/stop controls
              // so it can trigger playback manually (e.g. when autoPlay is
              // off). Note: the play callback consumes the same `response`
              // body; it can only be read once.
              onReceive?.(
                allData,
                async () => {
                  if (outputAudioControllerRef.current) {
                    if (downstreamMode === "STREAM") {
                      return outputAudioControllerRef.current.playAudioStream({
                        response,
                        onComplete: () => {
                          setVoiceAgentState("READY");
                        }
                      });
                    } else {
                      const responseBlob = await response.blob();
                      return outputAudioControllerRef.current.playAudio({
                        source: responseBlob,
                        onComplete: () => {
                          setVoiceAgentState("READY");
                        }
                      });
                    }
                  }
                },
                async () => {
                  if (outputAudioControllerRef.current) {
                    return outputAudioControllerRef.current.stopPlayback();
                  }
                }
              );
            } catch (err) {
              if (err instanceof Error) {
                handleError("PROCESSING", err);
              }
              // Always return to READY so the UI is not stuck after failure.
              setVoiceAgentState("READY");
            }
          }
        });
        onStopRecording?.();
      } catch (err) {
        if (err instanceof Error) {
          handleError("RECORDING", err);
        }
      }
    }
  }, [
    onStopRecording,
    requestData,
    autoPlay,
    downstreamMode,
    handleError,
    onReceive
  ]);
  // Lazily construct the adapter and audio controllers once; a consumer may
  // inject a custom adapter via endpointConfig.endpointAdapter.
  useEffect(() => {
    if (endpointAdapterRef.current) {
      return;
    }
    try {
      const endpointAdapter = endpointConfig.endpointAdapter ? endpointConfig.endpointAdapter : new BaseVoiceEndpointAdapter({
        baseUrl: endpointConfig.baseUrl,
        endpoint,
        headers: endpointConfig.headers
      });
      endpointAdapterRef.current = endpointAdapter;
      if (!inputAudioControllerRef.current) {
        inputAudioControllerRef.current = new WebAudioInputAudioController(
          audioConfig
        );
      }
      if (!outputAudioControllerRef.current) {
        outputAudioControllerRef.current = new AudioElementOutputAudioController();
      }
    } catch (err) {
      if (err instanceof Error) {
        handleError("READY", err);
      }
    }
  }, [endpoint, endpointConfig, audioConfig, handleError]);
  // Release microphone and playback resources on unmount.
  useEffect(() => {
    return () => {
      inputAudioControllerRef.current?.cleanup();
      outputAudioControllerRef.current?.cleanup();
    };
  }, []);
  return {
    startRecording,
    stopRecording,
    voiceAgentState,
    error,
    // audioContext is read at render time; it may lag controller creation by
    // one render since refs do not trigger re-renders.
    audioContext: inputAudioControllerRef.current?.audioContext || null
  };
}
|
|
752
|
+
|
|
753
|
+
// src/react/adapter/socket/VoiceSocketAdapter.ts
// Abstract base for socket-based voice transports. Holds connection state and
// a local Emitter (defined further down in this bundle; safe because it is
// only referenced at construction time) through which subclasses surface
// events to consumers.
var VoiceSocketAdapter = class {
  constructor(config) {
    this._isConnected = false;
    this.logger = new Logger("@m4trix/core > VoiceSocketAdapter");
    this.emitter = new Emitter();
    this.config = config;
  }
  // Subscribe to an adapter event. Note: listeners registered via `on`
  // receive the raw CustomEvent (read `.detail`), while `once` listeners
  // receive the unwrapped detail — see Emitter.
  on(event, listener) {
    this.emitter.on(event, listener);
  }
  // Remove a previously registered listener (same reference required).
  off(event, listener) {
    this.emitter.off(event, listener);
  }
  // Subscribe for a single invocation; listener receives the detail payload.
  once(event, listener) {
    this.emitter.once(event, listener);
  }
  // Dispatch an event with an optional data payload to local listeners.
  emit(event, data) {
    this.emitter.emit(event, data);
  }
  // Current connection flag, maintained by subclasses.
  isConnected() {
    return this._isConnected;
  }
};
|
|
777
|
+
var Emitter = class {
|
|
778
|
+
constructor() {
|
|
779
|
+
this.target = new EventTarget();
|
|
780
|
+
}
|
|
781
|
+
on(type, listener) {
|
|
782
|
+
this.target.addEventListener(type, listener);
|
|
783
|
+
}
|
|
784
|
+
off(type, listener) {
|
|
785
|
+
this.target.removeEventListener(type, listener);
|
|
786
|
+
}
|
|
787
|
+
once(type, listener) {
|
|
788
|
+
const wrapper = (event) => {
|
|
789
|
+
this.off(type, wrapper);
|
|
790
|
+
listener(event.detail);
|
|
791
|
+
};
|
|
792
|
+
this.on(type, wrapper);
|
|
793
|
+
}
|
|
794
|
+
emit(type, detail) {
|
|
795
|
+
this.target.dispatchEvent(new CustomEvent(type, { detail }));
|
|
796
|
+
}
|
|
797
|
+
};
|
|
798
|
+
/**
 * Socket.IO-backed voice transport: streams audio chunks / files to the
 * backend and re-emits server events on the adapter's own emitter.
 */
var VoiceSocketIOAdapter = class extends VoiceSocketAdapter {
  /**
   * @param {object} config - { baseUrl, headers, scope, autoReconnect }
   */
  constructor(config) {
    super(config);
    // Underlying socket.io client; created lazily on first connect().
    this.socket = null;
  }
  /**
   * Connect to the socket server. Resolves on the transport's "connect"
   * event for this attempt; rejects on the first "connect_error".
   *
   * FIX: the original re-registered every socket.io handler on each call,
   * so an auto-reconnect (which calls connect() again) duplicated all
   * listeners. Persistent handlers are now installed exactly once, when the
   * socket is created; each connect() call only attaches one-shot
   * resolve/reject handlers.
   */
  async connect() {
    return new Promise((resolve, reject) => {
      if (!this.socket) {
        this.socket = io(this.config.baseUrl, {
          extraHeaders: this.config.headers,
          autoConnect: true
        });
        this.registerSocketHandlers();
      } else if (this._isConnected) {
        // Already connected - nothing to wait for.
        resolve();
        return;
      }
      // Settle this attempt exactly once.
      this.socket.once("connect", () => resolve());
      this.socket.once("connect_error", (error) => reject(error));
    });
  }
  /**
   * Internal: wire up the persistent socket.io handlers. Called exactly
   * once, immediately after the socket is created.
   */
  registerSocketHandlers() {
    this.socket.on("connect", () => {
      this._isConnected = true;
      this.logger.debug("Connected to socket");
      this.emit("connect");
    });
    this.socket.on("disconnect", () => {
      this._isConnected = false;
      this.emit("disconnect");
      this.logger.debug("Disconnected from socket");
      if (this.config.autoReconnect) {
        // Fire-and-forget reconnect; surface failures through the logger
        // instead of leaving an unhandled rejection (original floated it).
        this.connect().catch(
          (err) => this.logger.error("Reconnect failed", err)
        );
      }
    });
    this.socket.on("connect_error", (error) => {
      this.logger.error("Error connecting to socket", error);
      this.emit("error", error);
    });
    this.socket.on("voice:chunk_received", (chunk) => {
      this.logger.debug("Received voice chunk", chunk.byteLength);
      this.onVoiceChunkReceived(chunk);
    });
    this.socket.on("voice:received_end_of_response_stream", () => {
      this.logger.debug("Received end of response stream");
      this.onReceivedEndOfResponseStream();
    });
    this.socket.on("voice:file_received", (blob) => {
      this.logger.debug("Received voice file");
      this.onVoiceFileReceived(blob);
    });
    this.socket.on("control-message", (message) => {
      this.logger.debug("Received control message", message);
      this.emit("control-message", message);
    });
  }
  /** Tear down the socket and reset connection state. */
  disconnect() {
    this.socket?.disconnect();
    this.socket = null;
    this._isConnected = false;
  }
  /** Expose the raw socket.io client (null before first connect()). */
  exposeSocket() {
    return this.socket;
  }
  /**
   * Send one audio chunk upstream; Blobs are converted to ArrayBuffers.
   * @throws {Error} when the socket is missing or not connected.
   */
  async sendVoiceChunk(chunk, metadata) {
    this.logger.debug(
      "Sending voice chunk %i",
      chunk instanceof Blob ? chunk.size : chunk.byteLength
    );
    // FIX: original tested `!this.isConnected` — the method object, always
    // truthy — so a disconnected-but-existing socket was never rejected.
    if (!this.socket || !this.isConnected())
      throw new Error("Socket not connected");
    let chunkToSend;
    if (chunk instanceof Blob) {
      chunkToSend = await chunk.arrayBuffer();
    } else {
      chunkToSend = chunk;
    }
    this.logger.debug("[Socket] Sending voice chunk", chunkToSend.byteLength);
    this.socket.emit("voice:send_chunk", chunkToSend, metadata);
    this.emit("chunk_sent", chunk);
  }
  /**
   * Send a complete recording as a single payload.
   * @throws {Error} when the socket is missing or not connected.
   */
  sendVoiceFile(blob, metadata) {
    this.logger.debug("Sending voice file", blob, metadata);
    if (!this.socket || !this.isConnected())
      throw new Error("Socket not connected");
    this.socket.emit("voice:send_file", blob, metadata);
    this.emit("file-sent", blob);
  }
  /**
   * Signal the server that the current voice message is complete.
   * @throws {Error} when the socket is missing or not connected.
   */
  commitVoiceMessage() {
    if (!this.socket || !this.isConnected())
      throw new Error("Socket not connected");
    this.socket.emit("voice:commit");
  }
  /** Re-emit an incoming audio chunk on the adapter's own emitter. */
  onVoiceChunkReceived(chunk) {
    this.emit("chunk-received", chunk);
  }
  /** Re-emit an incoming complete audio file. */
  onVoiceFileReceived(blob) {
    this.emit("file-received", blob);
  }
  /** Re-emit the end-of-response-stream marker. */
  onReceivedEndOfResponseStream() {
    this.emit("received-end-of-response-stream");
  }
};
|
|
894
|
+
|
|
895
|
+
// src/react/utility/audio/WebAudioOutputAudioController.ts
// Downstream PCM format: 24 kHz, mono, signed 16-bit samples.
var STREAM_SAMPLE_RATE = 24000;
var CHANNELS = 1;
// Each scheduled AudioBuffer covers a quarter-second slice of audio.
var SLICE_DURATION_S = 0.25;
var FRAMES_PER_SLICE = Math.floor(STREAM_SAMPLE_RATE * SLICE_DURATION_S);
var BYTES_PER_SLICE = FRAMES_PER_SLICE * 2; // 16-bit mono = 2 bytes per frame
// Minimum scheduling headroom (seconds) ahead of AudioContext.currentTime.
var SCHED_TOLERANCE = 0.05;
|
|
902
|
+
/**
 * Web Audio implementation of the output-audio controller.
 *
 * Plays either a complete audio file (playAudio) or a live PCM chunk stream
 * (initializeChunkStream). Streamed int16 PCM is sliced into fixed-size
 * AudioBuffers and scheduled back-to-back on a shared GainNode.
 */
var WebAudioOutputAudioController = class extends OutputAudioController {
  constructor() {
    super("@m4trix/core > WebAudioOutputAudioController");
    this.audioCtx = new AudioContext();
    this.gain = this.audioCtx.createGain();
    // Absolute AudioContext time at which the next buffer starts.
    this.nextPlayTime = 0;
    // All currently scheduled/playing AudioBufferSourceNodes.
    this.activeSources = /* @__PURE__ */ new Set();
    this.userGestureHookAttached = false;
    this.gain.connect(this.audioCtx.destination);
    this.resetScheduler();
  }
  // ── One-shot playback ────────────────────────────────────────────────
  /**
   * Decode and play a complete audio source (URL string or Blob).
   * Any ongoing playback is stopped first; onComplete fires when the
   * decoded buffer finishes.
   */
  async playAudio({
    source,
    onComplete
  }) {
    await this.stopPlayback();
    const buf = await this.sourceToArrayBuffer(source);
    const decoded = await this.decode(buf);
    await this.ensureContextRunning();
    const src = this.createSource(decoded, this.audioCtx.currentTime);
    src.onended = () => {
      this.activeSources.delete(src);
      onComplete?.();
    };
  }
  // Intentionally a no-op in this implementation.
  async playAudioStream() {
  }
  // ── PCM streaming ────────────────────────────────────────────────────
  /**
   * Start a new raw-PCM chunk stream and return its control functions.
   *
   * addChunkToStream accepts Blob or ArrayBuffer chunks of int16 PCM,
   * accumulates them, and schedules one AudioBuffer per full slice.
   * endChunkStream marks the stream finished and arranges for onComplete
   * to fire once the last scheduled source ends.
   */
  async initializeChunkStream({
    onComplete
  }) {
    await this.stopPlayback();
    await this.ensureContextRunning();
    this.resetScheduler();
    let streamEnded = false;
    // Bytes received but not yet scheduled (carry-over between chunks).
    let pending = new Uint8Array(0);
    const addChunkToStream = async (pkt) => {
      if (streamEnded) {
        this.logger.warn("Attempt to add chunk after stream ended \u2013 ignoring.");
        return;
      }
      const bytes = new Uint8Array(
        pkt instanceof Blob ? await pkt.arrayBuffer() : pkt
      );
      if (bytes.length === 0)
        return;
      const merged = new Uint8Array(pending.length + bytes.length);
      merged.set(pending);
      merged.set(bytes, pending.length);
      pending = merged;
      // An odd total means the buffer ends mid-sample (int16 = 2 bytes);
      // wait for the next chunk before slicing.
      if (pending.length % 2 === 1)
        return;
      while (pending.length >= BYTES_PER_SLICE) {
        const sliceBytes = pending.slice(0, BYTES_PER_SLICE);
        pending = pending.slice(BYTES_PER_SLICE);
        // Copy to a standalone, 2-byte-aligned ArrayBuffer so the
        // Int16Array view is valid regardless of the slice's offset.
        const aligned = sliceBytes.buffer.slice(
          sliceBytes.byteOffset,
          sliceBytes.byteOffset + sliceBytes.byteLength
        );
        const int16 = new Int16Array(aligned);
        const buf = this.audioCtx.createBuffer(
          CHANNELS,
          int16.length,
          STREAM_SAMPLE_RATE
        );
        const data = buf.getChannelData(0);
        // Convert signed 16-bit PCM to float samples in [-1, 1).
        for (let i = 0; i < int16.length; i++)
          data[i] = int16[i] / 32768;
        this.scheduleBuffer(buf);
      }
    };
    const endChunkStream = () => {
      if (streamEnded)
        return;
      streamEnded = true;
      if (onComplete) {
        if (this.activeSources.size === 0)
          onComplete();
        else {
          // NOTE(review): takes the most recently *inserted* source and
          // assumes it ends last — holds here because buffers are
          // scheduled strictly back-to-back; confirm if scheduling changes.
          const last = Array.from(this.activeSources).pop();
          if (last) {
            const prev = last.onended;
            last.onended = (e) => {
              if (prev)
                prev.call(last, e);
              onComplete();
            };
          }
        }
      }
    };
    return { addChunkToStream, endChunkStream };
  }
  // ── Buffer scheduling helpers ────────────────────────────────────────
  /**
   * Schedule a decoded buffer immediately after the previously scheduled
   * one, keeping at least SCHED_TOLERANCE of headroom from "now".
   */
  scheduleBuffer(buf) {
    if (this.nextPlayTime < this.audioCtx.currentTime + SCHED_TOLERANCE) {
      this.nextPlayTime = this.audioCtx.currentTime + SCHED_TOLERANCE;
    }
    this.createSource(buf, this.nextPlayTime);
    this.nextPlayTime += buf.duration;
  }
  /**
   * Create, connect and start a source node for `buf` at time `when`,
   * tracking it in activeSources until it ends.
   */
  createSource(buf, when) {
    const src = this.audioCtx.createBufferSource();
    src.buffer = buf;
    src.connect(this.gain);
    src.start(when);
    this.activeSources.add(src);
    src.onended = () => {
      this.activeSources.delete(src);
    };
    return src;
  }
  /** Reset the schedule cursor to the context's current time. */
  resetScheduler() {
    this.nextPlayTime = this.audioCtx.currentTime;
  }
  // ── External resource helpers ────────────────────────────────────────
  /**
   * Resolve a source (URL string or Blob) to an ArrayBuffer.
   * Rejects with the HTTP status text when a fetch fails.
   */
  sourceToArrayBuffer(src) {
    return typeof src === "string" ? fetch(src).then((r) => {
      if (!r.ok)
        throw new Error(`${r.status}`);
      return r.arrayBuffer();
    }) : src.arrayBuffer();
  }
  /** Promise wrapper around the callback-style decodeAudioData API. */
  decode(buf) {
    return new Promise(
      (res, rej) => this.audioCtx.decodeAudioData(buf, res, rej)
    );
  }
  // ── Lifecycle methods ────────────────────────────────────────────────
  /** Stop and disconnect every active source, then reset the scheduler. */
  async stopPlayback() {
    for (const src of this.activeSources) {
      try {
        // stop() throws if the source was never started or already stopped;
        // that is harmless here.
        src.stop();
      } catch {
      }
      src.disconnect();
    }
    this.activeSources.clear();
    this.resetScheduler();
  }
  /**
   * Stop playback and close the AudioContext.
   * NOTE(review): stopPlayback() is async and not awaited here; its body is
   * effectively synchronous today, but confirm if it ever gains real awaits.
   */
  cleanup() {
    this.stopPlayback();
    if (this.audioCtx.state !== "closed")
      this.audioCtx.close();
  }
  // ── Autoplay-policy helper ───────────────────────────────────────────
  /**
   * Try to resume a suspended AudioContext. If the browser's autoplay
   * policy blocks the resume, attach a one-time document click handler
   * that retries on the next user gesture.
   */
  async ensureContextRunning() {
    if (this.audioCtx.state !== "suspended")
      return;
    try {
      await this.audioCtx.resume();
    } catch {
    }
    if (this.audioCtx.state === "running")
      return;
    if (!this.userGestureHookAttached) {
      this.userGestureHookAttached = true;
      const resume = async () => {
        try {
          await this.audioCtx.resume();
        } catch {
        }
        if (this.audioCtx.state === "running")
          document.removeEventListener("click", resume);
      };
      document.addEventListener("click", resume);
    }
  }
};
|
|
1078
|
+
|
|
1079
|
+
// src/react/hooks/use-conversation/useSocketConversation.ts
// Module-level side effect: turns on logging for the whole package.
Logger.enableGlobalLogging();
/**
 * React hook wiring microphone input, a socket transport and Web Audio
 * output into a push-to-talk voice-agent conversation loop.
 *
 * States cycle READY -> RECORDING -> PROCESSING -> DOWNSTREAMING -> READY.
 * With upstreamMode "STREAM_WHILE_TALK" (default) chunks are sent while
 * recording and a commit is sent on stop; otherwise the full recording is
 * sent as one file on stop.
 */
function useSocketConversation({
  scope,
  onStartRecording,
  onStopRecording,
  onReceive,
  upstreamMode = "STREAM_WHILE_TALK",
  onError,
  audioConfig = {},
  socketConfig = {}
}) {
  const { current: logger } = useRef(
    new Logger("SuTr > useSocketConversation")
  );
  // Controllers and adapter live in refs so they survive re-renders.
  const inputAudioControllerRef = useRef(void 0);
  const outputAudioControllerRef = useRef(
    void 0
  );
  const socketAdapterRef = useRef(void 0);
  const [socket, setSocket] = useState(null);
  const [voiceAgentState, setVoiceAgentState] = useState("READY");
  const [error, setError] = useState(null);
  const shouldStreamWhileTalk = upstreamMode === "STREAM_WHILE_TALK";
  // Record the error in state, log it, and forward it to the caller
  // together with the state in which it occurred.
  const handleError = useCallback(
    (state2, err) => {
      setError(err);
      logger.error(`Error during ${state2}:`, err);
      onError?.(state2, err);
    },
    [onError]
  );
  /**
   * Open a chunk stream on the output controller and subscribe the adapter's
   * "chunk-received" / end-of-stream events to it. Returns a cleanup
   * function that unsubscribes and ends the stream.
   * NOTE(review): the only caller (stopRecording) awaits this but discards
   * the returned cleanup, so listeners appear to accumulate across
   * conversation turns — confirm and wire up the cleanup.
   */
  const subscribeToSocketEventsForChunkDownstreaming = useCallback(
    async (socketAdapter) => {
      logger.debug("Setting up audio stream for receiving chunks");
      try {
        const { addChunkToStream, endChunkStream } = await outputAudioControllerRef.current.initializeChunkStream({
          mimeCodec: "audio/mpeg",
          onComplete: () => {
            logger.debug("Audio stream playback completed");
            setVoiceAgentState("READY");
          }
        });
        let chunkCount = 0;
        const chunkReceivedEmitter = async (chunk) => {
          // Only raw ArrayBuffer payloads are routed to the audio stream.
          if (chunk instanceof ArrayBuffer) {
            chunkCount++;
            logger.debug(
              `Received voice chunk #${chunkCount} from socket, size: ${chunk.byteLength} bytes`
            );
            if (!chunk || chunk.byteLength === 0) {
              logger.warn("Received empty chunk, skipping");
              return;
            }
            try {
              await addChunkToStream(chunk);
              logger.debug(
                `Successfully added chunk #${chunkCount} to audio stream`
              );
            } catch (err) {
              logger.error(
                `Failed to add chunk #${chunkCount} to audio stream`,
                err
              );
              if (err instanceof Error) {
                handleError("DOWNSTREAMING", err);
              }
            }
          }
        };
        socketAdapter.on("chunk-received", chunkReceivedEmitter);
        const endOfStreamEmitter = () => {
          logger.debug(
            `Received end of stream signal after ${chunkCount} chunks, ending chunk stream`
          );
          endChunkStream();
          setVoiceAgentState("READY");
        };
        socketAdapter.on("received-end-of-response-stream", endOfStreamEmitter);
        // Cleanup: detach both listeners and close the chunk stream.
        return () => {
          logger.debug("Cleaning up socket event listeners");
          socketAdapter.off("chunk-received", chunkReceivedEmitter);
          socketAdapter.off(
            "received-end-of-response-stream",
            endOfStreamEmitter
          );
          endChunkStream();
        };
      } catch (err) {
        if (err instanceof Error) {
          handleError("DOWNSTREAMING", err);
        }
        // Keep the return type stable even on failure.
        return () => {
        };
      }
    },
    [handleError]
  );
  /**
   * Connect the adapter and mirror its lifecycle events into hook state.
   * NOTE(review): listeners are attached *after* `await connect()`, so the
   * initial "connect" emission is missed; the state is set to READY by the
   * listener only on later reconnects — confirm whether that is intended.
   */
  const hookupSocketAdapter = useCallback(
    async (socketAdapter) => {
      logger.debug("Connecting to socket...");
      try {
        await socketAdapter.connect();
        socketAdapter.on("connect", () => {
          logger.debug("Socket adapter connected");
          setVoiceAgentState("READY");
        });
        socketAdapter.on("disconnect", () => {
          logger.debug("Socket adapter disconnected");
        });
        socketAdapter.on("error", (err) => {
          if (err instanceof Error) {
            handleError(voiceAgentState, err);
          } else {
            handleError(voiceAgentState, new Error("Unknown error"));
          }
        });
        setSocket(socketAdapter.exposeSocket());
      } catch (err) {
        if (err instanceof Error) {
          handleError("READY", err);
        }
      }
    },
    [handleError, voiceAgentState]
  );
  /**
   * Begin capturing microphone audio. In STREAM_WHILE_TALK mode each
   * recorded chunk is forwarded to the socket as it arrives.
   */
  const startRecording = useCallback(() => {
    if (inputAudioControllerRef.current) {
      try {
        logger.debug("Starting recording");
        setVoiceAgentState("RECORDING");
        inputAudioControllerRef.current.startRecording({
          onRecordedChunk: async (chunk) => {
            if (shouldStreamWhileTalk) {
              try {
                await socketAdapterRef.current?.sendVoiceChunk(chunk);
              } catch (err) {
                if (err instanceof Error) {
                  handleError("RECORDING", err);
                }
              }
            }
          }
        });
        onStartRecording?.();
      } catch (err) {
        if (err instanceof Error) {
          handleError("RECORDING", err);
        }
      }
    }
  }, [onStartRecording, shouldStreamWhileTalk, handleError]);
  /**
   * Stop capturing, flush the recording upstream (commit or single file),
   * then switch to DOWNSTREAMING and start playing the response stream.
   * NOTE(review): `shouldStreamWhileTalk` is read inside but missing from
   * the dependency list — a stale closure if `upstreamMode` changes.
   */
  const stopRecording = useCallback(async () => {
    if (inputAudioControllerRef.current) {
      try {
        logger.debug("Stopping recording");
        await inputAudioControllerRef.current.stopRecording({
          onRecordingCompleted: async (allData) => {
            setVoiceAgentState("PROCESSING");
            try {
              if (shouldStreamWhileTalk) {
                logger.debug("Committing voice message");
                await socketAdapterRef.current?.commitVoiceMessage();
              } else {
                await socketAdapterRef.current?.sendVoiceFile(allData);
              }
              setVoiceAgentState("DOWNSTREAMING");
              await subscribeToSocketEventsForChunkDownstreaming(
                socketAdapterRef.current
              );
              // Hand the caller the full recording plus two interrupt
              // callbacks (both currently stop playback).
              onReceive?.(
                allData,
                async () => {
                  if (outputAudioControllerRef.current) {
                    return outputAudioControllerRef.current.stopPlayback();
                  }
                },
                async () => {
                  if (outputAudioControllerRef.current) {
                    return outputAudioControllerRef.current.stopPlayback();
                  }
                }
              );
            } catch (err) {
              if (err instanceof Error) {
                handleError("PROCESSING", err);
              }
            }
          }
        });
        onStopRecording?.();
      } catch (err) {
        if (err instanceof Error) {
          handleError("RECORDING", err);
        }
      }
    }
  }, [
    onStopRecording,
    handleError,
    subscribeToSocketEventsForChunkDownstreaming,
    onReceive
  ]);
  // One-time setup: build (or adopt) the socket adapter and both audio
  // controllers. Guarded by the adapter ref so re-renders are no-ops.
  useEffect(() => {
    if (socketAdapterRef.current) {
      return;
    }
    try {
      const socketAdapter = socketConfig.socketAdapter ? socketConfig.socketAdapter : new VoiceSocketIOAdapter({
        scope,
        baseUrl: socketConfig.baseUrl || "",
        headers: socketConfig.headers
      });
      socketAdapterRef.current = socketAdapter;
      if (!socketAdapter.isConnected()) {
        hookupSocketAdapter(socketAdapter);
      }
      if (!inputAudioControllerRef.current) {
        inputAudioControllerRef.current = new WebAudioInputAudioController(
          audioConfig
        );
      }
      if (!outputAudioControllerRef.current) {
        outputAudioControllerRef.current = new WebAudioOutputAudioController();
      }
    } catch (err) {
      if (err instanceof Error) {
        handleError("READY", err);
      }
    }
  }, [scope, socketConfig, hookupSocketAdapter, audioConfig, handleError]);
  // Unmount cleanup: release audio resources and drop the socket.
  useEffect(() => {
    return () => {
      inputAudioControllerRef.current?.cleanup();
      outputAudioControllerRef.current?.cleanup();
      if (socketAdapterRef.current) {
        socketAdapterRef.current.disconnect();
        socketAdapterRef.current = void 0;
      }
    };
  }, []);
  return {
    startRecording,
    stopRecording,
    voiceAgentState,
    error,
    audioContext: inputAudioControllerRef.current?.audioContext || null,
    socket
  };
}
|
|
1329
|
+
var AiCursorComponentStyle = css`
|
|
1330
|
+
:host {
|
|
1331
|
+
--ai-local-cursor-size: var(--sk-ai-cursor-size, 1rem);
|
|
1332
|
+
--ai-local-cursor-label-padding: var(
|
|
1333
|
+
--sk-ai-cursor-label-padding,
|
|
1334
|
+
0.25rem 0.25rem
|
|
1335
|
+
);
|
|
1336
|
+
--ai-local-cursor-border-radius: var(--sk-ai-cursor-border-radius, 0.25rem);
|
|
1337
|
+
--ai-local-label-offset: var(--sk-ai-cursor-label-offset, 1rem);
|
|
1338
|
+
|
|
1339
|
+
--ai-local-label-font-size: var(--sk-ai-cursor-label-font-size, 12px);
|
|
1340
|
+
--ai-local-label-font-weight: var(--sk-ai-cursor-label-font-weight, bold);
|
|
1341
|
+
--ai-local-label-color: var(--sk-ai-cursor-label-color, white);
|
|
1342
|
+
--ai-local-label-background-color: var(
|
|
1343
|
+
--sk-ai-cursor-label-background-color,
|
|
1344
|
+
black
|
|
1345
|
+
);
|
|
1346
|
+
--ai-local-label-border-color: var(
|
|
1347
|
+
--sk-ai-cursor-label-border-color,
|
|
1348
|
+
white
|
|
1349
|
+
);
|
|
1350
|
+
--ai-local-label-border-width: var(
|
|
1351
|
+
--sk-ai-cursor-label-border-width,
|
|
1352
|
+
0.1rem
|
|
1353
|
+
);
|
|
1354
|
+
|
|
1355
|
+
color: black;
|
|
1356
|
+
stroke: white;
|
|
1357
|
+
position: absolute;
|
|
1358
|
+
/* Insetting in the parent element (body) */
|
|
1359
|
+
top: 0;
|
|
1360
|
+
left: 0;
|
|
1361
|
+
bottom: 0;
|
|
1362
|
+
right: 0;
|
|
1363
|
+
pointer-events: none;
|
|
1364
|
+
width: var(--ai-local-cursor-size);
|
|
1365
|
+
height: var(--ai-local-cursor-size);
|
|
1366
|
+
}
|
|
1367
|
+
|
|
1368
|
+
#cursor-graphic-parent {
|
|
1369
|
+
position: absolute;
|
|
1370
|
+
top: 0;
|
|
1371
|
+
left: 0;
|
|
1372
|
+
}
|
|
1373
|
+
|
|
1374
|
+
#label-text {
|
|
1375
|
+
position: absolute;
|
|
1376
|
+
color: white;
|
|
1377
|
+
font-size: 12px;
|
|
1378
|
+
font-weight: bold;
|
|
1379
|
+
padding: var(--ai-local-cursor-label-padding);
|
|
1380
|
+
border-radius: var(--ai-local-cursor-border-radius);
|
|
1381
|
+
|
|
1382
|
+
white-space: nowrap;
|
|
1383
|
+
overflow: hidden;
|
|
1384
|
+
text-overflow: ellipsis;
|
|
1385
|
+
|
|
1386
|
+
width: fit-content;
|
|
1387
|
+
min-width: fit-content;
|
|
1388
|
+
top: var(--ai-local-label-offset);
|
|
1389
|
+
left: var(--ai-local-label-offset);
|
|
1390
|
+
|
|
1391
|
+
border: var(--ai-local-label-border-width) solid
|
|
1392
|
+
var(--ai-local-label-border-color);
|
|
1393
|
+
background-color: var(--ai-local-label-background-color);
|
|
1394
|
+
color: var(--ai-local-label-color);
|
|
1395
|
+
font-size: var(--ai-local-label-font-size);
|
|
1396
|
+
font-weight: var(--ai-local-label-font-weight);
|
|
1397
|
+
}
|
|
1398
|
+
`;
|
|
1399
|
+
|
|
1400
|
+
// src/ui/ai-cursor/rendering/AiCursorComponent.ts
/**
 * Lit custom element rendering the AI cursor glyph plus a text label.
 *
 * This is a controlled component: the host registers callbacks through
 * `eventHooks` (define* setters), and the component animates itself with an
 * internal timeline in response to those callbacks.
 */
var AiCursorComponent = class extends LitElement {
  constructor() {
    super();
    // Default no-op hooks so the component is safe before the host wires
    // in real callbacks.
    this.eventHooks = {
      defineSetPosition: () => {
      },
      defineAddPositionToQueue: () => {
      },
      definePlayQueue: () => {
      },
      defineSetShowCursor: () => {
      }
    };
    this.isShowingCursor = true;
    this.labelText = "AI Cursor";
    this.cursorPosition = [0, 0];
    this._cursorRef = createRef();
    this._labelRef = createRef();
  }
  /**
   * After each update: once the cursor element ref is populated, hook up
   * the animation callbacks; if it went away, pause and refresh the
   * timeline instead.
   */
  updated(_changedProperties) {
    if (_changedProperties.has("_cursorRef")) {
      if (this._cursorRef.value) {
        this.hookUpCallbacks();
      } else {
        this._timeline?.pause();
        this._timeline?.refresh();
      }
    }
    super.updated(_changedProperties);
  }
  /** Render the cursor SVG and its label; both hide with isShowingCursor. */
  render() {
    const cursorSvg = html`
      <svg
        width=${24}
        height=${24}
        viewBox="0 0 100 100"
        fill="none"
        xmlns="http://www.w3.org/2000/svg"
      >
        <g clip-path="url(#clip0_3576_285)">
          <path
            class="cursor-path"
            d="M2.14849 7.04749C1.35153 4.07321 4.07319 1.35155 7.04747 2.14851L77.3148 20.9766C80.2891 21.7735 81.2853 25.4914 79.108 27.6687L27.6687 79.108C25.4914 81.2853 21.7735 80.2891 20.9766 77.3149L2.14849 7.04749Z"
            fill="currentColor"
          />
        </g>
        <defs>
          <clipPath id="clip0_3576_285">
            <rect width="100" height="100" fill="white" />
          </clipPath>
        </defs>
      </svg>
    `;
    return html`
      <span
        id="cursor-graphic-parent"
        ${ref(this._cursorRef)}
        ?hidden=${!this.isShowingCursor}
      >
        ${cursorSvg}
        <span
          ${ref(this._labelRef)}
          id="label-text"
          ?hidden=${!this.isShowingCursor}
        >${this.labelText}</span
        >
      </span>
    `;
  }
  // private methods
  /**
   * The primary way to control the cursor is using an external API.
   * This interface exposes controlling methods. The Lit Component itself is
   * intended to be a controlled component.
   *
   * Creates the animation timeline and hands each control callback
   * (set position, queue position, play queue, toggle visibility) back to
   * the host through the corresponding `define*` hook.
   */
  hookUpCallbacks() {
    const animationTarget = this._cursorRef.value;
    if (!animationTarget) {
      return;
    }
    this._timeline = createTimeline({ defaults: { duration: 750 } });
    if (!this._timeline) {
      return;
    }
    // Jump: near-instant move (duration 1 ms) played immediately.
    this.eventHooks.defineSetPosition((position) => {
      this._timeline?.add(animationTarget, {
        translateX: position[0],
        translateY: position[1],
        duration: 1
      });
      this._timeline?.play();
    });
    // Queue: append a 1 s tween without starting playback.
    this.eventHooks.defineAddPositionToQueue((position) => {
      this._timeline?.add(animationTarget, {
        translateX: position[0],
        translateY: position[1],
        duration: 1e3
      });
    });
    this.eventHooks.defineSetShowCursor((show) => {
      this.isShowingCursor = show;
    });
    this.eventHooks.definePlayQueue(() => {
      this._timeline?.play();
    });
  }
  // Getters
  /** @returns The rendered cursor graphic element (or undefined). */
  get cursorRef() {
    return this._cursorRef.value;
  }
  /** @returns The rendered label element (or undefined). */
  get labelRef() {
    return this._labelRef.value;
  }
};
|
|
1515
|
+
// Define scoped styles right with your component, in plain CSS
AiCursorComponent.styles = AiCursorComponentStyle;
// Compiled decorator output: the source used @property()/@state() on the
// class fields and @customElement on the class; the bundler lowered them to
// __decorateClass calls. The trailing `2` is the bundler helper's decorator
// kind flag for field decorators (presumably — generated code, do not edit
// by hand).
__decorateClass([
  property({
    type: Object
  })
], AiCursorComponent.prototype, "eventHooks", 2);
__decorateClass([
  property({ type: Boolean })
], AiCursorComponent.prototype, "isShowingCursor", 2);
__decorateClass([
  property({ type: String })
], AiCursorComponent.prototype, "labelText", 2);
__decorateClass([
  property({ type: Array })
], AiCursorComponent.prototype, "cursorPosition", 2);
__decorateClass([
  state()
], AiCursorComponent.prototype, "_cursorRef", 2);
__decorateClass([
  state()
], AiCursorComponent.prototype, "_labelRef", 2);
// Registers the component as the <ai-cursor> custom element.
AiCursorComponent = __decorateClass([
  customElement("ai-cursor")
], AiCursorComponent);
|
|
1540
|
+
|
|
1541
|
+
// src/ui/ai-cursor/rendering/index.ts
/**
 * Create an <ai-cursor> custom element, hand it the caller's event hooks,
 * and attach it to document.body.
 *
 * @param {{ eventHooks: object }} aiCursorProps - Props for the element.
 */
var mountAiCursor = ({ eventHooks }) => {
  const cursorElement = document.createElement("ai-cursor");
  cursorElement.eventHooks = eventHooks;
  document.body.appendChild(cursorElement);
};
|
|
1548
|
+
|
|
1549
|
+
// src/ui/ai-cursor/AiCursor.ts
/**
 * Imperative controller for the on-screen AI cursor.
 *
 * mount() renders the <ai-cursor> element and captures the control
 * callbacks it exposes; the instance methods then forward movement and
 * visibility commands through those callbacks (all optional-chained, so
 * calls before mount are silently ignored).
 */
var AiCursor = class _AiCursor {
  constructor() {
  }
  // Static constructors
  /** Create a cursor and mount it in one step. */
  static spawn() {
    const cursor = new _AiCursor();
    cursor.mount();
    return cursor;
  }
  /** Instantly place the cursor at the resolved position of `target`. */
  jumpTo(target) {
    const pos = targetToPosition(target);
    if (!pos) {
      return;
    }
    this.setPosition?.(pos);
  }
  /** Queue a single animated move toward `target` and start playback. */
  moveTo(target) {
    const pos = targetToPosition(target);
    if (!pos) {
      return;
    }
    this.addPositionToQueue?.(pos);
    this.playQueue?.();
  }
  /** Queue a series of moves (skipping unresolvable targets), then play. */
  scheduleMoves(targets) {
    for (const target of targets) {
      const pos = targetToPosition(target);
      if (pos) {
        this.addPositionToQueue?.(pos);
      }
    }
    this.playQueue?.();
  }
  /** Make the cursor visible. */
  show() {
    this.setShowCursor?.(true);
  }
  /** Hide the cursor. */
  hide() {
    this.setShowCursor?.(false);
  }
  /** Mount the rendering component and capture its control callbacks. */
  mount() {
    mountAiCursor({
      eventHooks: {
        defineSetPosition: (cb) => {
          this.setPosition = cb;
        },
        defineAddPositionToQueue: (cb) => {
          this.addPositionToQueue = cb;
        },
        definePlayQueue: (cb) => {
          this.playQueue = cb;
        },
        defineSetShowCursor: (cb) => {
          this.setShowCursor = cb;
        }
      }
    });
  }
};
|
|
1606
|
+
/**
 * Compute the center point of an element's bounding box.
 *
 * @param element - Anything exposing getBoundingClientRect().
 * @returns {[number, number]} [x, y] viewport coordinates of the center.
 */
function calculateClickPositionFromElement(element) {
  const { left, top, width, height } = element.getBoundingClientRect();
  return [left + width / 2, top + height / 2];
}
|
|
1610
|
+
/**
 * Resolve a cursor target to [x, y] viewport coordinates.
 *
 * Accepts, in priority order: a numeric [x, y] pair (returned as-is), an
 * HTMLElement (its bounding-box center), or a CSS selector string (center
 * of the first match). Anything else — or a selector with no match —
 * yields undefined.
 */
function targetToPosition(target) {
  const isCoordinatePair = Array.isArray(target) && target.length === 2 && typeof target[0] === "number" && typeof target[1] === "number";
  if (isCoordinatePair) {
    return target;
  }
  if (target instanceof HTMLElement) {
    return calculateClickPositionFromElement(target);
  }
  if (typeof target === "string") {
    const match = document.querySelector(target);
    if (match) {
      return calculateClickPositionFromElement(match);
    }
  }
  return void 0;
}
|
|
1623
|
+
|
|
1624
|
+
// src/stream/Pump.ts
|
|
1625
|
+
var Pump = class _Pump {
|
|
1626
|
+
constructor(src) {
|
|
1627
|
+
this.src = src;
|
|
1628
|
+
}
|
|
1629
|
+
/**
|
|
1630
|
+
* Wrap an existing AsyncIterable or Readable stream into a Pump
|
|
1631
|
+
*
|
|
1632
|
+
* @template U The type of data in the source stream
|
|
1633
|
+
* @param source The source stream to convert to a Pump (AsyncIterable, ReadableStream, or NodeJS.ReadableStream)
|
|
1634
|
+
* @returns A new Pump instance that wraps the source
|
|
1635
|
+
*/
|
|
1636
|
+
static from(source) {
|
|
1637
|
+
async function* gen() {
|
|
1638
|
+
let seq = 0;
|
|
1639
|
+
function isAsyncIterable(obj) {
|
|
1640
|
+
return Symbol.asyncIterator in obj;
|
|
1641
|
+
}
|
|
1642
|
+
function isWebReadableStream(obj) {
|
|
1643
|
+
return "getReader" in obj && typeof obj.getReader === "function";
|
|
1644
|
+
}
|
|
1645
|
+
function isNodeReadableStream(obj) {
|
|
1646
|
+
return "pipe" in obj && "on" in obj && typeof obj.pipe === "function" && typeof obj.on === "function";
|
|
1647
|
+
}
|
|
1648
|
+
if (isAsyncIterable(source)) {
|
|
1649
|
+
const iterator = source[Symbol.asyncIterator]();
|
|
1650
|
+
try {
|
|
1651
|
+
while (true) {
|
|
1652
|
+
const result = await iterator.next();
|
|
1653
|
+
if (result.done)
|
|
1654
|
+
break;
|
|
1655
|
+
yield {
|
|
1656
|
+
sequence: seq++,
|
|
1657
|
+
data: result.value,
|
|
1658
|
+
done: false
|
|
1659
|
+
};
|
|
1660
|
+
}
|
|
1661
|
+
} finally {
|
|
1662
|
+
}
|
|
1663
|
+
} else if (isWebReadableStream(source)) {
|
|
1664
|
+
const reader = source.getReader();
|
|
1665
|
+
try {
|
|
1666
|
+
while (true) {
|
|
1667
|
+
const result = await reader.read();
|
|
1668
|
+
if (result.done)
|
|
1669
|
+
break;
|
|
1670
|
+
yield {
|
|
1671
|
+
sequence: seq++,
|
|
1672
|
+
data: result.value,
|
|
1673
|
+
done: false
|
|
1674
|
+
};
|
|
1675
|
+
}
|
|
1676
|
+
} finally {
|
|
1677
|
+
reader.releaseLock();
|
|
1678
|
+
}
|
|
1679
|
+
} else if (isNodeReadableStream(source)) {
|
|
1680
|
+
try {
|
|
1681
|
+
for await (const chunk of source) {
|
|
1682
|
+
yield {
|
|
1683
|
+
sequence: seq++,
|
|
1684
|
+
data: chunk,
|
|
1685
|
+
done: false
|
|
1686
|
+
};
|
|
1687
|
+
}
|
|
1688
|
+
} catch (error) {
|
|
1689
|
+
console.error("Error reading from Node.js stream:", error);
|
|
1690
|
+
throw error;
|
|
1691
|
+
}
|
|
1692
|
+
}
|
|
1693
|
+
yield { sequence: seq, data: void 0, done: true };
|
|
1694
|
+
}
|
|
1695
|
+
return new _Pump(gen());
|
|
1696
|
+
}
|
|
1697
|
+
/**
|
|
1698
|
+
* Sync or async map over the data portion of each chunk
|
|
1699
|
+
*
|
|
1700
|
+
* @template U The output type after transformation
|
|
1701
|
+
* @param fn The mapping function that transforms each chunk
|
|
1702
|
+
* @returns A new Pump instance with the transformed data
|
|
1703
|
+
*/
|
|
1704
|
+
map(fn) {
|
|
1705
|
+
async function* gen() {
|
|
1706
|
+
for await (const { sequence, data, done } of this.src) {
|
|
1707
|
+
if (done) {
|
|
1708
|
+
const out2 = data !== void 0 ? await fn(data) : void 0;
|
|
1709
|
+
yield { sequence, data: out2, done };
|
|
1710
|
+
break;
|
|
1711
|
+
}
|
|
1712
|
+
const out = await fn(data);
|
|
1713
|
+
yield { sequence, data: out, done };
|
|
1714
|
+
}
|
|
1715
|
+
}
|
|
1716
|
+
return new _Pump(gen.call(this));
|
|
1717
|
+
}
|
|
1718
|
+
  /**
   * Stateful map allows processing stream chunks with a persistent context object.
   *
   * The context is initialized when the first chunk arrives and can be updated with each chunk.
   * This is useful for maintaining state across the stream processing.
   *
   * If you plan to use sockets you should rather opt for asyncStatefulMap.
   *
   * The pipe closes only after all processing is complete, including any final operations in onClose.
   *
   * TODO: Un-tested
   *
   * @param handlers Object containing callback functions for stream processing
   * @param handlers.onFirstChunk Function called when the first chunk arrives, initializes the context
   * @param handlers.onChunk Function called for each subsequent chunk, updates the context
   * @param handlers.onClose Optional function called when the stream closes, allows final processing
   * @returns A new Pump instance with transformed data
   */
  statefulMap(handlers) {
    const { src } = this;
    const gen = async function* () {
      let context;
      let initialized = false;
      // Last data chunk seen before the terminal chunk; passed to onClose.
      let lastChunk;
      // Output sequence numbers are re-issued here, independent of the input sequence.
      let seq = 0;
      // Handlers emit by pushing into this queue; it is drained after each handler call.
      const queue = [];
      const yieldData = (data) => {
        queue.push(data);
      };
      for await (const { data, done } of src) {
        if (done) {
          // NOTE(review): onClose is skipped when context is falsy (e.g. the
          // stream was empty or onFirstChunk returned undefined) — confirm intended.
          if (context && handlers.onClose) {
            await handlers.onClose(lastChunk, context, yieldData);
          }
          // Flush anything onClose queued, then emit the terminal chunk.
          while (queue.length > 0) {
            yield { sequence: seq++, data: queue.shift(), done: false };
          }
          yield {
            sequence: seq++,
            data: void 0,
            done: true
          };
          break;
        }
        if (!initialized) {
          context = await handlers.onFirstChunk(data, yieldData);
          initialized = true;
        } else if (context) {
          // NOTE(review): if onFirstChunk returned a falsy context, subsequent
          // chunks silently skip onChunk — confirm this is the intended contract.
          context = await handlers.onChunk(data, context, yieldData);
        }
        lastChunk = data;
        // Drain whatever the handler queued for this input chunk.
        while (queue.length > 0) {
          yield { sequence: seq++, data: queue.shift(), done: false };
        }
      }
    };
    return new _Pump(gen());
  }
|
|
1776
|
+
  /**
   * Async map means that each incoming chunk is causing an async operation that when it completes
   * should yield a new chunk.
   * The pipe closes only after you unlock the pipe by using the unlockCloseEvent callback.
   *
   * Stateful refers to the fact that you can create your own small context object that is passed in the subsequent callbacks.
   * This allows you to keep track of things like a socket connection.
   *
   * Why is this nice? Well if you use things like a socket the pipe might have received the close event,
   * before you got any or all of your socket responses. Sockets don't fit into the standard promise pattern,
   * which makes it harder to wait for them.
   *
   * TODO: Un-tested
   *
   * @param handlers Object containing callback functions for stream processing
   * @param handlers.onFirstChunk Function called when the first chunk arrives, initializes the context
   * @param handlers.onChunk Function called for each subsequent chunk, updates the context
   * @param handlers.onClose Optional function called when the stream closes, allows final processing
   * @returns A new Pump instance with transformed data
   */
  asyncStatefulMap(handlers) {
    const { src } = this;
    const gen = async function* () {
      let context;
      let initialized = false;
      // Last data chunk seen before the terminal chunk; passed to onClose.
      let lastChunk;
      // Output sequence numbers are re-issued here, independent of the input sequence.
      let seq = 0;
      // The pipe stays open after the source ends until a handler calls unlockCloseEvent.
      let lockedCloseEvent = true;
      // Handlers emit by pushing into this queue; it is drained after each handler call
      // and while waiting for the unlock.
      const queue = [];
      const yieldData = (data) => {
        queue.push(data);
      };
      const unlockCloseEvent = () => {
        lockedCloseEvent = false;
      };
      for await (const { data, done } of src) {
        if (done) {
          // NOTE(review): onClose is skipped when context is falsy — and then
          // nothing ever unlocks, so the 10s timeout below is what ends the pipe.
          if (context && handlers.onClose) {
            await handlers.onClose(
              lastChunk,
              context,
              yieldData,
              unlockCloseEvent
            );
          }
          // Poll every 5 ms for late async results (e.g. socket replies),
          // giving up after 10 seconds (1e4 ms) if nothing unlocks the close.
          const timestamp = Date.now();
          while (lockedCloseEvent && Date.now() - timestamp < 1e4) {
            while (queue.length > 0) {
              yield { sequence: seq++, data: queue.shift(), done: false };
            }
            await new Promise((resolve) => setTimeout(resolve, 5));
          }
          // Final drain before emitting the terminal chunk.
          while (queue.length > 0) {
            yield { sequence: seq++, data: queue.shift(), done: false };
          }
          yield {
            sequence: seq++,
            data: void 0,
            done: true
          };
          break;
        }
        if (!initialized) {
          context = await handlers.onFirstChunk(
            data,
            yieldData,
            unlockCloseEvent
          );
          initialized = true;
        } else if (context) {
          // NOTE(review): falsy context silently skips onChunk — confirm intended.
          context = await handlers.onChunk(
            data,
            context,
            yieldData,
            unlockCloseEvent
          );
        }
        lastChunk = data;
        // Drain whatever the handler queued for this input chunk.
        while (queue.length > 0) {
          yield { sequence: seq++, data: queue.shift(), done: false };
        }
      }
    };
    return new _Pump(gen());
  }
|
|
1861
|
+
/**
|
|
1862
|
+
* Filter items based on a predicate
|
|
1863
|
+
*
|
|
1864
|
+
* @param predicate A function that determines whether to keep each chunk
|
|
1865
|
+
* @returns A new Pump instance containing only chunks that passed the predicate
|
|
1866
|
+
*/
|
|
1867
|
+
filter(predicate) {
|
|
1868
|
+
async function* gen() {
|
|
1869
|
+
for await (const { sequence, data, done } of this.src) {
|
|
1870
|
+
if (done) {
|
|
1871
|
+
yield { sequence, data, done: true };
|
|
1872
|
+
break;
|
|
1873
|
+
}
|
|
1874
|
+
const keep = await predicate(data);
|
|
1875
|
+
if (keep) {
|
|
1876
|
+
yield { sequence, data, done: false };
|
|
1877
|
+
}
|
|
1878
|
+
}
|
|
1879
|
+
}
|
|
1880
|
+
return new _Pump(gen.call(this));
|
|
1881
|
+
}
|
|
1882
|
+
/**
|
|
1883
|
+
* Bundles (accumulates) chunks together based on a condition rather than a fixed size.
|
|
1884
|
+
*
|
|
1885
|
+
* This is useful when you need to group chunks dynamically based on their content or other criteria.
|
|
1886
|
+
*
|
|
1887
|
+
* Example: Bundling text chunks with a maximum character limit
|
|
1888
|
+
*
|
|
1889
|
+
* Input chunks: ["Hello", " this", " is", " a few", " chunks", " of text"]
|
|
1890
|
+
* With max size of 10 characters:
|
|
1891
|
+
* - First bundle: ["Hello", " this"] (10 chars)
|
|
1892
|
+
* - Second bundle: [" is", " a few"] (8 chars)
|
|
1893
|
+
* - Third bundle: [" chunks", " of text"] (13 chars)
|
|
1894
|
+
*
|
|
1895
|
+
* @param closeBundleCondition - Function that determines when to close the current bundle
|
|
1896
|
+
* Returns true when the current bundle should be emitted
|
|
1897
|
+
* Parameters:
|
|
1898
|
+
* - chunk: The current chunk being processed
|
|
1899
|
+
* - accumulatedChunks: Array of chunks in the current bundle
|
|
1900
|
+
*
|
|
1901
|
+
* @returns A pump that emits arrays of bundled items
|
|
1902
|
+
*/
|
|
1903
|
+
bundle(closeBundleCondition) {
|
|
1904
|
+
async function* gen() {
|
|
1905
|
+
let buffer = [];
|
|
1906
|
+
let lastSequence = 0;
|
|
1907
|
+
for await (const { sequence, data, done } of this.src) {
|
|
1908
|
+
lastSequence = sequence;
|
|
1909
|
+
if (done) {
|
|
1910
|
+
if (buffer.length > 0) {
|
|
1911
|
+
yield { sequence, data: [...buffer], done: false };
|
|
1912
|
+
}
|
|
1913
|
+
yield {
|
|
1914
|
+
sequence: lastSequence,
|
|
1915
|
+
data: void 0,
|
|
1916
|
+
done: true
|
|
1917
|
+
};
|
|
1918
|
+
break;
|
|
1919
|
+
}
|
|
1920
|
+
const shouldClose = await closeBundleCondition(data, buffer);
|
|
1921
|
+
buffer.push(data);
|
|
1922
|
+
if (shouldClose) {
|
|
1923
|
+
yield {
|
|
1924
|
+
sequence: lastSequence,
|
|
1925
|
+
data: [...buffer],
|
|
1926
|
+
done: false
|
|
1927
|
+
};
|
|
1928
|
+
buffer = [];
|
|
1929
|
+
}
|
|
1930
|
+
}
|
|
1931
|
+
}
|
|
1932
|
+
return new _Pump(gen.call(this));
|
|
1933
|
+
}
|
|
1934
|
+
/**
|
|
1935
|
+
* Tap into each chunk without altering it
|
|
1936
|
+
*
|
|
1937
|
+
* @param fn A function that receives each chunk but doesn't affect the stream
|
|
1938
|
+
* @returns The same pump instance with unmodified data
|
|
1939
|
+
*/
|
|
1940
|
+
onChunk(fn) {
|
|
1941
|
+
async function* gen() {
|
|
1942
|
+
for await (const chunk of this.src) {
|
|
1943
|
+
if (chunk.data === void 0 && chunk.done) {
|
|
1944
|
+
yield chunk;
|
|
1945
|
+
}
|
|
1946
|
+
await fn(chunk.data);
|
|
1947
|
+
yield chunk;
|
|
1948
|
+
}
|
|
1949
|
+
}
|
|
1950
|
+
return new _Pump(gen.call(this));
|
|
1951
|
+
}
|
|
1952
|
+
/**
|
|
1953
|
+
* Collect all chunks in the stream and run a callback when the stream is done.
|
|
1954
|
+
* The callback receives an array of all chunks that passed through.
|
|
1955
|
+
*
|
|
1956
|
+
* This is useful for analytics, logging, or processing the complete stream history
|
|
1957
|
+
* after all chunks have been received.
|
|
1958
|
+
*
|
|
1959
|
+
* @param fn - Callback function that receives the array of all chunks when the stream is complete
|
|
1960
|
+
* @returns The same pump, for chaining
|
|
1961
|
+
*/
|
|
1962
|
+
onClose(fn) {
|
|
1963
|
+
async function* gen() {
|
|
1964
|
+
const history = [];
|
|
1965
|
+
for await (const chunk of this.src) {
|
|
1966
|
+
if (chunk.data !== void 0) {
|
|
1967
|
+
history.push(chunk.data);
|
|
1968
|
+
}
|
|
1969
|
+
if (chunk.done) {
|
|
1970
|
+
await fn(history);
|
|
1971
|
+
}
|
|
1972
|
+
yield chunk;
|
|
1973
|
+
}
|
|
1974
|
+
}
|
|
1975
|
+
return new _Pump(gen.call(this));
|
|
1976
|
+
}
|
|
1977
|
+
/**
|
|
1978
|
+
* Batch `n` chunks into arrays before emitting
|
|
1979
|
+
*
|
|
1980
|
+
* @param n The number of chunks to batch together
|
|
1981
|
+
* @returns A new Pump instance that emits arrays of batched chunks
|
|
1982
|
+
*/
|
|
1983
|
+
batch(n) {
|
|
1984
|
+
async function* gen() {
|
|
1985
|
+
let buffer = [];
|
|
1986
|
+
for await (const chunk of this.src) {
|
|
1987
|
+
if (chunk.done) {
|
|
1988
|
+
if (chunk.data === void 0) {
|
|
1989
|
+
yield {
|
|
1990
|
+
sequence: buffer[0].sequence,
|
|
1991
|
+
data: buffer.map((c) => c.data),
|
|
1992
|
+
done: false
|
|
1993
|
+
};
|
|
1994
|
+
yield {
|
|
1995
|
+
sequence: chunk.sequence,
|
|
1996
|
+
data: void 0,
|
|
1997
|
+
done: true
|
|
1998
|
+
};
|
|
1999
|
+
buffer = [];
|
|
2000
|
+
} else {
|
|
2001
|
+
buffer.push(chunk);
|
|
2002
|
+
yield {
|
|
2003
|
+
sequence: buffer[0].sequence,
|
|
2004
|
+
data: buffer.map((c) => c.data),
|
|
2005
|
+
done: true
|
|
2006
|
+
};
|
|
2007
|
+
}
|
|
2008
|
+
break;
|
|
2009
|
+
}
|
|
2010
|
+
buffer.push(chunk);
|
|
2011
|
+
if (buffer.length === n) {
|
|
2012
|
+
yield {
|
|
2013
|
+
sequence: buffer[0].sequence,
|
|
2014
|
+
data: buffer.map((c) => c.data),
|
|
2015
|
+
done: chunk.done
|
|
2016
|
+
};
|
|
2017
|
+
buffer = [];
|
|
2018
|
+
}
|
|
2019
|
+
}
|
|
2020
|
+
}
|
|
2021
|
+
return new _Pump(gen.call(this));
|
|
2022
|
+
}
|
|
2023
|
+
  /**
   * If you want to prevent chunk starvation, you can buffer the chunks.
   * Chunks will not be bundled into arrays or object but kept as is,
   * but the pipeline will not progress at that segment until the buffer is filled up.
   * Once a buffer is filled up it will drain and never buffer again.
   *
   * @param n The number of chunks to buffer before processing continues
   * @returns A new Pump instance with buffering behavior
   */
  buffer(n) {
    async function* gen() {
      let buffer = [];
      // One-shot flag: once the buffer has drained, all later chunks pass straight through.
      let bufferFilled = false;
      for await (const chunk of this.src) {
        if (!bufferFilled) {
          // The terminal chunk is never stored; it is re-emitted below.
          if (!chunk.done) {
            buffer.push(chunk);
          }
          if (buffer.length >= n || chunk.done) {
            bufferFilled = true;
            for (const bufferedChunk of buffer) {
              yield bufferedChunk;
            }
            // Stream ended before (or exactly when) the buffer filled:
            // drain and emit the terminal chunk immediately.
            if (chunk.done) {
              yield {
                sequence: chunk.sequence,
                data: void 0,
                done: true
              };
              break;
            }
            buffer = [];
          }
        } else {
          yield chunk;
        }
      }
      // NOTE(review): this trailing flush only matters if the source ends
      // without a done chunk while the buffer is still filling — with a
      // well-formed source the buffer is already empty here.
      for (const bufferedChunk of buffer) {
        yield bufferedChunk;
      }
    }
    return new _Pump(gen.call(this));
  }
|
|
2066
|
+
  /**
   * Rechunk the stream: transform one chunk into zero, one, or many output chunks.
   * The handler function receives the current buffer of chunks, a push function to emit new chunks,
   * and a flag indicating if this is the last chunk in the stream.
   *
   * @param handler Function that transforms chunks and pushes new ones
   * @returns A new Pump instance with rechunked data
   */
  rechunk(handler) {
    async function* gen() {
      // Accumulated input data the handler has not yet consumed; the handler
      // may replace it wholesale via setBuffer.
      let buffer = [];
      // Output sequence numbers are re-issued here, independent of the input sequence.
      let seq = 0;
      // Chunks the handler emits via push(); drained after every handler call.
      const pending = [];
      const push = (chunk) => {
        pending.push(chunk);
      };
      for await (const { data, done } of this.src) {
        if (!done) {
          if (data !== void 0) {
            buffer.push(data);
          }
          await handler({
            buffer,
            push,
            lastChunk: false,
            setBuffer: (b) => {
              buffer = b;
            }
          });
        } else {
          // Final handler call with lastChunk=true so it can flush leftovers.
          await handler({
            buffer,
            push,
            lastChunk: true,
            setBuffer: (b) => {
              buffer = b;
            }
          });
        }
        while (pending.length > 0) {
          const out = pending.shift();
          yield { sequence: seq++, data: out, done: false };
        }
        if (done) {
          break;
        }
      }
      yield { sequence: seq, data: void 0, done: true };
    }
    return new _Pump(gen.call(this));
  }
|
|
2117
|
+
  /**
   * Emit a sliding window over the stream's data.
   * Each input chunk yields an array of `size` slots filled from the history
   * at `step` intervals (missing positions stay undefined). When the stream
   * ends, `size - 1` trailing windows are flushed before the terminal chunk.
   *
   * @param size Number of slots per window
   * @param step Distance between history indices sampled into a window
   * @param fn Optional mapper applied to each emitted window (via map)
   * @returns A new Pump emitting window arrays (or fn's output when given)
   */
  slidingWindow(size, step, fn) {
    async function* gen() {
      // All data seen so far; windows are sampled from this history.
      const history = [];
      let offset = 0;
      let lastSeq = 0;
      // Build one window anchored at _offset, walking backwards by `step`.
      // NOTE(review): the bounds check reads the outer `history.length`, not
      // `_history.length` — same object today, but confirm that is intended.
      function buildWindow(_offset, _size, _history) {
        const window = Array(_size).fill(void 0);
        let windowIndex = 0;
        for (let i = _offset; i > _offset - _size; i -= step) {
          if (i >= history.length) {
            windowIndex++;
            continue;
          }
          if (i < 0) {
            break;
          }
          window[windowIndex] = _history[i];
          windowIndex++;
        }
        return window;
      }
      for await (const { sequence, data, done } of this.src) {
        if (done) {
          // Flush the trailing partial windows once the source is exhausted.
          for (let i = 0; i < size - 1; i++) {
            const window2 = buildWindow(offset + i, size, history);
            yield { sequence: lastSeq, data: window2, done: false };
          }
          if (data === void 0) {
            yield {
              sequence: lastSeq,
              data: void 0,
              done: true
            };
          } else {
            // NOTE(review): this branch hardcodes a 3-element window
            // regardless of `size`, and ignores the done chunk's own data —
            // looks suspicious; verify against the TypeScript source.
            yield {
              sequence: lastSeq,
              data: [
                history[history.length - 2] ?? void 0,
                history[history.length - 3] ?? void 0,
                history[history.length - 1]
              ],
              done: true
            };
          }
          break;
        }
        lastSeq = sequence;
        history.push(data);
        const window = buildWindow(offset, size, history);
        yield { sequence, data: window, done: false };
        offset++;
      }
    }
    const base = new _Pump(gen.call(this));
    return fn ? base.map(fn) : base;
  }
|
|
2173
|
+
/**
|
|
2174
|
+
* Sequentially flatten inner stream sources emitted by the pipeline.
|
|
2175
|
+
* Works with any Source type (AsyncIterable or ReadableStream).
|
|
2176
|
+
* This method is only available when the current Pump contains Source elements.
|
|
2177
|
+
*
|
|
2178
|
+
* @template U The type of data in the inner streams
|
|
2179
|
+
* @template F The type of inner stream source (extends Source<U>)
|
|
2180
|
+
* @returns A Pump instance with flattened stream data
|
|
2181
|
+
*/
|
|
2182
|
+
sequenceStreams() {
|
|
2183
|
+
async function* gen() {
|
|
2184
|
+
let seq = 0;
|
|
2185
|
+
for await (const { data: innerSource, done: outerDone } of this.src) {
|
|
2186
|
+
if (outerDone)
|
|
2187
|
+
break;
|
|
2188
|
+
const innerPump = _Pump.from(innerSource);
|
|
2189
|
+
for await (const { data, done } of innerPump.src) {
|
|
2190
|
+
if (done)
|
|
2191
|
+
break;
|
|
2192
|
+
yield { sequence: seq++, data, done: false };
|
|
2193
|
+
}
|
|
2194
|
+
}
|
|
2195
|
+
yield { sequence: seq, data: void 0, done: true };
|
|
2196
|
+
}
|
|
2197
|
+
return new _Pump(gen.call(this));
|
|
2198
|
+
}
|
|
2199
|
+
  /**
   * Fork the stream: two independent Pump<T> consumers
   * Both resulting Pumps will receive the same data, allowing for divergent processing paths.
   *
   * @returns An array containing two independent Pump instances with the same source data
   */
  fork() {
    // One queue per fork; every pulled chunk is copied into both queues, so a
    // slow consumer buffers unboundedly while the fast one races ahead.
    const buffers = [[], []];
    let done = false;
    const srcIter = this.src[Symbol.asyncIterator]();
    // Pull one chunk from the shared source and fan it out to both queues.
    // NOTE(review): concurrent next() calls from both forks can invoke fill()
    // simultaneously — ordering then depends on srcIter.next() serializing
    // calls; verify this holds for all supported sources.
    async function fill() {
      const { value, done: streamDone } = await srcIter.next();
      if (streamDone) {
        done = true;
        return;
      }
      buffers.forEach((q) => q.push(value));
      if (value.done)
        done = true;
    }
    // Wrap a queue as an AsyncIterable that lazily pulls from the shared source.
    function makeStream(buf) {
      return {
        [Symbol.asyncIterator]() {
          return {
            async next() {
              while (buf.length === 0 && !done) {
                await fill();
              }
              if (buf.length === 0)
                return {
                  done: true,
                  value: void 0
                };
              return { done: false, value: buf.shift() };
            }
          };
        }
      };
    }
    return [new _Pump(makeStream(buffers[0])), new _Pump(makeStream(buffers[1]))];
  }
|
|
2240
|
+
/**
|
|
2241
|
+
* Drain the pipeline, consuming all chunks.
|
|
2242
|
+
* Returns a Promise that resolves when all chunks have been consumed.
|
|
2243
|
+
*
|
|
2244
|
+
* @returns A Promise that resolves when all chunks have been consumed
|
|
2245
|
+
*/
|
|
2246
|
+
drain() {
|
|
2247
|
+
return (async () => {
|
|
2248
|
+
for await (const { done } of this.src) {
|
|
2249
|
+
if (done)
|
|
2250
|
+
break;
|
|
2251
|
+
}
|
|
2252
|
+
})();
|
|
2253
|
+
}
|
|
2254
|
+
/**
|
|
2255
|
+
* Drain the pipeline to a StreamTransformer.
|
|
2256
|
+
* Applies transform() to each data chunk, then closes the transformer,
|
|
2257
|
+
* and returns its response (which can be of any type defined by the transformer).
|
|
2258
|
+
*
|
|
2259
|
+
* Example with httpStreamResponse:
|
|
2260
|
+
* ```
|
|
2261
|
+
* const { transform, response, close } = httpStreamResponse(options);
|
|
2262
|
+
* return Pump.from(messageStream).drainTo({ transform, close, response });
|
|
2263
|
+
* ```
|
|
2264
|
+
*
|
|
2265
|
+
* @template U The type of data expected by the transformer (extends T)
|
|
2266
|
+
* @template R The response type produced by the transformer
|
|
2267
|
+
* @param transformer The StreamTransformer to drain to
|
|
2268
|
+
* @returns The response from the transformer
|
|
2269
|
+
*/
|
|
2270
|
+
drainTo(transformer) {
|
|
2271
|
+
(async () => {
|
|
2272
|
+
for await (const { data, done } of this.src) {
|
|
2273
|
+
if (done)
|
|
2274
|
+
break;
|
|
2275
|
+
transformer.transform(data);
|
|
2276
|
+
}
|
|
2277
|
+
transformer.close();
|
|
2278
|
+
})();
|
|
2279
|
+
return transformer.response;
|
|
2280
|
+
}
|
|
2281
|
+
};
|
|
2282
|
+
|
|
2283
|
+
// src/stream/utility/pipe-transformers/response.ts
|
|
2284
|
+
/**
 * Create an HTTP streaming Response backed by a TransformStream.
 *
 * Returns:
 * - transform(chunk): encodes and enqueues a chunk, returning it unchanged
 * - response: a Response whose body streams every written chunk
 * - close(): closes the underlying writer, ending the response body
 *
 * @param options.init Optional ResponseInit forwarded to the Response constructor
 * @param options.encoder Optional custom encoder; the default passes
 *   Uint8Array/string through and JSON-stringifies everything else
 */
function httpStreamResponse(options = {}) {
  const { init, encoder } = options;
  const encodeFn = encoder ?? ((d) => {
    if (d instanceof Uint8Array)
      return d;
    if (typeof d === "string")
      return d;
    return JSON.stringify(d);
  });
  // Hoisted: previously a fresh TextEncoder was allocated for every chunk.
  const textEncoder = new TextEncoder();
  const { readable, writable } = new TransformStream();
  const writer = writable.getWriter();
  const response = new Response(readable, init);
  const transform = (chunk) => {
    const encoded = encodeFn(chunk);
    const bytes = typeof encoded === "string" ? textEncoder.encode(encoded) : encoded;
    // Fire-and-forget write: backpressure is intentionally ignored, but a
    // rejected write (e.g. the client canceled the response) must not
    // surface as an unhandled rejection.
    writer.write(bytes).catch(() => {
    });
    return chunk;
  };
  const close = () => {
    // Same rationale: closing an already-errored writer rejects.
    writer.close().catch(() => {
    });
  };
  return { transform, response, close };
}
|
|
2307
|
+
|
|
2308
|
+
// src/stream/utility/rechunker/ensure-full-words.ts
|
|
2309
|
+
/**
 * Rechunk handler that only emits text ending on a word boundary.
 * Joins the buffered fragments, emits everything up to and including the
 * last boundary character, and carries the trailing partial word over in
 * the buffer. On the last chunk the whole remainder is flushed.
 */
async function ensureFullWords({
  buffer,
  push,
  lastChunk
}) {
  const combined = buffer.join("");
  // NOTE(review): the first and third lastIndexOf arguments render as the
  // same space character here — one may have been meant as a tab or
  // non-breaking space; verify against the TypeScript source.
  const lastBoundary = Math.max(
    combined.lastIndexOf(" "),
    combined.lastIndexOf("\n"),
    combined.lastIndexOf(" ")
  );
  const boundaryFound = lastBoundary !== -1;
  if (!boundaryFound && !lastChunk) {
    // No boundary yet and more input is coming: keep accumulating.
    return;
  }
  const emitPart = boundaryFound ? combined.slice(0, lastBoundary + 1) : combined;
  const leftoverPart = boundaryFound ? combined.slice(lastBoundary + 1) : "";
  if (emitPart.trim().length > 0) {
    push(emitPart);
  }
  buffer.length = 0;
  if (leftoverPart.length > 0) {
    buffer.push(leftoverPart);
  }
}
|
|
2332
|
+
|
|
2333
|
+
// src/api/socket-handler/SocketIoFactory.ts
|
|
2334
|
+
var SocketIoFactory = class _SocketIoFactory {
  /**
   * @param socket Socket.io socket that listeners are attached to
   * @param prefix Optional event-name prefix; falsy leaves events unprefixed
   * @param hooks Optional event handlers; only the hooks provided are registered
   */
  constructor(socket, prefix, hooks) {
    this.socket = socket;
    this.prefix = prefix;
    this.hooks = hooks;
  }
  /**
   * Build a factory and register the requested event groups on the socket.
   */
  static setupSocketHandlers({
    enableVoiceEvents,
    enableChatEvents,
    enableTranscriptEvents,
    prefix = "",
    socket,
    hooks
  }) {
    const factory = new _SocketIoFactory(socket, prefix, hooks);
    if (enableVoiceEvents) {
      factory.setupVoiceEvents();
    }
    if (enableChatEvents) {
      factory.setupChatEvents(socket);
    }
    if (enableTranscriptEvents) {
      factory.setupTranscriptEvents(socket);
    }
  }
  // Register a socket listener for every voice hook that was supplied.
  setupVoiceEvents() {
    const {
      onVoiceInputFile,
      onVoiceInputChunk,
      onVoiceInputCommit,
      onVoiceOutputDelta,
      onVoiceOutputCommit,
      onVoiceOutputFile,
      onVoiceOutputTranscriptDelta,
      onVoiceOutputTranscriptFull
    } = this.hooks;
    // BUG FIX: bind the extracted method. The previous unbound reference
    // (`const prefix = this.prefixEvent;`) lost `this`, so prefixEvent threw
    // a TypeError (reading this.prefix of undefined) as soon as any voice
    // hook was registered.
    const prefix = this.prefixEvent.bind(this);
    if (onVoiceInputFile) {
      this.socket.on(prefix("voice:input_file"), onVoiceInputFile);
    }
    if (onVoiceInputChunk) {
      this.socket.on(prefix("voice:input_chunk"), onVoiceInputChunk);
    }
    if (onVoiceInputCommit) {
      this.socket.on(prefix("voice:input_commit"), onVoiceInputCommit);
    }
    if (onVoiceOutputDelta) {
      this.socket.on(prefix("voice:output_delta"), onVoiceOutputDelta);
    }
    if (onVoiceOutputCommit) {
      this.socket.on(prefix("voice:output_commit"), onVoiceOutputCommit);
    }
    if (onVoiceOutputFile) {
      this.socket.on(prefix("voice:output_file"), onVoiceOutputFile);
    }
    if (onVoiceOutputTranscriptDelta) {
      this.socket.on(
        prefix("voice:output_transcript_delta"),
        onVoiceOutputTranscriptDelta
      );
    }
    if (onVoiceOutputTranscriptFull) {
      this.socket.on(
        prefix("voice:output_transcript_full"),
        onVoiceOutputTranscriptFull
      );
    }
  }
  // Placeholder: chat events not implemented yet.
  setupChatEvents(_socket) {
  }
  // Placeholder: transcript events not implemented yet.
  setupTranscriptEvents(_socket) {
  }
  // Prepend the configured prefix ("p" -> "p:event"); pass through when unset.
  prefixEvent(event) {
    return this.prefix ? `${this.prefix}:${event}` : event;
  }
};
|
|
2410
|
+
// True for chat messages authored by either the human or the AI.
var humanAndAI = (message) => [HumanMessage, AIMessage].some((Type) => message instanceof Type);
|
|
2411
|
+
// True only for messages authored by the human participant.
var humanOnly = (message) => {
  return message instanceof HumanMessage;
};
|
|
2412
|
+
// True only for messages authored by the AI.
var aiOnly = (message) => {
  return message instanceof AIMessage;
};
|
|
2413
|
+
// Keep a message only when it carries at least one of the given tags
// (stored under additional_kwargs.tags). With no tags given, keep everything.
var includingTags = (message, tags) => {
  if (!tags) {
    return true;
  }
  const messageTags = message.additional_kwargs?.tags;
  if (!Array.isArray(messageTags)) {
    return false;
  }
  return tags.some((tag) => messageTags.includes(tag));
};
|
|
2421
|
+
// Keep a message only when it carries none of the given tags
// (stored under additional_kwargs.tags). With no tags given, keep everything.
var excludingTags = (message, tags) => {
  if (!tags) {
    return true;
  }
  const messageTags = message.additional_kwargs?.tags;
  if (!Array.isArray(messageTags)) {
    return true;
  }
  return !tags.some((tag) => messageTags.includes(tag));
};
|
|
2429
|
+
// Dispatch table mapping FilterType enum values to their predicate
// implementations (used when a string is passed to TransformMessages.filter).
var typeOnFilter = {
  ["HumanAndAI" /* HumanAndAI */]: humanAndAI,
  ["HumanOnly" /* HumanOnly */]: humanOnly,
  ["AIOnly" /* AIOnly */]: aiOnly,
  ["IncludingTags" /* IncludingTags */]: includingTags,
  ["ExcludingTags" /* ExcludingTags */]: excludingTags
};
|
|
2436
|
+
// Format a transcript as one "Role: content" line per message.
function concise(messages) {
  const lines = messages.map((message) => {
    const role = message instanceof AIMessage ? "AI" : "Human";
    return `${role}: ${message.content}`;
  });
  return lines.join("\n");
}
|
|
2442
|
+
// Format a transcript with the role on its own line and a dashed divider
// between messages.
function verbose(messages) {
  const blocks = messages.map((message) => {
    const role = message instanceof AIMessage ? "AI" : "Human";
    return `${role}:
${message.content}`;
  });
  return blocks.join("\n-------------------\n");
}
|
|
2449
|
+
// Format a transcript, hiding AI content behind "[...]" while keeping
// human messages verbatim. One "Role: content" line per message.
function redactAi(messages) {
  const lines = messages.map((message) => {
    const isAi = message instanceof AIMessage;
    const role = isAi ? "AI" : "Human";
    return `${role}: ${isAi ? "[...]" : message.content}`;
  });
  return lines.join("\n");
}
|
|
2456
|
+
/**
 * Format a transcript, hiding HUMAN content behind "[...]" while keeping
 * AI messages verbatim. One "Role: content" line per message.
 *
 * Bug fix: the body was a verbatim copy of redactAi and redacted the AI
 * side instead of the human side its name promises; the ternary arms for
 * `content` are now swapped.
 */
function redactHuman(messages) {
  return messages.map((message) => {
    const prefix = message instanceof AIMessage ? "AI" : "Human";
    const content = message instanceof AIMessage ? message.content : "[...]";
    return `${prefix}: ${content}`;
  }).join("\n");
}
|
|
2463
|
+
// Dispatch table mapping FormatterType enum values to transcript formatters.
var typeOnFormatter = {
  ["concise" /* Concise */]: concise,
  ["verbose" /* Verbose */]: verbose,
  ["redact-ai" /* RedactAi */]: redactAi,
  ["redact-human" /* RedactHuman */]: redactHuman
};
|
|
2469
|
+
|
|
2470
|
+
// src/helper/transform-messages/TransformMessages.ts
|
|
2471
|
+
var TransformMessages = class _TransformMessages {
|
|
2472
|
+
  /**
   * @param effect Effect pipeline that yields the current message array when run.
   */
  constructor(effect) {
    this.effect = effect;
  }
|
|
2475
|
+
  /**
   * Create a new TransformMessages from an array of messages.
   * The array is wrapped in an already-succeeded Effect.
   */
  static from(messages) {
    return new _TransformMessages(Effect.succeed(messages));
  }
|
|
2481
|
+
/**
|
|
2482
|
+
* Filter messages based on a predicate function
|
|
2483
|
+
*/
|
|
2484
|
+
filter(predicate, tags) {
|
|
2485
|
+
let finalPredicate;
|
|
2486
|
+
if (typeof predicate === "string") {
|
|
2487
|
+
finalPredicate = typeOnFilter[predicate];
|
|
2488
|
+
} else {
|
|
2489
|
+
finalPredicate = predicate;
|
|
2490
|
+
}
|
|
2491
|
+
return new _TransformMessages(
|
|
2492
|
+
pipe(
|
|
2493
|
+
this.effect,
|
|
2494
|
+
Effect.map(
|
|
2495
|
+
(messages) => messages.filter((message) => finalPredicate(message, tags))
|
|
2496
|
+
)
|
|
2497
|
+
)
|
|
2498
|
+
);
|
|
2499
|
+
}
|
|
2500
|
+
  /**
   * Take only the last n messages, but safely.
   * Tool calls should not be separated from the last human message.
   * Ensures all tool call conversations in the last n messages are complete.
   *
   * @param n Number of trailing messages to keep
   * @param pruneAfterNOvershootingMessages When > 0, cap how many extra
   *   messages may be pulled in to complete a tool-call chain; past the cap
   *   the leading orphaned ToolMessages are dropped instead.
   */
  safelyTakeLast(n, pruneAfterNOvershootingMessages = 0) {
    return new _TransformMessages(
      pipe(
        this.effect,
        Effect.map((messages) => {
          const total = messages.length;
          if (n <= 0 || total === 0)
            return [];
          const start = Math.max(0, total - n);
          const end = total;
          const lastSlice = messages.slice(start, end);
          // The slice starts mid tool-call conversation: walk backwards to
          // include the chain up to its initiating AI message.
          if (lastSlice[0] instanceof ToolMessage && lastSlice[0].tool_call_id) {
            let messagesToInclude = [];
            const remainingMessages = messages.slice(0, start);
            for (let i = remainingMessages.length - 1; i >= 0; i--) {
              const msg = remainingMessages[i];
              // Overshoot cap reached: give up on completing the chain and
              // instead drop the leading orphaned ToolMessages from lastSlice.
              // NOTE(review): `messagesToInclude.length - 1` makes the cap
              // effectively off-by-one — confirm against the TypeScript source.
              if (pruneAfterNOvershootingMessages > 0 && messagesToInclude.length - 1 >= pruneAfterNOvershootingMessages) {
                messagesToInclude = [];
                const filteredSlice = [];
                let foundFirstNonToolMessage = false;
                for (let i2 = 0; i2 < lastSlice.length; i2++) {
                  const msg2 = lastSlice[i2];
                  if (msg2 instanceof ToolMessage) {
                    // Leading ToolMessages (before any non-tool message) are dropped.
                    if (foundFirstNonToolMessage) {
                      filteredSlice.push(msg2);
                    }
                  } else {
                    foundFirstNonToolMessage = true;
                    filteredSlice.push(msg2);
                  }
                }
                return filteredSlice;
              }
              if (msg instanceof AIMessage && Array.isArray(msg.tool_calls)) {
                // Found the AI message that issued the tool calls: chain complete.
                messagesToInclude.push(msg);
                break;
              } else if (msg instanceof ToolMessage) {
                messagesToInclude.push(msg);
              } else {
                // A non-tool, non-AI message inside a tool-call chain means
                // the history is malformed.
                throw new Error(
                  "Messages array invalid no adjacent AI message found"
                );
              }
            }
            // messagesToInclude was collected backwards; restore order.
            return [...messagesToInclude.reverse(), ...lastSlice];
          } else {
            return lastSlice;
          }
        })
      )
    );
  }
|
|
2557
|
+
/**
|
|
2558
|
+
* Take only the last n messages
|
|
2559
|
+
*/
|
|
2560
|
+
last(n) {
|
|
2561
|
+
return new _TransformMessages(
|
|
2562
|
+
pipe(
|
|
2563
|
+
this.effect,
|
|
2564
|
+
Effect.map((messages) => messages.slice(-n))
|
|
2565
|
+
)
|
|
2566
|
+
);
|
|
2567
|
+
}
|
|
2568
|
+
/**
|
|
2569
|
+
* Take only the first n messages
|
|
2570
|
+
*/
|
|
2571
|
+
first(n) {
|
|
2572
|
+
return new _TransformMessages(
|
|
2573
|
+
pipe(
|
|
2574
|
+
this.effect,
|
|
2575
|
+
Effect.map((messages) => messages.slice(0, n))
|
|
2576
|
+
)
|
|
2577
|
+
);
|
|
2578
|
+
}
|
|
2579
|
+
/**
|
|
2580
|
+
* Skip the first n messages
|
|
2581
|
+
*/
|
|
2582
|
+
skip(n) {
|
|
2583
|
+
return new _TransformMessages(
|
|
2584
|
+
pipe(
|
|
2585
|
+
this.effect,
|
|
2586
|
+
Effect.map((messages) => messages.slice(n))
|
|
2587
|
+
)
|
|
2588
|
+
);
|
|
2589
|
+
}
|
|
2590
|
+
/**
|
|
2591
|
+
* Reverse the order of messages
|
|
2592
|
+
*/
|
|
2593
|
+
reverse() {
|
|
2594
|
+
return new _TransformMessages(
|
|
2595
|
+
pipe(
|
|
2596
|
+
this.effect,
|
|
2597
|
+
Effect.map((messages) => [...messages].reverse())
|
|
2598
|
+
)
|
|
2599
|
+
);
|
|
2600
|
+
}
|
|
2601
|
+
/**
|
|
2602
|
+
* Map over messages with a transformation function
|
|
2603
|
+
*/
|
|
2604
|
+
map(fn) {
|
|
2605
|
+
return new _TransformMessages(
|
|
2606
|
+
pipe(
|
|
2607
|
+
this.effect,
|
|
2608
|
+
Effect.map((messages) => messages.map(fn))
|
|
2609
|
+
)
|
|
2610
|
+
);
|
|
2611
|
+
}
|
|
2612
|
+
/**
|
|
2613
|
+
* Format messages according to the specified format type
|
|
2614
|
+
*/
|
|
2615
|
+
format(formatType) {
|
|
2616
|
+
return pipe(
|
|
2617
|
+
this.effect,
|
|
2618
|
+
Effect.map((messages) => {
|
|
2619
|
+
if (formatType === "json" /* JSON */) {
|
|
2620
|
+
return JSON.stringify(messages, null, 2);
|
|
2621
|
+
}
|
|
2622
|
+
const formatter = typeOnFormatter[formatType];
|
|
2623
|
+
return formatter(messages);
|
|
2624
|
+
})
|
|
2625
|
+
);
|
|
2626
|
+
}
|
|
2627
|
+
// Sink methods
|
|
2628
|
+
/**
|
|
2629
|
+
* Convert to array - runs the effect and returns the result
|
|
2630
|
+
*/
|
|
2631
|
+
toArray() {
|
|
2632
|
+
return this.effect;
|
|
2633
|
+
}
|
|
2634
|
+
/**
|
|
2635
|
+
* Convert to string - runs the effect and returns JSON string
|
|
2636
|
+
*/
|
|
2637
|
+
toString() {
|
|
2638
|
+
return pipe(
|
|
2639
|
+
this.effect,
|
|
2640
|
+
Effect.map((messages) => JSON.stringify(messages, null, 2))
|
|
2641
|
+
);
|
|
2642
|
+
}
|
|
2643
|
+
/**
|
|
2644
|
+
* Get the count of messages
|
|
2645
|
+
*/
|
|
2646
|
+
count() {
|
|
2647
|
+
return pipe(
|
|
2648
|
+
this.effect,
|
|
2649
|
+
Effect.map((messages) => messages.length)
|
|
2650
|
+
);
|
|
2651
|
+
}
|
|
2652
|
+
};
|
|
2653
|
+
|
|
2654
|
+
// Public surface of this bundled entry point (generated output).
export { AiCursor, BaseVoiceEndpointAdapter, Emitter, InputAudioController, Pump, SocketIoFactory, TransformMessages, VoiceEndpointAdapter, VoiceSocketAdapter, ensureFullWords, httpStreamResponse, useConversation, useSocketConversation };
// NOTE(review): two sourceMappingURL comments below — bundler artifact;
// tooling generally honors only the last one. Confirm the build config.
//# sourceMappingURL=out.js.map
//# sourceMappingURL=index.js.map
|