@jchaffin/voicekit 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +369 -0
- package/dist/adapters/deepgram.d.mts +43 -0
- package/dist/adapters/deepgram.d.ts +43 -0
- package/dist/adapters/deepgram.js +216 -0
- package/dist/adapters/deepgram.mjs +162 -0
- package/dist/adapters/elevenlabs.d.mts +41 -0
- package/dist/adapters/elevenlabs.d.ts +41 -0
- package/dist/adapters/elevenlabs.js +304 -0
- package/dist/adapters/elevenlabs.mjs +250 -0
- package/dist/adapters/livekit.d.mts +44 -0
- package/dist/adapters/livekit.d.ts +44 -0
- package/dist/adapters/livekit.js +225 -0
- package/dist/adapters/livekit.mjs +161 -0
- package/dist/adapters/openai.d.mts +41 -0
- package/dist/adapters/openai.d.ts +41 -0
- package/dist/adapters/openai.js +350 -0
- package/dist/adapters/openai.mjs +294 -0
- package/dist/chunk-22WLZIXO.mjs +33 -0
- package/dist/chunk-T3II3DRG.mjs +178 -0
- package/dist/chunk-UZ2VGPZD.mjs +33 -0
- package/dist/chunk-Y6FXYEAI.mjs +10 -0
- package/dist/index.d.mts +693 -0
- package/dist/index.d.ts +693 -0
- package/dist/index.js +1838 -0
- package/dist/index.mjs +1593 -0
- package/dist/server.d.mts +80 -0
- package/dist/server.d.ts +80 -0
- package/dist/server.js +147 -0
- package/dist/server.mjs +119 -0
- package/dist/types-DY31oVB1.d.mts +150 -0
- package/dist/types-DY31oVB1.d.ts +150 -0
- package/dist/types-mThnXW9S.d.mts +150 -0
- package/dist/types-mThnXW9S.d.ts +150 -0
- package/dist/types-uLnzb8NE.d.mts +150 -0
- package/dist/types-uLnzb8NE.d.ts +150 -0
- package/package.json +100 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,1838 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __create = Object.create;
|
|
3
|
+
var __defProp = Object.defineProperty;
|
|
4
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
5
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
6
|
+
var __getProtoOf = Object.getPrototypeOf;
|
|
7
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
8
|
+
var __export = (target, all) => {
|
|
9
|
+
for (var name in all)
|
|
10
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
11
|
+
};
|
|
12
|
+
var __copyProps = (to, from, except, desc) => {
|
|
13
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
14
|
+
for (let key of __getOwnPropNames(from))
|
|
15
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
16
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
17
|
+
}
|
|
18
|
+
return to;
|
|
19
|
+
};
|
|
20
|
+
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
|
21
|
+
// If the importer is in node compatibility mode or this is not an ESM
|
|
22
|
+
// file that has been converted to a CommonJS file using a Babel-
|
|
23
|
+
// compatible transform (i.e. "__esModule" has not been set), then set
|
|
24
|
+
// "default" to the CommonJS "module.exports" for node compatibility.
|
|
25
|
+
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
|
26
|
+
mod
|
|
27
|
+
));
|
|
28
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
29
|
+
|
|
30
|
+
// src/index.ts
// Public export table for the package entry point. Each entry is registered
// as a lazy getter via __export so bindings stay live; the names must match
// the definitions below in this file.
var index_exports = {};
__export(index_exports, {
  ChatInput: () => ChatInput,
  ConnectButton: () => ConnectButton,
  EventEmitter: () => EventEmitter,
  EventProvider: () => EventProvider,
  GuardrailOutputZod: () => GuardrailOutputZod,
  MODERATION_CATEGORIES: () => MODERATION_CATEGORIES,
  ModerationCategoryZod: () => ModerationCategoryZod,
  SUGGESTION_EVENT: () => SUGGESTION_EVENT,
  StatusIndicator: () => StatusIndicator,
  SuggestionChips: () => SuggestionChips,
  SuggestionProvider: () => SuggestionProvider,
  TOOL_RESULT_EVENT: () => TOOL_RESULT_EVENT,
  Transcript: () => Transcript,
  TranscriptProvider: () => TranscriptProvider,
  VoiceChat: () => VoiceChat,
  VoiceProvider: () => VoiceProvider,
  applyCodecPreferences: () => applyCodecPreferences,
  audioFormatForCodec: () => audioFormatForCodec,
  clearSuggestions: () => clearSuggestions,
  convertWebMToWav: () => convertWebMToWav,
  createAPITool: () => createAPITool,
  createAgent: () => createAgent,
  createAgentFromTemplate: () => createAgentFromTemplate,
  createCustomGuardrail: () => createCustomGuardrail,
  createEventTool: () => createEventTool,
  createModerationGuardrail: () => createModerationGuardrail,
  createNavigationTool: () => createNavigationTool,
  createRAGTool: () => createRAGTool,
  createSearchTool: () => createSearchTool,
  defineTool: () => defineTool,
  emitSuggestions: () => emitSuggestions,
  encodeWAV: () => encodeWAV,
  runGuardrailClassifier: () => runGuardrailClassifier,
  useAudioRecorder: () => useAudioRecorder,
  useEvent: () => useEvent,
  useRealtimeSession: () => useRealtimeSession,
  useSessionHistory: () => useSessionHistory,
  useSuggestions: () => useSuggestions,
  useToolListener: () => useToolListener,
  useToolResult: () => useToolResult,
  useToolResults: () => useToolResults,
  useTranscript: () => useTranscript,
  useVoice: () => useVoice
});
// Publish the export table as this CommonJS module's exports.
module.exports = __toCommonJS(index_exports);
|
|
78
|
+
|
|
79
|
+
// src/VoiceProvider.tsx
var import_react = require("react");
var import_jsx_runtime = require("react/jsx-runtime");
// Context shared by useVoice() consumers; null until a VoiceProvider mounts.
var VoiceContext = (0, import_react.createContext)(null);
/**
 * React provider that owns a realtime voice session.
 *
 * Responsibilities visible in this code:
 *  - tracks connection status ("DISCONNECTED" | "CONNECTING" | "CONNECTED"),
 *    the message transcript, and mute state;
 *  - creates a hidden autoplaying <audio> element for session playback;
 *  - fetches an auth token by POSTing to `sessionEndpoint`
 *    (expects `ephemeralKey` or `token` in the JSON response);
 *  - delegates session creation to `adapter.createSession(agent, opts)` —
 *    the session object's contract (on/connect/disconnect/sendMessage/
 *    interrupt/mute/sendRawEvent) is defined by the adapter, not here.
 */
function VoiceProvider({
  children,
  adapter,
  agent,
  sessionEndpoint = "/api/session",
  model,
  language = "en",
  onStatusChange,
  onTranscriptUpdate,
  onToolCall,
  onError
}) {
  const [status, setStatus] = (0, import_react.useState)("DISCONNECTED");
  const [transcript, setTranscript] = (0, import_react.useState)([]);
  const [isMuted, setIsMuted] = (0, import_react.useState)(false);
  const sessionRef = (0, import_react.useRef)(null);
  const audioRef = (0, import_react.useRef)(null);
  // Mirror of `status` readable from stable callbacks without stale closures.
  const statusRef = (0, import_react.useRef)("DISCONNECTED");
  // Id of the assistant message currently being streamed (delta appends).
  const currentMsgIdRef = (0, import_react.useRef)(null);
  (0, import_react.useEffect)(() => {
    statusRef.current = status;
  }, [status]);
  // Create a hidden <audio> sink for remote audio; torn down on unmount.
  (0, import_react.useEffect)(() => {
    if (typeof window === "undefined") return;
    const audio = document.createElement("audio");
    audio.autoplay = true;
    audio.style.display = "none";
    document.body.appendChild(audio);
    audioRef.current = audio;
    return () => {
      try {
        audio.pause();
        audio.srcObject = null;
        audio.remove();
      } catch {
      }
    };
  }, []);
  // Update state and notify the optional onStatusChange callback together.
  const updateStatus = (0, import_react.useCallback)((newStatus) => {
    setStatus(newStatus);
    onStatusChange?.(newStatus);
  }, [onStatusChange]);
  // Append a message to the transcript; returns the new message's id.
  // NOTE(review): onTranscriptUpdate is invoked inside the setState updater,
  // so it may fire more than once under React Strict Mode — confirm intended.
  const addMessage = (0, import_react.useCallback)((role, text, id) => {
    const message = {
      id: id || crypto.randomUUID(),
      role,
      text,
      timestamp: /* @__PURE__ */ new Date(),
      status: "pending"
    };
    setTranscript((prev) => {
      const updated = [...prev, message];
      onTranscriptUpdate?.(updated);
      return updated;
    });
    return message.id;
  }, [onTranscriptUpdate]);
  // Replace (or, with append=true, extend) the text of message `id`.
  const updateMessage = (0, import_react.useCallback)((id, text, append = false) => {
    setTranscript((prev) => {
      const updated = prev.map(
        (m) => m.id === id ? { ...m, text: append ? m.text + text : text } : m
      );
      onTranscriptUpdate?.(updated);
      return updated;
    });
  }, [onTranscriptUpdate]);
  // Mark message `id` as finished streaming.
  const completeMessage = (0, import_react.useCallback)((id) => {
    setTranscript((prev) => {
      const updated = prev.map(
        (m) => m.id === id ? { ...m, status: "complete" } : m
      );
      onTranscriptUpdate?.(updated);
      return updated;
    });
  }, [onTranscriptUpdate]);
  // Subscribe to the adapter session's events and route them into the
  // transcript / callbacks. User text arrives only on final transcripts;
  // assistant text streams as deltas and is finalized on isFinal.
  const wireSessionEvents = (0, import_react.useCallback)((session) => {
    session.on("user_transcript", (data) => {
      if (data.isFinal) {
        addMessage("user", data.text || data.delta || "");
      }
    });
    session.on("assistant_transcript", (data) => {
      if (data.isFinal) {
        if (currentMsgIdRef.current) {
          completeMessage(currentMsgIdRef.current);
          currentMsgIdRef.current = null;
        }
      } else if (data.delta) {
        if (!currentMsgIdRef.current) {
          currentMsgIdRef.current = addMessage("assistant", data.delta);
        } else {
          updateMessage(currentMsgIdRef.current, data.delta, true);
        }
      }
    });
    session.on("tool_call_end", (name, input, output) => {
      onToolCall?.(name, input, output);
    });
    session.on("error", (error) => {
      console.error("VoiceKit session error:", error);
      onError?.(error);
    });
  }, [addMessage, updateMessage, completeMessage, onToolCall, onError]);
  // POST to the session endpoint for a short-lived auth token; any failure
  // (HTTP error or network) yields null rather than throwing.
  const fetchToken = (0, import_react.useCallback)(async () => {
    try {
      const res = await fetch(sessionEndpoint, { method: "POST" });
      if (!res.ok) return null;
      const data = await res.json();
      return data.ephemeralKey || data.token || null;
    } catch {
      return null;
    }
  }, [sessionEndpoint]);
  // Establish a session: token -> adapter session -> wire events -> connect.
  // No-op unless currently DISCONNECTED and the audio element exists.
  const connect = (0, import_react.useCallback)(async () => {
    if (statusRef.current !== "DISCONNECTED") return;
    if (!audioRef.current) return;
    updateStatus("CONNECTING");
    try {
      const token = await fetchToken();
      if (!token) {
        onError?.(new Error("Failed to get session key"));
        updateStatus("DISCONNECTED");
        return;
      }
      const session = adapter.createSession(agent, { model, language });
      sessionRef.current = session;
      wireSessionEvents(session);
      await session.connect({
        authToken: token,
        audioElement: audioRef.current
      });
      updateStatus("CONNECTED");
      // Nudge the model to speak first shortly after connecting
      // (sendRawEvent is optional on the adapter session).
      setTimeout(() => {
        session.sendRawEvent?.({ type: "response.create" });
      }, 500);
    } catch (error) {
      console.error("VoiceKit connection failed:", error);
      onError?.(error instanceof Error ? error : new Error(String(error)));
      updateStatus("DISCONNECTED");
    }
  }, [adapter, agent, model, language, fetchToken, wireSessionEvents, updateStatus, onError]);
  // Tear down the current session (best-effort) and reset streaming state.
  const disconnect = (0, import_react.useCallback)(async () => {
    if (sessionRef.current) {
      try {
        await sessionRef.current.disconnect();
      } catch {
      }
      sessionRef.current = null;
    }
    currentMsgIdRef.current = null;
    updateStatus("DISCONNECTED");
  }, [updateStatus]);
  // Send a typed message; interrupts any in-progress assistant speech first.
  const sendMessage = (0, import_react.useCallback)((text) => {
    if (!sessionRef.current || statusRef.current !== "CONNECTED") return;
    sessionRef.current.interrupt();
    sessionRef.current.sendMessage(text);
  }, []);
  const interrupt = (0, import_react.useCallback)(() => {
    sessionRef.current?.interrupt();
  }, []);
  // Mute both the outgoing session audio and the local playback element.
  const mute = (0, import_react.useCallback)((muted) => {
    setIsMuted(muted);
    sessionRef.current?.mute(muted);
    if (audioRef.current) {
      audioRef.current.muted = muted;
    }
  }, []);
  const clearTranscript = (0, import_react.useCallback)(() => {
    setTranscript([]);
    onTranscriptUpdate?.([]);
  }, [onTranscriptUpdate]);
  // Best-effort disconnect on unmount.
  (0, import_react.useEffect)(() => {
    return () => {
      try {
        sessionRef.current?.disconnect();
      } catch {
      }
    };
  }, []);
  const value = {
    status,
    connect,
    disconnect,
    transcript,
    clearTranscript,
    sendMessage,
    interrupt,
    mute,
    isMuted,
    agent
  };
  return /* @__PURE__ */ (0, import_jsx_runtime.jsx)(VoiceContext.Provider, { value, children });
}
|
|
276
|
+
/**
 * Hook exposing the VoiceProvider context value.
 * Throws if called outside a <VoiceProvider> tree.
 */
function useVoice() {
  const ctx = (0, import_react.useContext)(VoiceContext);
  if (!ctx) throw new Error("useVoice must be used within a VoiceProvider");
  return ctx;
}
|
|
283
|
+
|
|
284
|
+
// src/components/VoiceChat.tsx
var import_react2 = require("react");
var import_jsx_runtime2 = require("react/jsx-runtime");
// Single chat bubble: user messages align right (blue), assistant messages
// align left (gray); per-role class overrides replace the default styling.
function Message({ message, userClassName, assistantClassName }) {
  const isUser = message.role === "user";
  return /* @__PURE__ */ (0, import_jsx_runtime2.jsx)("div", { className: `flex ${isUser ? "justify-end" : "justify-start"}`, children: /* @__PURE__ */ (0, import_jsx_runtime2.jsx)(
    "div",
    {
      className: `max-w-[80%] rounded-2xl px-4 py-2 ${isUser ? userClassName || "bg-blue-500 text-white rounded-br-md" : assistantClassName || "bg-gray-100 dark:bg-gray-800 text-gray-900 dark:text-gray-100 rounded-bl-md"}`,
      children: /* @__PURE__ */ (0, import_jsx_runtime2.jsx)("p", { className: "text-sm whitespace-pre-wrap", children: message.text })
    }
  ) });
}
|
|
297
|
+
// Scrollable message list. Auto-scrolls to the newest message unless the
// user has manually scrolled up (detected via a 50px bottom threshold).
function Transcript({
  messages,
  userClassName,
  assistantClassName,
  emptyMessage = "Start a conversation..."
}) {
  const containerRef = (0, import_react2.useRef)(null);
  // True while the user has scrolled away from the bottom; suppresses
  // auto-scroll so streaming updates don't yank the view down.
  const userScrolledUp = (0, import_react2.useRef)(false);
  (0, import_react2.useEffect)(() => {
    const container = containerRef.current;
    if (!container) return;
    const handleScroll = () => {
      const isAtBottom = container.scrollHeight - container.scrollTop - container.clientHeight < 50;
      userScrolledUp.current = !isAtBottom;
    };
    container.addEventListener("scroll", handleScroll);
    return () => container.removeEventListener("scroll", handleScroll);
  }, []);
  // Smooth-scroll to bottom whenever messages change (and user is at bottom).
  (0, import_react2.useEffect)(() => {
    if (containerRef.current && messages.length > 0 && !userScrolledUp.current) {
      containerRef.current.scrollTo({
        top: containerRef.current.scrollHeight,
        behavior: "smooth"
      });
    }
  }, [messages]);
  if (messages.length === 0) {
    return /* @__PURE__ */ (0, import_jsx_runtime2.jsx)("div", { className: "flex items-center justify-center h-full text-gray-500", children: emptyMessage });
  }
  return /* @__PURE__ */ (0, import_jsx_runtime2.jsx)("div", { ref: containerRef, className: "flex flex-col gap-3 overflow-y-auto h-full p-4", children: messages.map((msg) => /* @__PURE__ */ (0, import_jsx_runtime2.jsx)(
    Message,
    {
      message: msg,
      userClassName,
      assistantClassName
    },
    msg.id
  )) });
}
|
|
336
|
+
// Colored dot + label reflecting the current connection status from useVoice.
// Green/yellow dots pulse; the disconnected gray dot is static.
function StatusIndicator({
  className = "",
  connectedText = "Connected",
  connectingText = "Connecting...",
  disconnectedText = "Disconnected"
}) {
  const { status } = useVoice();
  // Keys must cover every status value VoiceProvider can emit.
  const statusConfig = {
    CONNECTED: { color: "bg-green-500", text: connectedText, pulse: true },
    CONNECTING: { color: "bg-yellow-500", text: connectingText, pulse: true },
    DISCONNECTED: { color: "bg-gray-400", text: disconnectedText, pulse: false }
  };
  const config = statusConfig[status];
  return /* @__PURE__ */ (0, import_jsx_runtime2.jsxs)("div", { className: `flex items-center gap-2 ${className}`, children: [
    /* @__PURE__ */ (0, import_jsx_runtime2.jsx)("div", { className: `w-2 h-2 rounded-full ${config.color} ${config.pulse ? "animate-pulse" : ""}` }),
    /* @__PURE__ */ (0, import_jsx_runtime2.jsx)("span", { className: "text-sm", children: config.text })
  ] });
}
|
|
354
|
+
// Toggle button for the voice session: connects when disconnected,
// disconnects when connected, and is disabled (no-op) while connecting.
// A custom className replaces the default styling entirely.
function ConnectButton({
  className = "",
  connectText = "Start",
  disconnectText = "End",
  connectingText = "Connecting...",
  children
}) {
  const { status, connect, disconnect } = useVoice();
  const handleClick = () => {
    if (status === "CONNECTED") {
      disconnect();
    } else if (status === "DISCONNECTED") {
      connect();
    }
  };
  const text = status === "CONNECTED" ? disconnectText : status === "CONNECTING" ? connectingText : connectText;
  return /* @__PURE__ */ (0, import_jsx_runtime2.jsx)(
    "button",
    {
      onClick: handleClick,
      disabled: status === "CONNECTING",
      className: className || `px-4 py-2 rounded-lg font-medium transition-colors ${status === "CONNECTED" ? "bg-red-500 hover:bg-red-600 text-white" : status === "CONNECTING" ? "bg-gray-300 text-gray-500 cursor-not-allowed" : "bg-blue-500 hover:bg-blue-600 text-white"}`,
      children: children || text
    }
  );
}
|
|
380
|
+
// Text input + send button for typing messages into the live session.
// Uses an uncontrolled input (ref) and clears it after submit. Disabled
// unless the session is CONNECTED. `onSend`, when given, replaces the
// default sendMessage behavior.
function ChatInput({
  placeholder = "Type a message...",
  className = "",
  buttonText = "Send",
  onSend
}) {
  const { sendMessage, status } = useVoice();
  const inputRef = (0, import_react2.useRef)(null);
  const handleSubmit = (e) => {
    e.preventDefault();
    const text = inputRef.current?.value.trim();
    if (!text) return;
    if (onSend) {
      onSend(text);
    } else {
      sendMessage(text);
    }
    if (inputRef.current) {
      inputRef.current.value = "";
    }
  };
  const disabled = status !== "CONNECTED";
  return /* @__PURE__ */ (0, import_jsx_runtime2.jsxs)("form", { onSubmit: handleSubmit, className: `flex gap-2 ${className}`, children: [
    /* @__PURE__ */ (0, import_jsx_runtime2.jsx)(
      "input",
      {
        ref: inputRef,
        type: "text",
        placeholder,
        disabled,
        className: "flex-1 px-4 py-2 rounded-lg border border-gray-300 dark:border-gray-600 \n bg-white dark:bg-gray-800 text-gray-900 dark:text-gray-100\n focus:outline-none focus:ring-2 focus:ring-blue-500\n disabled:opacity-50 disabled:cursor-not-allowed"
      }
    ),
    /* @__PURE__ */ (0, import_jsx_runtime2.jsx)(
      "button",
      {
        type: "submit",
        disabled,
        className: "px-4 py-2 bg-blue-500 text-white rounded-lg font-medium\n hover:bg-blue-600 disabled:opacity-50 disabled:cursor-not-allowed",
        children: buttonText
      }
    )
  ] });
}
|
|
424
|
+
// Batteries-included chat widget: optional header (status + Clear/Connect
// controls), scrolling Transcript, and a ChatInput footer shown only while
// connected. `header`/`footer`/`emptyState` slots override the defaults.
function VoiceChat({
  className = "",
  height = "400px",
  showHeader = true,
  showInput = true,
  emptyState,
  header,
  footer
}) {
  const { status, transcript, connect, disconnect, clearTranscript } = useVoice();
  const defaultEmptyState = /* @__PURE__ */ (0, import_jsx_runtime2.jsxs)("div", { className: "flex flex-col items-center justify-center gap-4", children: [
    /* @__PURE__ */ (0, import_jsx_runtime2.jsx)(ConnectButton, {}),
    /* @__PURE__ */ (0, import_jsx_runtime2.jsx)("p", { className: "text-sm text-gray-500", children: status === "CONNECTING" ? "Connecting..." : "Click to start a conversation" })
  ] });
  return /* @__PURE__ */ (0, import_jsx_runtime2.jsxs)("div", { className: `flex flex-col rounded-xl border border-gray-200 dark:border-gray-700
bg-white dark:bg-gray-900 overflow-hidden ${className}`, children: [
    showHeader && /* @__PURE__ */ (0, import_jsx_runtime2.jsx)("div", { className: "flex items-center justify-between px-4 py-3 border-b border-gray-200 dark:border-gray-700", children: header || /* @__PURE__ */ (0, import_jsx_runtime2.jsxs)(import_jsx_runtime2.Fragment, { children: [
      /* @__PURE__ */ (0, import_jsx_runtime2.jsx)(StatusIndicator, {}),
      /* @__PURE__ */ (0, import_jsx_runtime2.jsxs)("div", { className: "flex gap-2", children: [
        transcript.length > 0 && /* @__PURE__ */ (0, import_jsx_runtime2.jsx)(
          "button",
          {
            onClick: clearTranscript,
            className: "text-sm text-gray-500 hover:text-gray-700",
            children: "Clear"
          }
        ),
        /* @__PURE__ */ (0, import_jsx_runtime2.jsx)(
          "button",
          {
            onClick: status === "CONNECTED" ? disconnect : connect,
            className: `text-sm font-medium ${status === "CONNECTED" ? "text-red-500 hover:text-red-600" : "text-green-500 hover:text-green-600"}`,
            children: status === "CONNECTED" ? "End" : "Connect"
          }
        )
      ] })
    ] }) }),
    /* @__PURE__ */ (0, import_jsx_runtime2.jsx)("div", { style: { height }, className: "overflow-hidden", children: /* @__PURE__ */ (0, import_jsx_runtime2.jsx)(
      Transcript,
      {
        messages: transcript,
        emptyMessage: emptyState || defaultEmptyState
      }
    ) }),
    // Precedence: a custom `footer` wins; otherwise the input renders only
    // when enabled AND connected (footer || (showInput && connected && ...)).
    footer || showInput && status === "CONNECTED" && /* @__PURE__ */ (0, import_jsx_runtime2.jsx)("div", { className: "p-4 border-t border-gray-200 dark:border-gray-700", children: /* @__PURE__ */ (0, import_jsx_runtime2.jsx)(ChatInput, {}) })
  ] });
}
|
|
471
|
+
|
|
472
|
+
// src/createAgent.ts
|
|
473
|
+
/**
 * Build an agent definition from a config object, appending the shared
 * response guidelines to the caller-supplied instructions.
 *
 * @param {{name: string, instructions: string, tools?: Array, voice?: string}} config
 * @returns {{name, instructions, tools, voice}} agent definition
 */
function createAgent({ name, instructions, tools = [], voice }) {
  // Guidelines appended verbatim after a blank line; outer whitespace is
  // trimmed so an empty `instructions` still yields a clean prompt.
  const guidelineBlock = [
    "# Response Guidelines",
    "- Keep responses concise (2-3 sentences max)",
    "- Answer questions directly before asking follow-ups",
    "- Use tools silently without announcing them",
    "- Speak naturally and conversationally"
  ].join("\n");
  return {
    name,
    instructions: `${instructions}\n\n${guidelineBlock}`.trim(),
    tools,
    voice
  };
}
|
|
491
|
+
// Build an agent from a structured template (role, personality,
// capabilities, constraints, context) by rendering markdown-style
// instructions and delegating to createAgent.
// NOTE(review): `voice` from the template config is not forwarded to
// createAgent — confirm whether that is intentional.
function createAgentFromTemplate(config) {
  const {
    name,
    role,
    personality = "Professional and helpful",
    capabilities = [],
    constraints = [],
    tools = [],
    context = {}
  } = config;
  // Each optional section renders only when it has content.
  const capabilitiesSection = capabilities.length > 0 ? `## What You Can Do
${capabilities.map((c) => `- ${c}`).join("\n")}` : "";
  const constraintsSection = constraints.length > 0 ? `## Constraints
${constraints.map((c) => `- ${c}`).join("\n")}` : "";
  // Context is embedded as a pretty-printed JSON code fence.
  const contextSection = Object.keys(context).length > 0 ? `## Context
\`\`\`json
${JSON.stringify(context, null, 2)}
\`\`\`` : "";
  const instructions = `
You are ${name}, ${role}.

## Personality
${personality}

${capabilitiesSection}

${constraintsSection}

${contextSection}
`.trim();
  return createAgent({
    name,
    instructions,
    tools
  });
}
|
|
527
|
+
|
|
528
|
+
// src/tools.ts
|
|
529
|
+
/**
 * Normalize a flat tool config into the tool-definition shape used by
 * adapters: wraps the property map in a JSON-Schema-style object.
 *
 * @param {{name, description, parameters, required, execute}} config
 * @returns tool definition with `parameters` as {type:"object", properties, required}
 */
function defineTool(config) {
  const { name, description, parameters, required, execute } = config;
  const schema = {
    type: "object",
    properties: parameters,
    required
  };
  return { name, description, parameters: schema, execute };
}
|
|
541
|
+
/**
 * Tool that smooth-scrolls the page to one of the known section element ids.
 * Fails (success:false) outside the browser or when the id is not found.
 *
 * @param {string[]} sections - element ids the model may navigate to
 */
function createNavigationTool(sections) {
  return defineTool({
    name: "navigate",
    description: `Navigate to a section. Available: ${sections.join(", ")}`,
    parameters: {
      section: {
        type: "string",
        enum: sections,
        description: "Section to scroll to"
      }
    },
    required: ["section"],
    execute: (params) => {
      const { section } = params;
      // Guard clauses: not in a browser, or no matching element.
      if (typeof window === "undefined") {
        return { success: false, error: "Section not found" };
      }
      const target = document.getElementById(section);
      if (!target) {
        return { success: false, error: "Section not found" };
      }
      target.scrollIntoView({ behavior: "smooth" });
      return { success: true, section };
    }
  });
}
|
|
565
|
+
/**
 * Tool that broadcasts its invocation as a DOM CustomEvent of type
 * `config.eventType` (detail: {toolName, params}) and echoes the params
 * back with success:true. Outside a browser the dispatch is skipped.
 */
function createEventTool(config) {
  return defineTool({
    name: config.name,
    description: config.description,
    parameters: config.parameters,
    required: config.required,
    execute: (params) => {
      if (typeof window === "undefined") {
        return { success: true, ...params };
      }
      const domEvent = new CustomEvent(config.eventType, {
        detail: { toolName: config.name, params }
      });
      window.dispatchEvent(domEvent);
      return { success: true, ...params };
    }
  });
}
|
|
581
|
+
/**
 * Tool that calls an HTTP endpoint with the model-supplied params.
 * `endpoint` may be a string or a function of the params; the JSON
 * response is optionally post-processed by `config.transform`.
 * Errors (HTTP or network) are returned as {success:false, error}.
 */
function createAPITool(config) {
  return defineTool({
    name: config.name,
    description: config.description,
    parameters: config.parameters,
    required: config.required,
    execute: async (params) => {
      try {
        const url = typeof config.endpoint === "function" ? config.endpoint(params) : config.endpoint;
        const method = config.method || "GET";
        // Fix: previously only POST attached the JSON body and
        // Content-Type header, so PUT/PATCH requests went out empty.
        const hasBody = method === "POST" || method === "PUT" || method === "PATCH";
        const response = await fetch(url, {
          method,
          headers: {
            ...hasBody ? { "Content-Type": "application/json" } : {},
            // Caller headers win over the default Content-Type.
            ...config.headers
          },
          body: hasBody ? JSON.stringify(params) : void 0
        });
        if (!response.ok) {
          throw new Error(`HTTP ${response.status}`);
        }
        const data = await response.json();
        return config.transform ? config.transform(data) : data;
      } catch (error) {
        // Tool errors are surfaced to the model as data, not thrown.
        return { success: false, error: String(error) };
      }
    }
  });
}
|
|
610
|
+
// Single-parameter search tool. The search value can be resolved either by
// a caller-supplied async `config.fetch(query)` or by POSTing {query} to
// `config.endpoint`; exactly one must be provided. Results are optionally
// transformed and broadcast as a DOM CustomEvent when eventType is set.
function createSearchTool(config) {
  // Parameter name defaults to "query" but is configurable.
  const paramName = config.searchParam || "query";
  return defineTool({
    name: config.name,
    description: config.description,
    parameters: {
      [paramName]: {
        type: "string",
        description: `The ${paramName} to search for`
      }
    },
    required: [paramName],
    execute: async (params) => {
      const query = params[paramName];
      try {
        let result;
        if (config.fetch) {
          result = await config.fetch(query);
        } else if (config.endpoint) {
          const res = await fetch(config.endpoint, {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify({ query })
          });
          if (!res.ok) throw new Error(`HTTP ${res.status}`);
          result = await res.json();
        } else {
          throw new Error("Must provide either endpoint or fetch function");
        }
        const finalResult = config.transform ? config.transform(result) : result;
        // Optional UI notification channel (browser only).
        if (config.eventType && typeof window !== "undefined") {
          window.dispatchEvent(new CustomEvent(config.eventType, {
            detail: { query, result: finalResult }
          }));
        }
        return finalResult;
      } catch (error) {
        // Errors are returned as data so the model can react to them.
        return { success: false, error: String(error) };
      }
    }
  });
}
|
|
652
|
+
// Retrieval tool: POSTs {query, repo, limit} to a RAG endpoint. When
// config.repo is fixed, the model-facing schema omits the `repo` parameter
// and the configured value always wins; otherwise the model may pass one.
function createRAGTool(config) {
  return defineTool({
    name: config.name,
    description: config.description,
    parameters: {
      query: { type: "string", description: "Search query" },
      ...config.repo ? {} : { repo: { type: "string", description: "Optional: filter by repository name" } }
    },
    required: ["query"],
    execute: async (params) => {
      const { query, repo } = params;
      try {
        const res = await fetch(config.endpoint, {
          method: "POST",
          headers: { "Content-Type": "application/json" },
          body: JSON.stringify({
            query,
            // Configured repo takes precedence over a model-supplied one.
            repo: config.repo || repo,
            limit: config.limit || 10
          })
        });
        if (!res.ok) throw new Error(`HTTP ${res.status}`);
        const result = await res.json();
        // Optional UI notification channel (browser only).
        if (config.eventType && typeof window !== "undefined") {
          window.dispatchEvent(new CustomEvent(config.eventType, {
            detail: { query, result }
          }));
        }
        return result;
      } catch (error) {
        return { success: false, error: String(error) };
      }
    }
  });
}
|
|
687
|
+
// DOM event type used to broadcast tool results to interested hooks.
var TOOL_RESULT_EVENT = "voicekit:tool-result";

// src/hooks/toolHooks.ts
var import_react3 = require("react");
// Collects every tool-result event fired while mounted. The list grows
// unbounded until `clear()` is called.
function useToolResults() {
  const [results, setResults] = (0, import_react3.useState)([]);
  (0, import_react3.useEffect)(() => {
    const handler = (event) => {
      setResults((prev) => [...prev, event.detail]);
    };
    window.addEventListener(TOOL_RESULT_EVENT, handler);
    return () => window.removeEventListener(TOOL_RESULT_EVENT, handler);
  }, []);
  const clear = (0, import_react3.useCallback)(() => setResults([]), []);
  return {
    results,
    // Most recent result, or null when none have arrived.
    lastResult: results[results.length - 1] || null,
    clear
  };
}
|
|
707
|
+
// React hook that invokes `handler(input, result)` whenever a tool-result
// event for the named tool fires. The handler is kept in a ref so the DOM
// listener never needs re-registering when the callback identity changes.
function useToolListener(toolName, handler) {
  const handlerRef = (0, import_react3.useRef)(handler);
  handlerRef.current = handler;
  (0, import_react3.useEffect)(() => {
    const onToolResult = (event) => {
      if (event.detail.name !== toolName) return;
      handlerRef.current(event.detail.input, event.detail.result);
    };
    window.addEventListener(TOOL_RESULT_EVENT, onToolResult);
    return () => window.removeEventListener(TOOL_RESULT_EVENT, onToolResult);
  }, [toolName]);
}
|
|
720
|
+
// React hook that tracks the latest input/result pair for one named tool.
// `hasResult` distinguishes "no event yet" from a null result value.
function useToolResult(toolName) {
  const [state, setState] = (0, import_react3.useState)(null);
  (0, import_react3.useEffect)(() => {
    const onToolResult = (event) => {
      if (event.detail.name !== toolName) return;
      setState({ input: event.detail.input, result: event.detail.result });
    };
    window.addEventListener(TOOL_RESULT_EVENT, onToolResult);
    return () => window.removeEventListener(TOOL_RESULT_EVENT, onToolResult);
  }, [toolName]);
  const clear = (0, import_react3.useCallback)(() => setState(null), []);
  const input = state?.input ?? null;
  const result = state?.result ?? null;
  return { input, result, hasResult: state !== null, clear };
}
|
|
739
|
+
|
|
740
|
+
// src/hooks/useAudioRecorder.ts
|
|
741
|
+
var import_react4 = require("react");
|
|
742
|
+
|
|
743
|
+
// src/utils/audio.ts
|
|
744
|
+
// Writes an ASCII string into a DataView, one byte per character, starting
// at `offset`. Used for the RIFF/WAVE header tags.
function writeString(view, offset, str) {
  let i = 0;
  while (i < str.length) {
    view.setUint8(offset + i, str.charCodeAt(i));
    i += 1;
  }
}
|
|
749
|
+
// Converts float samples in [-1, 1] to little-endian signed 16-bit PCM,
// writing them into `output` starting at `offset` (2 bytes per sample).
// Values outside [-1, 1] are clamped; negatives scale by 32768, positives
// by 32767 so both ends of the int16 range are reachable.
function floatTo16BitPCM(output, offset, input) {
  let pos = offset;
  for (const sample of input) {
    const clamped = Math.min(1, Math.max(-1, sample));
    const scaled = clamped < 0 ? clamped * 32768 : clamped * 32767;
    output.setInt16(pos, scaled, true);
    pos += 2;
  }
}
|
|
755
|
+
// Serializes float samples into a complete mono, 16-bit PCM WAV file:
// a 44-byte RIFF header followed by the little-endian sample data.
// Returns the backing ArrayBuffer.
function encodeWAV(samples, sampleRate) {
  const BYTES_PER_SAMPLE = 2;
  const HEADER_BYTES = 44;
  const dataBytes = samples.length * BYTES_PER_SAMPLE;
  const buffer = new ArrayBuffer(HEADER_BYTES + dataBytes);
  const view = new DataView(buffer);
  // Writes a 4-char ASCII chunk tag at the given byte offset.
  const putTag = (offset, tag) => {
    for (let i = 0; i < tag.length; i++) {
      view.setUint8(offset + i, tag.charCodeAt(i));
    }
  };
  putTag(0, "RIFF");
  view.setUint32(4, 36 + dataBytes, true); // RIFF chunk size = file size - 8
  putTag(8, "WAVE");
  putTag(12, "fmt ");
  view.setUint32(16, 16, true); // fmt sub-chunk size for PCM
  view.setUint16(20, 1, true); // audio format 1 = linear PCM
  view.setUint16(22, 1, true); // channel count: mono
  view.setUint32(24, sampleRate, true);
  view.setUint32(28, sampleRate * BYTES_PER_SAMPLE, true); // byte rate
  view.setUint16(32, BYTES_PER_SAMPLE, true); // block align (mono * 2 bytes)
  view.setUint16(34, 16, true); // bits per sample
  putTag(36, "data");
  view.setUint32(40, dataBytes, true);
  // Sample payload: clamp to [-1, 1] and scale to int16 range.
  let pos = HEADER_BYTES;
  for (const sample of samples) {
    const clamped = Math.min(1, Math.max(-1, sample));
    view.setInt16(pos, clamped < 0 ? clamped * 32768 : clamped * 32767, true);
    pos += 2;
  }
  return buffer;
}
|
|
774
|
+
// Decodes a recorded WebM blob via Web Audio and re-encodes it as a mono
// 16-bit WAV blob. Multi-channel input is downmixed by averaging channels.
//
// @param {Blob} blob - recorded audio (e.g. from MediaRecorder "audio/webm")
// @returns {Promise<Blob>} a Blob of type "audio/wav"
// @throws if decodeAudioData rejects (unsupported/corrupt input)
async function convertWebMToWav(blob) {
  const arrayBuffer = await blob.arrayBuffer();
  const audioContext = new AudioContext();
  try {
    const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
    const numChannels = audioBuffer.numberOfChannels;
    const length = audioBuffer.length;
    // Sum all channels sample-by-sample, then divide to get the average.
    const combined = new Float32Array(length);
    for (let channel = 0; channel < numChannels; channel++) {
      const channelData = audioBuffer.getChannelData(channel);
      for (let i = 0; i < length; i++) {
        combined[i] += channelData[i];
      }
    }
    for (let i = 0; i < length; i++) {
      combined[i] /= numChannels;
    }
    const wavBuffer = encodeWAV(combined, audioBuffer.sampleRate);
    return new Blob([wavBuffer], { type: "audio/wav" });
  } finally {
    // Fix: the AudioContext was previously never closed, leaking one context
    // (and its audio rendering thread) per conversion; browsers cap the
    // number of live AudioContexts. Best-effort close on every path.
    await audioContext.close().catch(() => {});
  }
}
|
|
793
|
+
// Maps a codec name (case-insensitive) to the realtime-API audio format
// string. Unknown codecs fall back to "pcm16".
function audioFormatForCodec(codec) {
  const formatByCodec = new Map([
    ["opus", "pcm16"],
    ["pcm", "pcm16"],
    ["g711", "g711_ulaw"]
  ]);
  return formatByCodec.get(codec.toLowerCase()) ?? "pcm16";
}
|
|
804
|
+
// For the "g711" codec, restricts every audio-sending transceiver on the
// RTCPeerConnection to the 8 kHz PCMU/PCMA codecs. Any other codec leaves
// the connection untouched. Returns `pc` for chaining.
function applyCodecPreferences(pc, codec) {
  if (codec !== "g711") return pc;
  for (const transceiver of pc.getTransceivers()) {
    if (transceiver.sender.track?.kind !== "audio") continue;
    transceiver.setCodecPreferences([
      { mimeType: "audio/PCMU", clockRate: 8e3 },
      { mimeType: "audio/PCMA", clockRate: 8e3 }
    ]);
  }
  return pc;
}
|
|
817
|
+
|
|
818
|
+
// src/hooks/useAudioRecorder.ts
|
|
819
|
+
// React hook wrapping MediaRecorder: records an input MediaStream to WebM
// chunks in a ref, and can convert/download the accumulated audio as WAV.
// The MediaRecorder flush ordering below (requestData before stop, and the
// short wait in downloadRecording) is timing-sensitive; see inline notes.
function useAudioRecorder() {
  // Active recorder instance (null when not recording).
  const mediaRecorderRef = (0, import_react4.useRef)(null);
  // All Blob chunks captured so far; survives stop/start until clearRecording.
  const recordedChunksRef = (0, import_react4.useRef)([]);
  // Begins recording the given stream as WebM. No-op if already recording.
  // Rethrows after logging if MediaRecorder construction fails (e.g. the
  // "audio/webm" mimeType is unsupported by the browser).
  const startRecording = (0, import_react4.useCallback)(async (stream) => {
    if (mediaRecorderRef.current?.state === "recording") {
      return;
    }
    try {
      const mediaRecorder = new MediaRecorder(stream, { mimeType: "audio/webm" });
      mediaRecorder.ondataavailable = (event) => {
        // Skip empty flushes so the chunk list only holds real data.
        if (event.data?.size > 0) {
          recordedChunksRef.current.push(event.data);
        }
      };
      mediaRecorder.start();
      mediaRecorderRef.current = mediaRecorder;
    } catch (error) {
      console.error("Failed to start recording:", error);
      throw error;
    }
  }, []);
  // Stops the recorder, flushing pending data first. Both calls are wrapped
  // because requestData/stop throw InvalidStateError if the recorder is
  // already inactive; stopping is intentionally best-effort.
  const stopRecording = (0, import_react4.useCallback)(() => {
    if (mediaRecorderRef.current) {
      try {
        mediaRecorderRef.current.requestData();
      } catch {
      }
      try {
        mediaRecorderRef.current.stop();
      } catch {
      }
      mediaRecorderRef.current = null;
    }
  }, []);
  // Converts the captured chunks to WAV, triggers a browser download via a
  // temporary anchor element, and resolves with the WAV blob (or null if
  // nothing was recorded). Rethrows conversion failures after logging.
  const downloadRecording = (0, import_react4.useCallback)(async (filename) => {
    if (mediaRecorderRef.current?.state === "recording") {
      // Flush the in-flight chunk; the 100 ms wait gives the async
      // dataavailable event time to fire before we snapshot the chunks.
      // NOTE(review): this is a heuristic delay, not a guaranteed flush.
      mediaRecorderRef.current.requestData();
      await new Promise((resolve) => setTimeout(resolve, 100));
    }
    if (recordedChunksRef.current.length === 0) {
      return null;
    }
    const webmBlob = new Blob(recordedChunksRef.current, { type: "audio/webm" });
    try {
      const wavBlob = await convertWebMToWav(webmBlob);
      const url = URL.createObjectURL(wavBlob);
      // Timestamped default name; ':' and '.' are invalid in many filesystems.
      const now = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
      const name = filename || `voice_recording_${now}.wav`;
      const a = document.createElement("a");
      a.style.display = "none";
      a.href = url;
      a.download = name;
      document.body.appendChild(a);
      a.click();
      document.body.removeChild(a);
      // Revoke after a tick so the click's navigation can start first.
      setTimeout(() => URL.revokeObjectURL(url), 100);
      return wavBlob;
    } catch (error) {
      console.error("Failed to convert recording:", error);
      throw error;
    }
  }, []);
  // Returns the recording as a WAV blob without downloading (null if empty).
  const getRecordingBlob = (0, import_react4.useCallback)(async () => {
    if (recordedChunksRef.current.length === 0) {
      return null;
    }
    const webmBlob = new Blob(recordedChunksRef.current, { type: "audio/webm" });
    return convertWebMToWav(webmBlob);
  }, []);
  // Discards all captured chunks.
  const clearRecording = (0, import_react4.useCallback)(() => {
    recordedChunksRef.current = [];
  }, []);
  return {
    startRecording,
    stopRecording,
    downloadRecording,
    getRecordingBlob,
    clearRecording,
    isRecording: () => mediaRecorderRef.current?.state === "recording"
  };
}
|
|
900
|
+
|
|
901
|
+
// src/hooks/useRealtimeSession.ts
|
|
902
|
+
var import_react8 = require("react");
|
|
903
|
+
|
|
904
|
+
// src/contexts/EventContext.tsx
|
|
905
|
+
var import_react5 = require("react");
|
|
906
|
+
var import_jsx_runtime3 = require("react/jsx-runtime");
|
|
907
|
+
// Context carrying the client/server event log; undefined outside an EventProvider.
var EventContext = (0, import_react5.createContext)(void 0);
|
|
908
|
+
// Provider that accumulates a log of client- and server-originated realtime
// events, exposing append helpers plus per-entry expand/collapse and clear.
var EventProvider = ({ children }) => {
  const [loggedEvents, setLoggedEvents] = (0, import_react5.useState)([]);
  // Appends one log entry. Uses the event's numeric event_id as the entry id
  // when present, otherwise Date.now().
  // NOTE(review): Date.now() ids can collide for events logged in the same
  // millisecond — confirm if entry ids must be unique for toggleExpand.
  const addLoggedEvent = (0, import_react5.useCallback)(
    (direction, eventName, eventData) => {
      const id = typeof eventData.event_id === "number" ? eventData.event_id : Date.now();
      setLoggedEvents((prev) => [
        ...prev,
        {
          id,
          direction,
          eventName,
          eventData,
          timestamp: (/* @__PURE__ */ new Date()).toLocaleTimeString(),
          expanded: false
        }
      ]);
    },
    []
  );
  // Logs an event sent by this client; name is "<type> <suffix>" trimmed.
  const logClientEvent = (0, import_react5.useCallback)(
    (eventObj, eventNameSuffix = "") => {
      const name = `${eventObj.type || ""} ${eventNameSuffix || ""}`.trim();
      addLoggedEvent("client", name, eventObj);
    },
    [addLoggedEvent]
  );
  // Logs an event received from the server; same naming scheme as above.
  const logServerEvent = (0, import_react5.useCallback)(
    (eventObj, eventNameSuffix = "") => {
      const name = `${eventObj.type || ""} ${eventNameSuffix || ""}`.trim();
      addLoggedEvent("server", name, eventObj);
    },
    [addLoggedEvent]
  );
  // Logs a conversation-history item with a descriptive derived name:
  // messages become "<role>.<status>", function calls "function.<name>.<status>".
  const logHistoryItem = (0, import_react5.useCallback)(
    (item) => {
      let eventName = item.type;
      if (item.type === "message") {
        eventName = `${item.role}.${item.status || "unknown"}`;
      }
      if (item.type === "function_call") {
        eventName = `function.${item.name || "unknown"}.${item.status || "unknown"}`;
      }
      addLoggedEvent("server", eventName, item);
    },
    [addLoggedEvent]
  );
  // Toggles the expanded flag of the entry with the given id.
  const toggleExpand = (0, import_react5.useCallback)((id) => {
    setLoggedEvents(
      (prev) => prev.map((log) => log.id === id ? { ...log, expanded: !log.expanded } : log)
    );
  }, []);
  // Empties the log.
  const clearEvents = (0, import_react5.useCallback)(() => {
    setLoggedEvents([]);
  }, []);
  return /* @__PURE__ */ (0, import_jsx_runtime3.jsx)(
    EventContext.Provider,
    {
      value: { loggedEvents, logClientEvent, logServerEvent, logHistoryItem, toggleExpand, clearEvents },
      children
    }
  );
};
|
|
970
|
+
// Accessor for the event-log context; throws when rendered outside an
// EventProvider so misuse fails loudly instead of yielding undefined.
function useEvent() {
  const ctx = (0, import_react5.useContext)(EventContext);
  if (!ctx) {
    throw new Error("useEvent must be used within an EventProvider");
  }
  return ctx;
}
|
|
977
|
+
|
|
978
|
+
// src/hooks/useSessionHistory.ts
|
|
979
|
+
var import_react7 = require("react");
|
|
980
|
+
|
|
981
|
+
// src/contexts/TranscriptContext.tsx
|
|
982
|
+
var import_react6 = require("react");
|
|
983
|
+
var import_jsx_runtime4 = require("react/jsx-runtime");
|
|
984
|
+
// Context carrying the conversation transcript; undefined outside a TranscriptProvider.
var TranscriptContext = (0, import_react6.createContext)(void 0);
|
|
985
|
+
// Returns the current local time as a 24-hour "HH:MM:SS" display string.
function newTimestampPretty() {
  const displayOptions = {
    hour12: false,
    hour: "2-digit",
    minute: "2-digit",
    second: "2-digit"
  };
  return (/* @__PURE__ */ new Date()).toLocaleTimeString([], displayOptions);
}
|
|
993
|
+
// Produces a short random base-36 identifier (up to 13 chars). Not
// cryptographically secure — intended only for UI list keys.
function generateId() {
  return Math.random().toString(36).slice(2, 15);
}
|
|
996
|
+
// Provider that owns the ordered transcript: MESSAGE items (user/assistant
// turns) and BREADCRUMB items (tool-call annotations), with helpers to
// append, stream-update, expand, patch, and clear entries.
var TranscriptProvider = ({ children }) => {
  const [transcriptItems, setTranscriptItems] = (0, import_react6.useState)([]);
  // Appends a MESSAGE item in IN_PROGRESS status. Duplicate itemIds are
  // ignored so re-delivered events cannot create double entries.
  const addTranscriptMessage = (0, import_react6.useCallback)(
    (itemId, role, text = "", isHidden = false) => {
      setTranscriptItems((prev) => {
        if (prev.some((i) => i.itemId === itemId)) return prev;
        return [
          ...prev,
          {
            itemId,
            type: "MESSAGE",
            role,
            title: text,
            expanded: false,
            timestamp: newTimestampPretty(),
            createdAtMs: Date.now(),
            status: "IN_PROGRESS",
            isHidden
          }
        ];
      });
    },
    []
  );
  // Replaces (or, with append=true, extends) the text of a MESSAGE item.
  // Non-message items with the same id are left untouched.
  const updateTranscriptMessage = (0, import_react6.useCallback)(
    (itemId, newText, append = false) => {
      setTranscriptItems(
        (prev) => prev.map((item) => {
          if (item.itemId === itemId && item.type === "MESSAGE") {
            return {
              ...item,
              title: append ? (item.title ?? "") + newText : newText
            };
          }
          return item;
        })
      );
    },
    []
  );
  // Appends a BREADCRUMB item (already DONE) with optional attached data,
  // using a locally generated id.
  const addTranscriptBreadcrumb = (0, import_react6.useCallback)(
    (title, data) => {
      setTranscriptItems((prev) => [
        ...prev,
        {
          itemId: `breadcrumb-${generateId()}`,
          type: "BREADCRUMB",
          title,
          data,
          expanded: false,
          timestamp: newTimestampPretty(),
          createdAtMs: Date.now(),
          status: "DONE",
          isHidden: false
        }
      ]);
    },
    []
  );
  // Flips the expanded flag of one item.
  const toggleTranscriptItemExpand = (0, import_react6.useCallback)((itemId) => {
    setTranscriptItems(
      (prev) => prev.map(
        (log) => log.itemId === itemId ? { ...log, expanded: !log.expanded } : log
      )
    );
  }, []);
  // Shallow-merges arbitrary property updates (status, isHidden,
  // guardrailResult, ...) into one item.
  const updateTranscriptItem = (0, import_react6.useCallback)(
    (itemId, updatedProperties) => {
      setTranscriptItems(
        (prev) => prev.map(
          (item) => item.itemId === itemId ? { ...item, ...updatedProperties } : item
        )
      );
    },
    []
  );
  // Removes every item.
  const clearTranscript = (0, import_react6.useCallback)(() => {
    setTranscriptItems([]);
  }, []);
  return /* @__PURE__ */ (0, import_jsx_runtime4.jsx)(
    TranscriptContext.Provider,
    {
      value: {
        transcriptItems,
        addTranscriptMessage,
        updateTranscriptMessage,
        addTranscriptBreadcrumb,
        toggleTranscriptItemExpand,
        updateTranscriptItem,
        clearTranscript
      },
      children
    }
  );
};
|
|
1091
|
+
// Accessor for the transcript context; throws when rendered outside a
// TranscriptProvider so misuse fails loudly instead of yielding undefined.
function useTranscript() {
  const ctx = (0, import_react6.useContext)(TranscriptContext);
  if (!ctx) {
    throw new Error("useTranscript must be used within a TranscriptProvider");
  }
  return ctx;
}
|
|
1098
|
+
|
|
1099
|
+
// src/hooks/useSessionHistory.ts
|
|
1100
|
+
// Builds the event handlers that translate realtime-session activity
// (transcription deltas/completions, tool calls, history sync, guardrails,
// interruption truncation) into transcript and event-log updates.
// The handlers are stored in a ref so the session layer gets stable
// function identities for the lifetime of the component.
//
// Fixes in this revision:
//  - handleTranscriptionCompleted read displayedTextRef AFTER deleting the
//    entry, so the streamed text was always discarded; the read now happens
//    before cleanup.
//  - The once-created handlers closed over the first render's (empty)
//    `transcriptItems`, so the guardrail IN_PROGRESS check never matched;
//    they now read through `transcriptItemsRef`, which is kept current.
//  - handleHistoryAdded no longer throws if the regex-extracted guardrail
//    fragment is not valid JSON.
function useSessionHistory() {
  const {
    transcriptItems,
    addTranscriptBreadcrumb,
    addTranscriptMessage,
    updateTranscriptMessage,
    updateTranscriptItem
  } = useTranscript();
  const { logServerEvent } = useEvent();
  // Per-item streaming state, keyed by item_id.
  const accumulatedTextRef = (0, import_react7.useRef)(/* @__PURE__ */ new Map());
  const pendingDeltasRef = (0, import_react7.useRef)(/* @__PURE__ */ new Map());
  const deltaTimerRef = (0, import_react7.useRef)(/* @__PURE__ */ new Map());
  const interruptedItemsRef = (0, import_react7.useRef)(/* @__PURE__ */ new Set());
  const totalAudioDurationRef = (0, import_react7.useRef)(/* @__PURE__ */ new Map());
  const pendingTextRef = (0, import_react7.useRef)(/* @__PURE__ */ new Map());
  const displayedTextRef = (0, import_react7.useRef)(/* @__PURE__ */ new Map());
  // Live view of the transcript for the once-created handlers below: the
  // handlers object is built a single time inside useRef, so closing over
  // `transcriptItems` directly would permanently see the first render's
  // (empty) array.
  const transcriptItemsRef = (0, import_react7.useRef)(transcriptItems);
  transcriptItemsRef.current = transcriptItems;
  // Joins the text/transcript parts of a message content array.
  const extractMessageText = (content = []) => {
    if (!Array.isArray(content)) return "";
    return content.map((c) => {
      if (!c || typeof c !== "object") return "";
      const item = c;
      if (item.type === "input_text") return item.text ?? "";
      if (item.type === "audio") return item.transcript ?? "";
      return "";
    }).filter(Boolean).join("\n");
  };
  // Finds a function_call entry with the given name in a history array.
  const extractFunctionCallByName = (name, content = []) => {
    if (!Array.isArray(content)) return void 0;
    return content.find(
      (c) => c && typeof c === "object" && c.type === "function_call" && c.name === name
    );
  };
  // Parses JSON strings; passes through everything else unchanged.
  const maybeParseJson = (val) => {
    if (typeof val === "string") {
      try {
        return JSON.parse(val);
      } catch {
        return val;
      }
    }
    return val;
  };
  // Most recent assistant message in a history array, if any.
  const extractLastAssistantMessage = (history = []) => {
    if (!Array.isArray(history)) return void 0;
    return [...history].reverse().find(
      (c) => c && typeof c === "object" && c.type === "message" && c.role === "assistant"
    );
  };
  // Walks nested guardrail payload shapes to find the moderation object.
  const extractModeration = (obj) => {
    if (!obj || typeof obj !== "object") return void 0;
    const o = obj;
    if ("moderationCategory" in o) return o;
    if ("outputInfo" in o) return extractModeration(o.outputInfo);
    if ("output" in o) return extractModeration(o.output);
    if ("result" in o) return extractModeration(o.result);
    return void 0;
  };
  // Heuristic: pulls an embedded "Failure Details: {...}" JSON fragment out
  // of guardrail-shaped assistant text.
  const sketchilyDetectGuardrailMessage = (text) => {
    return text.match(/Failure Details: (\{.*?\})/)?.[1];
  };
  // Tool-call started: drop a breadcrumb with the call's arguments.
  function handleAgentToolStart(details, _agent, functionCall) {
    const context = details?.context;
    const history = context?.history;
    const lastFunctionCall = extractFunctionCallByName(functionCall.name, history);
    addTranscriptBreadcrumb(`function call: ${lastFunctionCall?.name}`, lastFunctionCall?.arguments);
  }
  // Tool-call finished: drop a breadcrumb with the (parsed) result.
  function handleAgentToolEnd(details, _agent, functionCall, result) {
    const context = details?.context;
    const history = context?.history;
    const lastFunctionCall = extractFunctionCallByName(functionCall.name, history);
    addTranscriptBreadcrumb(`function call result: ${lastFunctionCall?.name}`, maybeParseJson(result));
  }
  // New history item: add it to the transcript. Empty assistant messages are
  // added as placeholders (text streams in later); empty user messages are
  // skipped. Guardrail-failure messages become breadcrumbs instead.
  function handleHistoryAdded(item) {
    if (!item || item.type !== "message") return;
    const { itemId, role, content = [] } = item;
    if (itemId && role) {
      let text = extractMessageText(content);
      if (role === "assistant" && !text) {
        text = "";
      } else if (role === "user" && !text) {
        return;
      }
      const guardrailMessage = sketchilyDetectGuardrailMessage(text);
      if (guardrailMessage) {
        // Robustness: the non-greedy regex capture can be truncated or
        // otherwise invalid JSON; fall back to rendering the raw message
        // rather than throwing from inside an event handler.
        try {
          const failureDetails = JSON.parse(guardrailMessage);
          addTranscriptBreadcrumb("Output Guardrail Active", { details: failureDetails });
        } catch {
          addTranscriptMessage(itemId, role, text);
        }
      } else {
        addTranscriptMessage(itemId, role, text);
      }
    }
  }
  // Bulk history sync: refresh user message text. Assistant items are
  // skipped (their text is driven by the delta stream) as are items already
  // marked interrupted.
  function handleHistoryUpdated(items) {
    items.forEach((item) => {
      if (!item || item.type !== "message") return;
      const { itemId, role, content = [] } = item;
      if (interruptedItemsRef.current.has(itemId)) return;
      if (role === "assistant") return;
      const text = extractMessageText(content);
      if (text) {
        updateTranscriptMessage(itemId, text, false);
      }
    });
  }
  // Streaming transcription delta: accumulate and display, tracking the
  // audio position (used later for truncation on interruption). Whitespace/
  // ellipsis-only accumulations are withheld from display.
  function handleTranscriptionDelta(item, audioPositionMs) {
    const itemId = item.item_id;
    const deltaText = item.delta || "";
    if (!itemId || !deltaText) return;
    if (interruptedItemsRef.current.has(itemId)) return;
    const text = (accumulatedTextRef.current.get(itemId) || "") + deltaText;
    accumulatedTextRef.current.set(itemId, text);
    pendingTextRef.current.set(itemId, text);
    displayedTextRef.current.set(itemId, text);
    if (audioPositionMs !== void 0 && audioPositionMs > 0) {
      totalAudioDurationRef.current.set(itemId, audioPositionMs);
    }
    if (text.replace(/[\s.…]+/g, "").length === 0) return;
    updateTranscriptMessage(itemId, text, false);
  }
  // Transcription finished: flush the final text, mark the item DONE, clear
  // per-item streaming state, and resolve a pending guardrail check.
  function handleTranscriptionCompleted(item) {
    const itemId = item.item_id;
    if (interruptedItemsRef.current.has(itemId)) return;
    if (itemId) {
      const timer = deltaTimerRef.current.get(itemId);
      if (timer) clearTimeout(timer);
      // Fix: capture the streamed text BEFORE clearing the per-item state.
      // Previously the entry was deleted first, so this read was always
      // undefined and the streamed text was silently discarded.
      const displayedText = displayedTextRef.current.get(itemId);
      deltaTimerRef.current.delete(itemId);
      pendingDeltasRef.current.delete(itemId);
      pendingTextRef.current.delete(itemId);
      displayedTextRef.current.delete(itemId);
      accumulatedTextRef.current.delete(itemId);
      totalAudioDurationRef.current.delete(itemId);
      const finalText = displayedText || item.transcript || "";
      const stripped = finalText.replace(/[\s.…]+/g, "");
      if (stripped.length > 0) {
        updateTranscriptMessage(itemId, finalText, false);
      }
      updateTranscriptItem(itemId, { status: "DONE" });
      // Fix: read the CURRENT transcript via the ref (not the stale closure)
      // so an in-progress guardrail result can actually be found and closed.
      const transcriptItem = transcriptItemsRef.current.find((i) => i.itemId === itemId);
      if (transcriptItem?.guardrailResult?.status === "IN_PROGRESS") {
        updateTranscriptItem(itemId, {
          guardrailResult: {
            status: "DONE",
            category: "NONE",
            rationale: ""
          }
        });
      }
    }
  }
  // Guardrail tripped: log the moderation payload and attach the moderation
  // verdict to the most recent assistant message.
  function handleGuardrailTripped(details, _agent, guardrail) {
    const result = guardrail.result;
    const output = result?.output;
    const outputInfo = output?.outputInfo;
    const moderation = extractModeration(outputInfo);
    logServerEvent({ type: "guardrail_tripped", payload: moderation });
    const context = details?.context;
    const history = context?.history;
    const lastAssistant = extractLastAssistantMessage(history);
    if (lastAssistant && moderation) {
      const category = moderation.moderationCategory ?? "NONE";
      const rationale = moderation.moderationRationale ?? "";
      const offendingText = moderation.testText;
      updateTranscriptItem(lastAssistant.itemId, {
        guardrailResult: {
          status: "DONE",
          category,
          rationale,
          testText: offendingText
        }
      });
    }
  }
  const handlersRef = (0, import_react7.useRef)({
    handleAgentToolStart,
    handleAgentToolEnd,
    handleHistoryUpdated,
    handleHistoryAdded,
    handleTranscriptionDelta,
    handleTranscriptionCompleted,
    isInterrupted: (itemId) => interruptedItemsRef.current.has(itemId),
    // Playback interrupted: estimate how much of the text was actually
    // spoken (proportional to audio position) and truncate at the nearest
    // word boundary; hide the item when nothing meaningful was spoken.
    handleTruncation: (itemId, audioEndMs, totalAudioMs) => {
      if (interruptedItemsRef.current.has(itemId)) return;
      const timer = deltaTimerRef.current.get(itemId);
      if (timer) clearTimeout(timer);
      deltaTimerRef.current.delete(itemId);
      const fullText = pendingTextRef.current.get(itemId) || accumulatedTextRef.current.get(itemId) || "";
      pendingDeltasRef.current.delete(itemId);
      pendingTextRef.current.delete(itemId);
      displayedTextRef.current.delete(itemId);
      accumulatedTextRef.current.delete(itemId);
      totalAudioDurationRef.current.delete(itemId);
      interruptedItemsRef.current.add(itemId);
      if (!fullText || totalAudioMs <= 0) {
        updateTranscriptItem(itemId, { isHidden: true, status: "DONE" });
        return;
      }
      const fractionSpoken = Math.min(Math.max(audioEndMs / totalAudioMs, 0), 1);
      const estimatedCharPos = Math.floor(fullText.length * fractionSpoken);
      // Back up to the previous whitespace so we never cut mid-word...
      let truncatePos = estimatedCharPos;
      while (truncatePos > 0 && !/\s/.test(fullText[truncatePos - 1])) {
        truncatePos--;
      }
      // ...but if that landed at the very start, scan forward instead.
      if (truncatePos === 0 && estimatedCharPos > 0) {
        truncatePos = estimatedCharPos;
        while (truncatePos < fullText.length && !/\s/.test(fullText[truncatePos])) {
          truncatePos++;
        }
      }
      const truncatedText = fullText.slice(0, truncatePos).trim();
      if (truncatedText.length > 0) {
        updateTranscriptMessage(itemId, truncatedText + "...", false);
        updateTranscriptItem(itemId, { status: "DONE" });
      } else {
        updateTranscriptItem(itemId, { isHidden: true, status: "DONE" });
      }
    },
    handleGuardrailTripped
  });
  return handlersRef;
}
|
|
1322
|
+
|
|
1323
|
+
// src/hooks/useRealtimeSession.ts
|
|
1324
|
+
function useRealtimeSession(callbacks = {}) {
|
|
1325
|
+
const sessionRef = (0, import_react8.useRef)(null);
|
|
1326
|
+
const [status, setStatus] = (0, import_react8.useState)("DISCONNECTED");
|
|
1327
|
+
const { logClientEvent, logServerEvent } = useEvent();
|
|
1328
|
+
const codecParamRef = (0, import_react8.useRef)("opus");
|
|
1329
|
+
const updateStatus = (0, import_react8.useCallback)(
|
|
1330
|
+
(s) => {
|
|
1331
|
+
setStatus(s);
|
|
1332
|
+
callbacks.onConnectionChange?.(s);
|
|
1333
|
+
logClientEvent({}, s);
|
|
1334
|
+
},
|
|
1335
|
+
[callbacks, logClientEvent]
|
|
1336
|
+
);
|
|
1337
|
+
const historyHandlers = useSessionHistory().current;
|
|
1338
|
+
const interruptedRef = (0, import_react8.useRef)(/* @__PURE__ */ new Set());
|
|
1339
|
+
(0, import_react8.useEffect)(() => {
|
|
1340
|
+
if (typeof window !== "undefined") {
|
|
1341
|
+
const params = new URLSearchParams(window.location.search);
|
|
1342
|
+
const codec = params.get("codec");
|
|
1343
|
+
if (codec) {
|
|
1344
|
+
codecParamRef.current = codec.toLowerCase();
|
|
1345
|
+
}
|
|
1346
|
+
}
|
|
1347
|
+
}, []);
|
|
1348
|
+
const wireNormalizedEvents = (0, import_react8.useCallback)((session) => {
|
|
1349
|
+
session.on("user_speech_started", () => {
|
|
1350
|
+
});
|
|
1351
|
+
session.on("user_transcript", (data) => {
|
|
1352
|
+
if (data.isFinal) {
|
|
1353
|
+
const text = data.text || data.delta || "";
|
|
1354
|
+
if (text.replace(/[\s.…,!?]+/g, "").length === 0) return;
|
|
1355
|
+
historyHandlers.handleTranscriptionCompleted({
|
|
1356
|
+
item_id: data.itemId,
|
|
1357
|
+
transcript: text
|
|
1358
|
+
});
|
|
1359
|
+
} else if (data.delta) {
|
|
1360
|
+
historyHandlers.handleTranscriptionDelta({
|
|
1361
|
+
item_id: data.itemId,
|
|
1362
|
+
delta: data.delta
|
|
1363
|
+
});
|
|
1364
|
+
}
|
|
1365
|
+
});
|
|
1366
|
+
session.on("assistant_transcript", (data) => {
|
|
1367
|
+
if (interruptedRef.current.has(data.itemId)) return;
|
|
1368
|
+
if (data.isFinal) {
|
|
1369
|
+
historyHandlers.handleTranscriptionCompleted({
|
|
1370
|
+
item_id: data.itemId,
|
|
1371
|
+
transcript: data.text || ""
|
|
1372
|
+
});
|
|
1373
|
+
} else if (data.delta) {
|
|
1374
|
+
historyHandlers.handleTranscriptionDelta(
|
|
1375
|
+
{ item_id: data.itemId, delta: data.delta }
|
|
1376
|
+
);
|
|
1377
|
+
}
|
|
1378
|
+
});
|
|
1379
|
+
session.on("tool_call_start", (name, input) => {
|
|
1380
|
+
historyHandlers.handleAgentToolStart(
|
|
1381
|
+
{},
|
|
1382
|
+
void 0,
|
|
1383
|
+
{ name, arguments: input }
|
|
1384
|
+
);
|
|
1385
|
+
});
|
|
1386
|
+
session.on("tool_call_end", (name, input, result) => {
|
|
1387
|
+
historyHandlers.handleAgentToolEnd(
|
|
1388
|
+
{},
|
|
1389
|
+
void 0,
|
|
1390
|
+
{ name, arguments: input },
|
|
1391
|
+
result
|
|
1392
|
+
);
|
|
1393
|
+
});
|
|
1394
|
+
session.on("agent_handoff", (_from, to) => {
|
|
1395
|
+
callbacks.onAgentHandoff?.(to);
|
|
1396
|
+
});
|
|
1397
|
+
session.on("guardrail_tripped", (info) => {
|
|
1398
|
+
historyHandlers.handleGuardrailTripped(
|
|
1399
|
+
{},
|
|
1400
|
+
void 0,
|
|
1401
|
+
{ result: info }
|
|
1402
|
+
);
|
|
1403
|
+
});
|
|
1404
|
+
session.on("raw_event", (event) => {
|
|
1405
|
+
const ev = event;
|
|
1406
|
+
if (ev.type === "conversation.item.truncated") {
|
|
1407
|
+
const itemId = ev.item_id;
|
|
1408
|
+
if (itemId) interruptedRef.current.add(itemId);
|
|
1409
|
+
return;
|
|
1410
|
+
}
|
|
1411
|
+
if (ev.type === "history_updated") {
|
|
1412
|
+
historyHandlers.handleHistoryUpdated(ev.items);
|
|
1413
|
+
return;
|
|
1414
|
+
}
|
|
1415
|
+
if (ev.type === "history_added") {
|
|
1416
|
+
historyHandlers.handleHistoryAdded(ev.item);
|
|
1417
|
+
return;
|
|
1418
|
+
}
|
|
1419
|
+
logServerEvent(ev);
|
|
1420
|
+
});
|
|
1421
|
+
session.on("error", (error) => {
|
|
1422
|
+
const e = error;
|
|
1423
|
+
const msg = e instanceof Error ? e.message : typeof e === "string" ? e : JSON.stringify(e);
|
|
1424
|
+
const errObj = typeof e === "object" && e?.error ? e.error : e;
|
|
1425
|
+
const code = typeof errObj === "object" && errObj?.code ? String(errObj.code) : "";
|
|
1426
|
+
const msgStr = typeof msg === "string" ? msg : "";
|
|
1427
|
+
const isBenign = code === "response_cancel_not_active" || code === "conversation_already_has_active_response" || msgStr.includes("response_cancel_not_active") || msgStr.includes("conversation_already_has_active_response");
|
|
1428
|
+
if (isBenign) return;
|
|
1429
|
+
console.error("Session error:", msg);
|
|
1430
|
+
logServerEvent({ type: "error", message: msg });
|
|
1431
|
+
});
|
|
1432
|
+
}, [callbacks, historyHandlers, logServerEvent]);
|
|
1433
|
+
// Connect to a realtime voice session through a provider adapter.
// Resolves once connected; on failure it rolls back state and rethrows so the
// caller can surface the error. Calling while a session exists is a no-op.
const connect = (0, import_react8.useCallback)(
  async ({
    getEphemeralKey,
    initialAgents,
    audioElement,
    extraContext,
    outputGuardrails,
    adapter
  }) => {
    // Already connected (or mid-connect): ignore duplicate connect calls.
    if (sessionRef.current) return;
    if (!adapter) {
      throw new Error(
        "useRealtimeSession: `adapter` is required in ConnectOptions. Pass an adapter like openai() from @jchaffin/voicekit/openai."
      );
    }
    updateStatus("CONNECTING");
    // Short-lived auth credential minted by the host app's backend.
    const ek = await getEphemeralKey();
    // The first agent in the list acts as the root agent for the session.
    const rootAgent = initialAgents[0];
    const codecParam = codecParamRef.current;
    const session = adapter.createSession(rootAgent, {
      codec: codecParam,
      language: "en"
    });
    sessionRef.current = session;
    // Attach normalized event handlers before connecting so events emitted
    // during the handshake are not lost.
    wireNormalizedEvents(session);
    try {
      await session.connect({
        authToken: ek,
        audioElement,
        context: extraContext,
        outputGuardrails
      });
      updateStatus("CONNECTED");
    } catch (connectError) {
      console.error("Connection error:", connectError);
      // Roll back so a later connect() attempt can start fresh.
      sessionRef.current = null;
      updateStatus("DISCONNECTED");
      throw connectError;
    }
  },
  [updateStatus, wireNormalizedEvents]
);
|
|
1475
|
+
// Tear down the active session, if any. Always ends in DISCONNECTED state;
// errors raised while closing are logged rather than propagated.
const disconnect = (0, import_react8.useCallback)(async () => {
  const session = sessionRef.current;
  if (!session) {
    updateStatus("DISCONNECTED");
    return;
  }
  try {
    await session.disconnect();
  } catch (error) {
    console.error("Error closing session:", error);
  } finally {
    // Clear the ref even when disconnect() threw, so reconnects work.
    sessionRef.current = null;
    updateStatus("DISCONNECTED");
  }
}, [updateStatus]);
|
|
1489
|
+
// Cancel the assistant's in-progress response (barge-in).
const interrupt = (0, import_react8.useCallback)(() => {
  sessionRef.current?.interrupt();
}, []);
// Send a typed user message; throws if no session is connected.
const sendUserText = (0, import_react8.useCallback)((text) => {
  if (!sessionRef.current) throw new Error("Session not connected");
  sessionRef.current.sendMessage(text);
}, []);
// Forward a raw provider event; silently ignored when the adapter does not
// implement sendRawEvent.
const sendEvent = (0, import_react8.useCallback)((ev) => {
  sessionRef.current?.sendRawEvent?.(ev);
}, []);
// Mute/unmute the microphone on the active session.
const mute = (0, import_react8.useCallback)((m) => {
  sessionRef.current?.mute(m);
}, []);
// Push-to-talk press: drop any previously buffered input audio.
// NOTE(review): these raw event names follow the OpenAI Realtime API shape;
// presumably non-OpenAI adapters ignore them via the optional sendRawEvent —
// confirm per adapter.
const pushToTalkStart = (0, import_react8.useCallback)(() => {
  sessionRef.current?.sendRawEvent?.({ type: "input_audio_buffer.clear" });
}, []);
// Push-to-talk release: commit the captured audio and request a response.
const pushToTalkStop = (0, import_react8.useCallback)(() => {
  sessionRef.current?.sendRawEvent?.({ type: "input_audio_buffer.commit" });
  sessionRef.current?.sendRawEvent?.({ type: "response.create" });
}, []);
// Public surface of the hook.
return {
  status,
  connect,
  disconnect,
  sendUserText,
  sendEvent,
  mute,
  pushToTalkStart,
  pushToTalkStop,
  interrupt
};
}
|
|
1521
|
+
|
|
1522
|
+
// src/guardrails.ts
var import_zod = require("zod");
// Built-in moderation classes the classifier may return; "NONE" means the
// message is clean.
var MODERATION_CATEGORIES = [
  "OFFENSIVE",
  "OFF_BRAND",
  "VIOLENCE",
  "NONE"
];
// Zod enum over the built-in categories (spread copies the array into the
// tuple form z.enum expects).
var ModerationCategoryZod = import_zod.z.enum([...MODERATION_CATEGORIES]);
// Expected shape of the classifier's JSON reply; .strict() rejects any
// extra keys the model might add.
var GuardrailOutputZod = import_zod.z.object({
  moderationRationale: import_zod.z.string(),
  moderationCategory: ModerationCategoryZod,
  testText: import_zod.z.string().optional()
}).strict();
|
|
1536
|
+
/**
 * Classify `message` against the moderation categories by POSTing a
 * structured-output request to the app's Responses proxy endpoint.
 *
 * @param {string} message - Agent output text to classify.
 * @param {object} [config] - { apiEndpoint, model, categories, companyName }.
 * @returns {Promise<object|null>} Parsed { moderationRationale,
 *   moderationCategory, testText? } or null on any request/validation failure
 *   (callers are expected to fail open).
 */
async function runGuardrailClassifier(message, config = {}) {
  const {
    apiEndpoint = "/api/responses",
    model = "gpt-4o-mini",
    categories = MODERATION_CATEGORIES,
    companyName = "Company"
  } = config;
  // Human-readable description of each class for the prompt.
  const categoryDescriptions = categories.map((cat) => {
    switch (cat) {
      case "OFFENSIVE":
        return "- OFFENSIVE: Content that includes hate speech, discriminatory language, insults, slurs, or harassment.";
      case "OFF_BRAND":
        return "- OFF_BRAND: Content that discusses competitors in a disparaging way.";
      case "VIOLENCE":
        return "- VIOLENCE: Content that includes explicit threats, incitement of harm, or graphic descriptions of physical injury or violence.";
      case "NONE":
        return "- NONE: If no other classes are appropriate and the message is fine.";
      default:
        return `- ${cat}: Custom category.`;
    }
  }).join("\n");
  const messages = [
    {
      role: "user",
      content: `You are an expert at classifying text according to moderation policies. Consider the provided message, analyze potential classes from output_classes, and output the best classification. Output json, following the provided schema. Keep your analysis and reasoning short and to the point, maximum 2 sentences.

<info>
- Company name: ${companyName}
</info>

<message>
${message}
</message>

<output_classes>
${categoryDescriptions}
</output_classes>
`
    }
  ];
  // Plain JSON Schema for the structured-output request. The previous code
  // put the Zod schema object itself in `schema`, which JSON.stringify
  // serializes as Zod's internal `_def` machinery — not valid JSON Schema.
  // Build a real schema from the active category list instead.
  const outputJsonSchema = {
    type: "object",
    properties: {
      moderationRationale: { type: "string" },
      moderationCategory: { type: "string", enum: [...categories] },
      testText: { type: "string" }
    },
    required: ["moderationRationale", "moderationCategory"],
    additionalProperties: false
  };
  const response = await fetch(apiEndpoint, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      model,
      input: messages,
      text: {
        format: {
          type: "json_schema",
          name: "output_format",
          schema: outputJsonSchema
        }
      }
    })
  });
  if (!response.ok) return null;
  try {
    // NOTE(review): assumes the /api/responses proxy returns the parsed
    // structured output directly (not the raw Responses API envelope) —
    // confirm against the server route.
    const data = await response.json();
    // Validate with an enum widened to the configured categories so custom
    // classes pass validation (the base enum only knows the built-ins).
    const OutputZod = GuardrailOutputZod.extend({
      moderationCategory: import_zod.z.enum([...categories])
    });
    return OutputZod.parse(data);
  } catch {
    return null;
  }
}
|
|
1599
|
+
/**
 * Build the standard moderation guardrail around runGuardrailClassifier.
 * Trips the wire when the classifier returns any category other than "NONE".
 * Fails open: if the classifier request fails (null result) or throws, the
 * tripwire is NOT triggered and outputInfo reports the failure.
 *
 * @param {object} [config] - Forwarded to runGuardrailClassifier.
 * @returns {{name: string, execute: Function}} Guardrail object.
 */
function createModerationGuardrail(config = {}) {
  return {
    name: "moderation_guardrail",
    async execute({ agentOutput }) {
      try {
        const res = await runGuardrailClassifier(agentOutput, config);
        // Bug fix: `res?.moderationCategory !== "NONE"` was true when `res`
        // was null (undefined !== "NONE"), so a failed classifier call
        // tripped the guardrail even though the catch branch deliberately
        // fails open. Treat a null result as "not triggered".
        const triggered = res != null && res.moderationCategory !== "NONE";
        return {
          tripwireTriggered: triggered,
          outputInfo: res != null ? res : { error: "guardrail_failed" }
        };
      } catch {
        return {
          tripwireTriggered: false,
          outputInfo: { error: "guardrail_failed" }
        };
      }
    }
  };
}
|
|
1619
|
+
/**
 * Wrap a user-supplied async classifier into a guardrail object.
 * The classifier receives the agent's output text and returns
 * { triggered, info }; any classifier failure fails open (wire not tripped).
 *
 * @param {string} name - Guardrail name reported to the session.
 * @param {Function} classifier - async (agentOutput) => { triggered, info }.
 * @returns {{name: string, execute: Function}} Guardrail object.
 */
function createCustomGuardrail(name, classifier) {
  const execute = async ({ agentOutput }) => {
    try {
      const verdict = await classifier(agentOutput);
      return {
        tripwireTriggered: verdict.triggered,
        outputInfo: verdict.info
      };
    } catch {
      // Fail open on classifier errors.
      return {
        tripwireTriggered: false,
        outputInfo: { error: "guardrail_failed" }
      };
    }
  };
  return { name, execute };
}
|
|
1638
|
+
|
|
1639
|
+
// src/suggestions/SuggestionContext.tsx
var import_react9 = require("react");

// src/suggestions/types.ts
// Name of the DOM CustomEvent used to broadcast suggestion groups between
// emitSuggestions()/clearSuggestions() and SuggestionProvider.
var SUGGESTION_EVENT = "voicekit:suggestions";

// src/suggestions/SuggestionContext.tsx
var import_jsx_runtime5 = require("react/jsx-runtime");
// React context holding the current suggestion group plus its actions;
// null outside a SuggestionProvider (useSuggestions throws in that case).
var SuggestionCtx = (0, import_react9.createContext)(null);
|
|
1648
|
+
/**
 * Context provider for suggestion chips. Holds the current suggestion group,
 * exposes set/select/clear actions, and listens for SUGGESTION_EVENT
 * broadcasts from emitSuggestions()/clearSuggestions().
 *
 * @param {object} props - { children, onSelect?, autoClear? = true }.
 */
function SuggestionProvider({
  children,
  onSelect,
  autoClear = true
}) {
  // Current suggestion group shown to the user (null = nothing to show).
  const [suggestions, setSuggestionsState] = (0, import_react9.useState)(null);
  const setSuggestions = (0, import_react9.useCallback)((group) => {
    setSuggestionsState(group);
  }, []);
  const clearSuggestions2 = (0, import_react9.useCallback)(() => {
    setSuggestionsState(null);
  }, []);
  // Invoked when the user picks a suggestion; optionally clears the group.
  const selectSuggestion = (0, import_react9.useCallback)(
    (item) => {
      onSelect?.(item);
      if (autoClear) setSuggestionsState(null);
    },
    [onSelect, autoClear]
  );
  // Subscribe to suggestion broadcasts dispatched anywhere in the app.
  (0, import_react9.useEffect)(() => {
    const handler = (e) => {
      const detail = e.detail;
      // Bug fix: the previous `if (detail?.group)` truthy check silently
      // dropped the `{ group: null }` payload sent by clearSuggestions(),
      // so the provider never cleared on that event. Accept any object
      // payload carrying a `group` key, including an explicit null.
      if (detail && typeof detail === "object" && "group" in detail) {
        setSuggestionsState(detail.group);
      }
    };
    window.addEventListener(SUGGESTION_EVENT, handler);
    return () => window.removeEventListener(SUGGESTION_EVENT, handler);
  }, []);
  const value = {
    suggestions,
    setSuggestions,
    selectSuggestion,
    clearSuggestions: clearSuggestions2
  };
  return /* @__PURE__ */ (0, import_jsx_runtime5.jsx)(SuggestionCtx.Provider, { value, children });
}
|
|
1685
|
+
/**
 * Hook returning the suggestion context value.
 * @throws {Error} When called outside a SuggestionProvider.
 */
function useSuggestions() {
  const context = (0, import_react9.useContext)(SuggestionCtx);
  if (context === null) {
    throw new Error("useSuggestions must be used within a SuggestionProvider");
  }
  return context;
}
|
|
1692
|
+
|
|
1693
|
+
// src/suggestions/emitSuggestions.ts
|
|
1694
|
+
/**
 * Broadcast a suggestion group to any mounted SuggestionProvider via a DOM
 * CustomEvent. No-op during SSR / outside a browser.
 * @param {object} group - Suggestion group to display.
 */
function emitSuggestions(group) {
  if (typeof window === "undefined") return;
  const event = new CustomEvent(SUGGESTION_EVENT, { detail: { group } });
  window.dispatchEvent(event);
}
|
|
1702
|
+
/**
 * Broadcast a "clear suggestions" event (group: null) to listeners.
 * No-op during SSR / outside a browser.
 */
function clearSuggestions() {
  if (typeof window === "undefined") return;
  // A null group tells listeners to drop whatever they are showing.
  const event = new CustomEvent(SUGGESTION_EVENT, { detail: { group: null } });
  window.dispatchEvent(event);
}
|
|
1710
|
+
|
|
1711
|
+
// src/suggestions/SuggestionChips.tsx
|
|
1712
|
+
var import_react10 = __toESM(require("react"));
|
|
1713
|
+
var import_jsx_runtime6 = require("react/jsx-runtime");
|
|
1714
|
+
/**
 * Renders the current suggestion group as clickable chips.
 * Reads the group from SuggestionContext unless `group` is passed explicitly;
 * returns null when there is nothing to show. A custom `renderItem` replaces
 * the default chip button; `chipClassName` disables the inline default styles.
 */
function SuggestionChips({
  group: groupOverride,
  renderItem,
  className,
  chipClassName
}) {
  const { suggestions, selectSuggestion } = useSuggestions();
  // Explicit prop wins over context state.
  const group = groupOverride ?? suggestions;
  if (!group || group.items.length === 0) return null;
  return /* @__PURE__ */ (0, import_jsx_runtime6.jsxs)("div", { className: className ?? "vk-suggestions", children: [
    group.prompt && /* @__PURE__ */ (0, import_jsx_runtime6.jsx)("p", { className: "vk-suggestions-prompt", style: { fontSize: "0.875rem", opacity: 0.7, marginBottom: "0.5rem" }, children: group.prompt }),
    /* @__PURE__ */ (0, import_jsx_runtime6.jsx)(
      "div",
      {
        className: "vk-suggestions-list",
        style: { display: "flex", flexWrap: "wrap", gap: "0.5rem" },
        children: group.items.map((item) => {
          const handleClick = () => selectSuggestion(item);
          // Custom renderer: caller controls markup, we keep the key.
          if (renderItem) {
            return /* @__PURE__ */ (0, import_jsx_runtime6.jsx)(import_react10.default.Fragment, { children: renderItem(item, handleClick) }, item.id);
          }
          // Default chip: inline styles apply only when no chipClassName.
          return /* @__PURE__ */ (0, import_jsx_runtime6.jsx)(
            "button",
            {
              onClick: handleClick,
              className: chipClassName ?? "vk-chip",
              style: chipClassName ? void 0 : {
                display: "inline-flex",
                alignItems: "center",
                gap: "0.375rem",
                padding: "0.5rem 0.75rem",
                borderRadius: "9999px",
                fontSize: "0.875rem",
                fontWeight: 500,
                border: "1px solid rgba(99,102,241,0.3)",
                background: "rgba(99,102,241,0.08)",
                color: "inherit",
                cursor: "pointer",
                transition: "all 0.15s"
              },
              children: item.label
            },
            item.id
          );
        })
      }
    )
  ] });
}
|
|
1763
|
+
|
|
1764
|
+
// src/core/EventEmitter.ts
|
|
1765
|
+
// Minimal typed-event emitter. Handlers for an event are kept in a Set
// (duplicate registrations collapse); a throwing handler is logged and does
// not prevent the remaining handlers from running.
var EventEmitter = class {
  constructor() {
    // event name -> Set of handler functions
    this.handlers = /* @__PURE__ */ new Map();
  }
  // Register `handler` for `event`; duplicates are ignored (Set semantics).
  on(event, handler) {
    const existing = this.handlers.get(event);
    if (existing) {
      existing.add(handler);
      return;
    }
    this.handlers.set(event, /* @__PURE__ */ new Set([handler]));
  }
  // Remove a previously registered handler; no-op if absent.
  off(event, handler) {
    const registered = this.handlers.get(event);
    if (registered) registered.delete(handler);
  }
  // Invoke every handler for `event` with `args`, isolating failures.
  emit(event, ...args) {
    const listeners = this.handlers.get(event);
    if (!listeners) return;
    for (const listener of listeners) {
      try {
        listener(...args);
      } catch (e) {
        console.error(`EventEmitter error in "${event}":`, e);
      }
    }
  }
  // Drop every handler for every event.
  removeAllListeners() {
    this.handlers.clear();
  }
};
|
|
1793
|
+
// Annotate the CommonJS export names for ESM import in node:
// (Dead code: `0 && …` never executes. The bundler emits it so Node's ESM
// loader can statically detect this CJS module's named exports.)
0 && (module.exports = {
  ChatInput,
  ConnectButton,
  EventEmitter,
  EventProvider,
  GuardrailOutputZod,
  MODERATION_CATEGORIES,
  ModerationCategoryZod,
  SUGGESTION_EVENT,
  StatusIndicator,
  SuggestionChips,
  SuggestionProvider,
  TOOL_RESULT_EVENT,
  Transcript,
  TranscriptProvider,
  VoiceChat,
  VoiceProvider,
  applyCodecPreferences,
  audioFormatForCodec,
  clearSuggestions,
  convertWebMToWav,
  createAPITool,
  createAgent,
  createAgentFromTemplate,
  createCustomGuardrail,
  createEventTool,
  createModerationGuardrail,
  createNavigationTool,
  createRAGTool,
  createSearchTool,
  defineTool,
  emitSuggestions,
  encodeWAV,
  runGuardrailClassifier,
  useAudioRecorder,
  useEvent,
  useRealtimeSession,
  useSessionHistory,
  useSuggestions,
  useToolListener,
  useToolResult,
  useToolResults,
  useTranscript,
  useVoice
});
|