@backbay/glia 0.2.0-alpha.6 → 0.2.0-alpha.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/audio/index.js +1145 -5
- package/dist/audio/index.js.map +1 -1
- package/dist/components/index.js +3187 -10
- package/dist/components/index.js.map +1 -1
- package/dist/core.js +19714 -12
- package/dist/core.js.map +1 -1
- package/dist/emotion/index.js +1 -1
- package/dist/emotion/index.js.map +1 -1
- package/dist/hooks/index.js +941 -6
- package/dist/hooks/index.js.map +1 -1
- package/dist/index.js +31841 -183
- package/dist/index.js.map +1 -1
- package/dist/primitives/index.js +21111 -57
- package/dist/primitives/index.js.map +1 -1
- package/dist/protocol/index.js +360 -2
- package/dist/protocol/index.js.map +1 -1
- package/dist/speakeasy/index.js +2786 -38
- package/dist/speakeasy/index.js.map +1 -1
- package/dist/styles.css +1 -1
- package/dist/theme/index.js +1150 -3
- package/dist/theme/index.js.map +1 -1
- package/dist/vision/index.js +370 -2
- package/dist/vision/index.js.map +1 -1
- package/dist/workspace/index.js +16824 -2
- package/dist/workspace/index.js.map +1 -1
- package/package.json +16 -10
- package/dist/AuroraBackground-AP6ZHVFA.js +0 -6
- package/dist/AuroraBackground-AP6ZHVFA.js.map +0 -1
- package/dist/BentoGrid-CDARICNM.js +0 -6
- package/dist/BentoGrid-CDARICNM.js.map +0 -1
- package/dist/CommandPalette-JCWJKRBY.js +0 -6
- package/dist/CommandPalette-JCWJKRBY.js.map +0 -1
- package/dist/Glass-H4X4ZI4P.js +0 -7
- package/dist/Glass-H4X4ZI4P.js.map +0 -1
- package/dist/GlitchText-KLQ57PPY.js +0 -6
- package/dist/GlitchText-KLQ57PPY.js.map +0 -1
- package/dist/GlowButton-VGBPMZO7.js +0 -6
- package/dist/GlowButton-VGBPMZO7.js.map +0 -1
- package/dist/Graph3D-GO7N2EZQ.js +0 -540
- package/dist/Graph3D-GO7N2EZQ.js.map +0 -1
- package/dist/HUDProgressRing-N6C5NAEV.js +0 -6
- package/dist/HUDProgressRing-N6C5NAEV.js.map +0 -1
- package/dist/KPIStat-PBQK27ZB.js +0 -6
- package/dist/KPIStat-PBQK27ZB.js.map +0 -1
- package/dist/NeonToast-W5F7MU3U.js +0 -6
- package/dist/NeonToast-W5F7MU3U.js.map +0 -1
- package/dist/ParticleField-WK6CNHWU.js +0 -51
- package/dist/ParticleField-WK6CNHWU.js.map +0 -1
- package/dist/TextGenerateEffect-EUCEIIUJ.js +0 -6
- package/dist/TextGenerateEffect-EUCEIIUJ.js.map +0 -1
- package/dist/ThreeDCard-VH5I3SSY.js +0 -6
- package/dist/ThreeDCard-VH5I3SSY.js.map +0 -1
- package/dist/TypingAnimation-GIWOHPIX.js +0 -6
- package/dist/TypingAnimation-GIWOHPIX.js.map +0 -1
- package/dist/alert-dialog-QOSYBIIE.js +0 -19
- package/dist/alert-dialog-QOSYBIIE.js.map +0 -1
- package/dist/avatar-N5R37PCU.js +0 -10
- package/dist/avatar-N5R37PCU.js.map +0 -1
- package/dist/badge-GTVIIGPY.js +0 -8
- package/dist/badge-GTVIIGPY.js.map +0 -1
- package/dist/button-D7IMSV2D.js +0 -8
- package/dist/button-D7IMSV2D.js.map +0 -1
- package/dist/chunk-3CMPQOMY.js +0 -69
- package/dist/chunk-3CMPQOMY.js.map +0 -1
- package/dist/chunk-3OQT6IYR.js +0 -41
- package/dist/chunk-3OQT6IYR.js.map +0 -1
- package/dist/chunk-43B2WVLS.js +0 -85
- package/dist/chunk-43B2WVLS.js.map +0 -1
- package/dist/chunk-4SRFO5W3.js +0 -121
- package/dist/chunk-4SRFO5W3.js.map +0 -1
- package/dist/chunk-5IZELOOU.js +0 -362
- package/dist/chunk-5IZELOOU.js.map +0 -1
- package/dist/chunk-6DM4ACSS.js +0 -154
- package/dist/chunk-6DM4ACSS.js.map +0 -1
- package/dist/chunk-6IGT34PC.js +0 -50
- package/dist/chunk-6IGT34PC.js.map +0 -1
- package/dist/chunk-6RKBCJHN.js +0 -194
- package/dist/chunk-6RKBCJHN.js.map +0 -1
- package/dist/chunk-6RX2WGCO.js +0 -108
- package/dist/chunk-6RX2WGCO.js.map +0 -1
- package/dist/chunk-7K4WZM3U.js +0 -189
- package/dist/chunk-7K4WZM3U.js.map +0 -1
- package/dist/chunk-7MDBHJPT.js +0 -407
- package/dist/chunk-7MDBHJPT.js.map +0 -1
- package/dist/chunk-7UQD6ROV.js +0 -9
- package/dist/chunk-7UQD6ROV.js.map +0 -1
- package/dist/chunk-AFNIVLZP.js +0 -1069
- package/dist/chunk-AFNIVLZP.js.map +0 -1
- package/dist/chunk-ANWYRO6A.js +0 -407
- package/dist/chunk-ANWYRO6A.js.map +0 -1
- package/dist/chunk-DIXPOHDO.js +0 -71
- package/dist/chunk-DIXPOHDO.js.map +0 -1
- package/dist/chunk-DWYMKYPI.js +0 -3
- package/dist/chunk-DWYMKYPI.js.map +0 -1
- package/dist/chunk-E3NVDCZG.js +0 -280
- package/dist/chunk-E3NVDCZG.js.map +0 -1
- package/dist/chunk-EBM7YBKL.js +0 -399
- package/dist/chunk-EBM7YBKL.js.map +0 -1
- package/dist/chunk-EPAM7IWW.js +0 -294
- package/dist/chunk-EPAM7IWW.js.map +0 -1
- package/dist/chunk-EXQ7GYRS.js +0 -134
- package/dist/chunk-EXQ7GYRS.js.map +0 -1
- package/dist/chunk-F4QTUZ3C.js +0 -136
- package/dist/chunk-F4QTUZ3C.js.map +0 -1
- package/dist/chunk-FEW533R2.js +0 -105
- package/dist/chunk-FEW533R2.js.map +0 -1
- package/dist/chunk-FFZLJKC7.js +0 -270
- package/dist/chunk-FFZLJKC7.js.map +0 -1
- package/dist/chunk-GEAMOBF7.js +0 -8486
- package/dist/chunk-GEAMOBF7.js.map +0 -1
- package/dist/chunk-GRTRSCTD.js +0 -74
- package/dist/chunk-GRTRSCTD.js.map +0 -1
- package/dist/chunk-IKGYOGLK.js +0 -16
- package/dist/chunk-IKGYOGLK.js.map +0 -1
- package/dist/chunk-IQ7WYWVJ.js +0 -73
- package/dist/chunk-IQ7WYWVJ.js.map +0 -1
- package/dist/chunk-IXIVWQLF.js +0 -543
- package/dist/chunk-IXIVWQLF.js.map +0 -1
- package/dist/chunk-JCJU57RC.js +0 -115
- package/dist/chunk-JCJU57RC.js.map +0 -1
- package/dist/chunk-KORSTBU4.js +0 -117
- package/dist/chunk-KORSTBU4.js.map +0 -1
- package/dist/chunk-KSEZ6UM2.js +0 -235
- package/dist/chunk-KSEZ6UM2.js.map +0 -1
- package/dist/chunk-MHPF7R3O.js +0 -1376
- package/dist/chunk-MHPF7R3O.js.map +0 -1
- package/dist/chunk-MPC5IH7E.js +0 -81
- package/dist/chunk-MPC5IH7E.js.map +0 -1
- package/dist/chunk-MQIU2NYA.js +0 -114
- package/dist/chunk-MQIU2NYA.js.map +0 -1
- package/dist/chunk-NYMBJOGR.js +0 -2192
- package/dist/chunk-NYMBJOGR.js.map +0 -1
- package/dist/chunk-OBZD2M3C.js +0 -169
- package/dist/chunk-OBZD2M3C.js.map +0 -1
- package/dist/chunk-ODM2AG6G.js +0 -176
- package/dist/chunk-ODM2AG6G.js.map +0 -1
- package/dist/chunk-ONDKF5LP.js +0 -53
- package/dist/chunk-ONDKF5LP.js.map +0 -1
- package/dist/chunk-P25YCWQB.js +0 -41
- package/dist/chunk-P25YCWQB.js.map +0 -1
- package/dist/chunk-PFYVNM6H.js +0 -14
- package/dist/chunk-PFYVNM6H.js.map +0 -1
- package/dist/chunk-PWNNSGFL.js +0 -20
- package/dist/chunk-PWNNSGFL.js.map +0 -1
- package/dist/chunk-Q2PGZVOT.js +0 -36
- package/dist/chunk-Q2PGZVOT.js.map +0 -1
- package/dist/chunk-Q2XDMV7U.js +0 -76
- package/dist/chunk-Q2XDMV7U.js.map +0 -1
- package/dist/chunk-QG7FH2FI.js +0 -45
- package/dist/chunk-QG7FH2FI.js.map +0 -1
- package/dist/chunk-R7HUOK2D.js +0 -1914
- package/dist/chunk-R7HUOK2D.js.map +0 -1
- package/dist/chunk-REUYY7G5.js +0 -773
- package/dist/chunk-REUYY7G5.js.map +0 -1
- package/dist/chunk-RHC2Z2HT.js +0 -199
- package/dist/chunk-RHC2Z2HT.js.map +0 -1
- package/dist/chunk-RMCVLIFE.js +0 -23
- package/dist/chunk-RMCVLIFE.js.map +0 -1
- package/dist/chunk-ROZLTXGR.js +0 -234
- package/dist/chunk-ROZLTXGR.js.map +0 -1
- package/dist/chunk-RSS2C2O3.js +0 -17
- package/dist/chunk-RSS2C2O3.js.map +0 -1
- package/dist/chunk-SAGCG5SH.js +0 -355
- package/dist/chunk-SAGCG5SH.js.map +0 -1
- package/dist/chunk-TM6AOUSD.js +0 -40
- package/dist/chunk-TM6AOUSD.js.map +0 -1
- package/dist/chunk-TPK4BYCO.js +0 -970
- package/dist/chunk-TPK4BYCO.js.map +0 -1
- package/dist/chunk-UNQIL4K2.js +0 -34
- package/dist/chunk-UNQIL4K2.js.map +0 -1
- package/dist/chunk-UUG6L75Y.js +0 -47
- package/dist/chunk-UUG6L75Y.js.map +0 -1
- package/dist/chunk-V2SYMV4W.js +0 -114
- package/dist/chunk-V2SYMV4W.js.map +0 -1
- package/dist/chunk-V7EN5CTH.js +0 -130
- package/dist/chunk-V7EN5CTH.js.map +0 -1
- package/dist/chunk-VITKG2HL.js +0 -1125
- package/dist/chunk-VITKG2HL.js.map +0 -1
- package/dist/chunk-VYEWU5LO.js +0 -2631
- package/dist/chunk-VYEWU5LO.js.map +0 -1
- package/dist/chunk-W67QAGSH.js +0 -178
- package/dist/chunk-W67QAGSH.js.map +0 -1
- package/dist/chunk-WWBIN6KV.js +0 -1353
- package/dist/chunk-WWBIN6KV.js.map +0 -1
- package/dist/chunk-X77Z4PFB.js +0 -224
- package/dist/chunk-X77Z4PFB.js.map +0 -1
- package/dist/chunk-X7VG7OTT.js +0 -8
- package/dist/chunk-X7VG7OTT.js.map +0 -1
- package/dist/chunk-XE4K2SGI.js +0 -74
- package/dist/chunk-XE4K2SGI.js.map +0 -1
- package/dist/chunk-YIUG7IJK.js +0 -628
- package/dist/chunk-YIUG7IJK.js.map +0 -1
- package/dist/chunk-YNVN3V4Y.js +0 -13
- package/dist/chunk-YNVN3V4Y.js.map +0 -1
- package/dist/chunk-Z2S54IZX.js +0 -198
- package/dist/chunk-Z2S54IZX.js.map +0 -1
- package/dist/chunk-ZR6AH25Z.js +0 -17
- package/dist/chunk-ZR6AH25Z.js.map +0 -1
- package/dist/dialog-SPM3DTTI.js +0 -17
- package/dist/dialog-SPM3DTTI.js.map +0 -1
- package/dist/dropdown-menu-HMTWKWGK.js +0 -21
- package/dist/dropdown-menu-HMTWKWGK.js.map +0 -1
- package/dist/input-BH4P4S26.js +0 -6
- package/dist/input-BH4P4S26.js.map +0 -1
- package/dist/label-5Z4Q6VER.js +0 -8
- package/dist/label-5Z4Q6VER.js.map +0 -1
- package/dist/popover-IFOUXYLI.js +0 -18
- package/dist/popover-IFOUXYLI.js.map +0 -1
- package/dist/scroll-area-DJXNW6QX.js +0 -14
- package/dist/scroll-area-DJXNW6QX.js.map +0 -1
- package/dist/select-FZ277C3G.js +0 -22
- package/dist/select-FZ277C3G.js.map +0 -1
- package/dist/separator-BTMLN4NB.js +0 -8
- package/dist/separator-BTMLN4NB.js.map +0 -1
- package/dist/skeleton-DXIWBH4W.js +0 -6
- package/dist/skeleton-DXIWBH4W.js.map +0 -1
- package/dist/switch-4MCXIZBY.js +0 -13
- package/dist/switch-4MCXIZBY.js.map +0 -1
- package/dist/tabs-O7AW3APK.js +0 -17
- package/dist/tabs-O7AW3APK.js.map +0 -1
- package/dist/textarea-IB5WAFDO.js +0 -6
- package/dist/textarea-IB5WAFDO.js.map +0 -1
- package/dist/toggle-XVPPG6P4.js +0 -10
- package/dist/toggle-XVPPG6P4.js.map +0 -1
- package/dist/tooltip-JICZTD4F.js +0 -18
- package/dist/tooltip-JICZTD4F.js.map +0 -1
package/dist/audio/index.js
CHANGED
|
@@ -1,7 +1,1147 @@
|
|
|
1
|
-
export
|
|
2
|
-
import '
|
|
3
|
-
import '
|
|
4
|
-
import '
|
|
5
|
-
|
|
1
|
+
export * from '@backbay/glia-agent/audio';
|
|
2
|
+
import { createContext, useRef, useEffect, useCallback, useState, useMemo, useContext } from 'react';
|
|
3
|
+
import 'react/jsx-runtime';
|
|
4
|
+
import { z } from 'zod';
|
|
5
|
+
|
|
6
|
+
// src/audio/index.ts
|
|
7
|
+
// Shared context carrying the BackBay client configuration and services.
var BBContext = createContext(null);

/**
 * Read the current BackBay context value.
 * Throws when no BBProvider is mounted above the calling component.
 */
function useBBContext() {
  const value = useContext(BBContext);
  if (value) {
    return value;
  }
  throw new Error("useBBContext must be used within a BBProvider");
}
|
|
15
|
+
|
|
16
|
+
// src/hooks/useRunStream.ts
|
|
17
|
+
/**
 * Follow live updates for a single run.
 *
 * Strategy: open an SSE connection to `{apiBaseUrl}/runs/{runId}/events`;
 * when SSE errors (or `preferPolling` is set) fall back to polling
 * `{apiBaseUrl}/runs/{runId}` every `pollInterval` ms. A 2s timer also
 * starts polling if SSE has not reported connected yet.
 *
 * @param runId   Run to follow; a nullish id disconnects and stays idle.
 * @param options { pollInterval?, preferPolling?, onEvent?, onComplete?, onError? }
 * @returns { events, latestEvent, status, isConnected, run, disconnect, reconnect }
 */
function useRunStream(runId, options = {}) {
  const { pollInterval = 4e3, preferPolling = false, onEvent, onComplete, onError } = options;
  const { config } = useBBContext();
  const [events, setEvents] = useState([]);
  const [latestEvent, setLatestEvent] = useState(null);
  const [status, setStatus] = useState("idle");
  const [isConnected, setIsConnected] = useState(false);
  const [run, setRun] = useState(null);
  // Live transport handles; refs so callbacks always see the current ones.
  const eventSourceRef = useRef(null);
  const pollingIntervalRef = useRef(null);
  // Cleared on disconnect so late callbacks become no-ops.
  const isActiveRef = useRef(true);
  // Normalize every update (SSE or poll) into the event list, the latest
  // event, the status, and the terminal-run callbacks.
  const handleEvent = useCallback(
    (event) => {
      if (!isActiveRef.current) return;
      setEvents((prev) => [...prev, event]);
      setLatestEvent(event);
      onEvent?.(event);
      if (event.data?.status) {
        setStatus(event.data.status);
      }
      if (event.type === "completed" || event.type === "failed" || event.type === "cancelled") {
        const finalStatus = event.type === "completed" ? "completed" : event.type === "failed" ? "failed" : "cancelled";
        setStatus(finalStatus);
        if (event.data?.run) {
          setRun(event.data.run);
          if (event.type === "completed") {
            // onComplete only fires when the terminal event carries the run.
            onComplete?.(event.data.run);
          }
        }
      }
    },
    [onEvent, onComplete]
  );
  // Primary transport: server-sent events.
  const setupEventSource = useCallback(() => {
    if (!runId || preferPolling) return;
    const url = `${config.apiBaseUrl}/runs/${runId}/events`;
    const eventSource = new EventSource(url);
    eventSource.onopen = () => {
      setIsConnected(true);
      // Synthetic event so consumers can observe the connection itself.
      handleEvent({
        type: "connected",
        runId,
        timestamp: Date.now()
      });
    };
    eventSource.onmessage = (e) => {
      try {
        const data = JSON.parse(e.data);
        handleEvent({
          type: data.type ?? "status",
          runId,
          timestamp: Date.now(),
          data
        });
      } catch (err) {
        console.error("Failed to parse event:", err);
      }
    };
    eventSource.onerror = () => {
      // SSE failed; degrade to polling (startPolling is initialized by the
      // time any error can fire, so the later declaration is safe here).
      setIsConnected(false);
      startPolling();
    };
    eventSourceRef.current = eventSource;
  }, [runId, config.apiBaseUrl, preferPolling, handleEvent]);
  // Fallback transport: interval polling of the run resource.
  const startPolling = useCallback(() => {
    // No-op when already polling or there is nothing to poll.
    if (!runId || pollingIntervalRef.current) return;
    const poll = async () => {
      if (!isActiveRef.current || !runId) return;
      try {
        const response = await fetch(`${config.apiBaseUrl}/runs/${runId}`, {
          headers: config.headers
        });
        if (!response.ok) {
          throw new Error(`HTTP ${response.status}`);
        }
        const data = await response.json();
        handleEvent({
          type: "status",
          runId,
          timestamp: Date.now(),
          data: {
            status: data.status,
            output: data.output,
            run: data
          }
        });
        if (data.status === "completed" || data.status === "failed" || data.status === "cancelled") {
          // Terminal state: stop the interval and emit a terminal event so
          // handleEvent runs its completion path.
          stopPolling();
          handleEvent({
            type: data.status,
            runId,
            timestamp: Date.now(),
            data: { run: data }
          });
        }
      } catch (err) {
        if (err instanceof Error) {
          onError?.(err);
        }
      }
    };
    // Poll immediately, then on the interval.
    poll();
    pollingIntervalRef.current = setInterval(poll, pollInterval);
  }, [runId, config, pollInterval, handleEvent, onError]);
  const stopPolling = useCallback(() => {
    if (pollingIntervalRef.current) {
      clearInterval(pollingIntervalRef.current);
      pollingIntervalRef.current = null;
    }
  }, []);
  // Tear down both transports and mark the stream inactive.
  const disconnect = useCallback(() => {
    isActiveRef.current = false;
    if (eventSourceRef.current) {
      eventSourceRef.current.close();
      eventSourceRef.current = null;
    }
    stopPolling();
    setIsConnected(false);
  }, [stopPolling]);
  // Reset accumulated state and re-establish the preferred transport.
  const reconnect = useCallback(() => {
    disconnect();
    isActiveRef.current = true;
    setEvents([]);
    setLatestEvent(null);
    setStatus("idle");
    if (preferPolling) {
      startPolling();
    } else {
      setupEventSource();
    }
  }, [disconnect, preferPolling, startPolling, setupEventSource]);
  // Lifecycle: (re)connect whenever runId changes; deps intentionally
  // limited to [runId] so transport churn does not retrigger the effect.
  useEffect(() => {
    if (!runId) {
      disconnect();
      return;
    }
    isActiveRef.current = true;
    setEvents([]);
    setLatestEvent(null);
    setStatus("running");
    if (preferPolling) {
      startPolling();
    } else {
      setupEventSource();
      // Safety net: if SSE has not connected after 2s, start polling too.
      // NOTE(review): this timeout is not cleared on cleanup and reads the
      // `isConnected` value captured when the effect ran; isActiveRef guards
      // the stale-unmount case — confirm the capture is intended.
      setTimeout(() => {
        if (isActiveRef.current && !isConnected) {
          startPolling();
        }
      }, 2e3);
    }
    return () => {
      disconnect();
    };
  }, [runId]);
  return {
    events,
    latestEvent,
    status,
    isConnected,
    run,
    disconnect,
    reconnect
  };
}
|
|
181
|
+
|
|
182
|
+
// src/audio/hooks/useRunSpeechCues.ts
|
|
183
|
+
// Default cue tokens emitted on run-status transitions.
var DEFAULT_RUN_SPEECH_CUES = {
  onStart: "hold",
  onComplete: "done",
  onFail: "error",
  onCancel: "warning"
};

/**
 * Emit a speech cue whenever the followed run changes status.
 * Emission is rate-limited by `cooldownMs` and skipped entirely while
 * disabled or without a runId. Custom `cues` override the defaults.
 */
function useRunSpeechCues(options) {
  const { runId, enabled = true, emit, cues, cooldownMs = 800 } = options;
  const { status } = useRunStream(runId);
  const prevStatusRef = useRef(null);
  const lastEmitAtRef = useRef(0);
  useEffect(() => {
    if (!enabled || !runId) return;
    const previous = prevStatusRef.current;
    prevStatusRef.current = status;
    // Only react to actual transitions.
    if (previous === status) return;
    const now = Date.now();
    if (now - lastEmitAtRef.current < cooldownMs) return;
    const cueMap = { ...DEFAULT_RUN_SPEECH_CUES, ...cues ?? {} };
    const cueKeyByStatus = {
      running: "onStart",
      completed: "onComplete",
      failed: "onFail",
      cancelled: "onCancel"
    };
    const cueKey = cueKeyByStatus[status];
    const token = cueKey ? cueMap[cueKey] : null;
    if (!token) return;
    lastEmitAtRef.current = now;
    void emit(token, { runId, status });
  }, [enabled, runId, status, emit, cues, cooldownMs]);
}
|
|
213
|
+
|
|
214
|
+
// src/cognition/types.ts
|
|
215
|
+
/**
 * Clamp a numeric value into the inclusive range [0, 1].
 * NaN propagates unchanged, per Math.min/Math.max semantics.
 */
function clamp01(value) {
  const atMostOne = Math.min(1, value);
  return Math.max(0, atMostOne);
}
|
|
218
|
+
// Neutral arousal/valence/openness baseline shared by mood and emotion.
var DEFAULT_AVO = { arousal: 0.25, valence: 0.6, openness: 0.35 };

/**
 * Build a fresh cognition state. Fields supplied via `overrides` win
 * over the defaults; AVO objects are copied so callers cannot mutate
 * the shared baseline.
 */
function createInitialCognitionState(overrides) {
  const defaults = {
    mode: "idle",
    attention: 0.3,
    workload: 0,
    timePressure: 0,
    planDrift: 0,
    costPressure: 0,
    risk: 0,
    uncertainty: 0.2,
    confidence: 0.8,
    errorStress: 0,
    personaAnchor: 1,
    personaDriftRisk: 0,
    moodAVO: { ...DEFAULT_AVO },
    emotionAVO: { ...DEFAULT_AVO }
  };
  return { ...defaults, ...overrides };
}
|
|
238
|
+
|
|
239
|
+
// src/cognition/reducers.ts
|
|
240
|
+
// Maps incoming cognition event types to the mode the controller should
// enter when that event is seen. Event types absent from this map leave
// the mode unchanged (see reduceModeTransition).
var MODE_TRANSITION_MAP = {
  "ui.input_received": "listening",
  "ui.user_idle": "idle",
  "ui.interrupt": "listening",
  "run.started": "deliberating",
  "run.completed": "idle",
  "run.event": "deliberating"
};
|
|
248
|
+
/**
 * Merge a partial signals update into the state. Every supplied value is
 * clamped into [0, 1]; signals left undefined keep their current value.
 * Returns a new state object.
 */
function reduceSignals(state, signals) {
  const merged = { ...state };
  const clampedKeys = [
    "attention",
    "workload",
    "risk",
    "timePressure",
    "errorStress",
    "planDrift",
    "costPressure",
    "uncertainty",
    "confidence"
  ];
  for (const key of clampedKeys) {
    if (signals[key] !== void 0) {
      merged[key] = clamp01(signals[key]);
    }
  }
  return merged;
}
|
|
262
|
+
/**
 * Replace the dynamics configuration on the state.
 * Returns a new state object; the input state is not mutated.
 */
function reduceDynamicsUpdate(state, dynamics) {
  const next = { ...state };
  next.dynamics = dynamics;
  return next;
}
|
|
268
|
+
/**
 * Apply a policy and/or personality update. A nullish argument keeps the
 * existing value, so callers may update either field independently.
 * Returns a new state object.
 */
function reducePolicyUpdate(state, policy, personality) {
  const nextPolicy = policy == null ? state.policy : policy;
  const nextPersonality = personality == null ? state.personality : personality;
  return { ...state, policy: nextPolicy, personality: nextPersonality };
}
|
|
275
|
+
/**
 * Transition the cognitive mode in response to a UI or run event.
 * Event types with no mapping leave the state untouched (same reference).
 * run.started / run.event take focus of the run; an unsuccessful
 * run.completed bumps errorStress and forces the "recovering" mode.
 */
function reduceModeTransition(state, event) {
  const mappedMode = MODE_TRANSITION_MAP[event.type];
  if (!mappedMode) {
    return state;
  }
  let mode = mappedMode;
  let focusRunId = state.focusRunId;
  let errorStress = state.errorStress;
  switch (event.type) {
    case "run.started":
    case "run.event":
      focusRunId = event.runId;
      break;
    case "run.completed":
      focusRunId = void 0;
      if (!event.success) {
        errorStress = clamp01(state.errorStress + 0.2);
        mode = "recovering";
      }
      break;
    default:
      break;
  }
  return { ...state, mode, focusRunId, errorStress };
}
|
|
302
|
+
/**
 * Apply exponential time-based decay to the transient stress signals.
 * Each signal decays at its own per-second rate; results stay in [0, 1].
 */
function reduceDecay(state, deltaMs) {
  const seconds = deltaMs / 1e3;
  const decay = (value, ratePerSec) => clamp01(value * Math.exp(-ratePerSec * seconds));
  return {
    ...state,
    errorStress: decay(state.errorStress, 0.1),
    timePressure: decay(state.timePressure, 0.05),
    planDrift: decay(state.planDrift, 0.08)
  };
}
|
|
320
|
+
/**
 * Root reducer: route a cognition event to the matching sub-reducer.
 * Unknown event types return the state unchanged (same reference).
 */
function reduceEvent(state, event) {
  const { type } = event;
  if (type === "signals.update") {
    return reduceSignals(state, event.signals);
  }
  if (type === "intensity.update") {
    return reduceSignals(state, event.values);
  }
  if (type === "dynamics.update") {
    return reduceDynamicsUpdate(state, event.dynamics);
  }
  if (type === "policy.update") {
    return reducePolicyUpdate(state, event.policy, event.personality);
  }
  if (type === "tick") {
    return reduceDecay(state, event.deltaMs);
  }
  const modeTriggers = [
    "ui.input_received",
    "ui.user_idle",
    "ui.interrupt",
    "run.started",
    "run.completed",
    "run.event"
  ];
  if (modeTriggers.includes(type)) {
    return reduceModeTransition(state, event);
  }
  if (type === "text.user_message") {
    const categories = event.categories ?? [];
    // meta_reflection takes precedence over vulnerable_disclosure.
    let incomingRisk = 0;
    if (categories.includes("meta_reflection")) {
      incomingRisk = 0.3;
    } else if (categories.includes("vulnerable_disclosure")) {
      incomingRisk = 0.4;
    }
    // Exponential moving average: 70% prior risk, 30% incoming.
    const personaDriftRisk = clamp01(
      state.personaDriftRisk * 0.7 + incomingRisk * 0.3
    );
    return {
      ...state,
      mode: "listening",
      personaDriftRisk
    };
  }
  return state;
}
|
|
356
|
+
|
|
357
|
+
// src/cognition/controller.ts
|
|
358
|
+
// Cognitive mode -> emotion anchor name consumed by the emotion system.
var MODE_TO_ANCHOR = {
  idle: "idle",
  listening: "listening",
  deliberating: "thinking",
  acting: "focused",
  explaining: "explaining",
  recovering: "recovering",
  blocked: "concerned"
};
// Cognitive mode -> baseline arousal/valence/openness values; these are
// further adjusted by live signals in CognitionController._adjustAVOBySignals.
var MODE_TO_AVO = {
  idle: { arousal: 0.25, valence: 0.6, openness: 0.35 },
  listening: { arousal: 0.45, valence: 0.7, openness: 0.05 },
  deliberating: { arousal: 0.6, valence: 0.6, openness: 0.4 },
  acting: { arousal: 0.7, valence: 0.7, openness: 0.5 },
  explaining: { arousal: 0.55, valence: 0.8, openness: 0.85 },
  recovering: { arousal: 0.4, valence: 0.45, openness: 0.4 },
  blocked: { arousal: 0.55, valence: 0.3, openness: 0.3 }
};
|
|
376
|
+
/**
 * Event-driven state machine for the agent's cognitive/affective state.
 * Consumes cognition events via handleEvent(), applies per-frame decay via
 * tick(), and notifies subscribers through on("change" | "modeChange").
 */
var CognitionController = class {
  _state;                                  // current cognition state snapshot
  _listeners = /* @__PURE__ */ new Map();  // event name -> Set of handlers
  _disposed = false;                       // once true, mutators become no-ops
  constructor(options = {}) {
    this._state = createInitialCognitionState(options.initial);
  }
  /**
   * Get current cognition state
   */
  getState() {
    // Shallow copy so callers cannot mutate internal state directly.
    return { ...this._state };
  }
  /**
   * Get the emotion target for the current cognitive state
   * Returns anchor state and AVO values adjusted by signals
   */
  getEmotionTarget() {
    const anchor = MODE_TO_ANCHOR[this._state.mode];
    const baseAVO = MODE_TO_AVO[this._state.mode];
    const avo = this._adjustAVOBySignals(baseAVO);
    return { anchor, avo };
  }
  /**
   * Get emotion bridge - derives anchor and AVO from cognitive state
   * @deprecated Use getEmotionTarget() instead
   */
  getEmotionBridge() {
    const target = this.getEmotionTarget();
    return { anchor: target.anchor, avo: target.avo };
  }
  /**
   * Handle a cognition event and update state
   * Emits "modeChange" when the mode changed, then always emits "change".
   */
  handleEvent(event) {
    if (this._disposed) return;
    const prevMode = this._state.mode;
    this._state = reduceEvent(this._state, event);
    if (this._state.mode !== prevMode) {
      this._emitEvent("modeChange", { from: prevMode, to: this._state.mode });
    }
    this._emitEvent("change", this._state);
  }
  /**
   * Process an event and update state
   * @deprecated Use handleEvent() instead
   */
  emit(event) {
    this.handleEvent(event);
  }
  /**
   * Update tick - call each frame with delta time in milliseconds
   * Applies time-based decay to stress signals
   * Emits "change" only when a decayed value actually differed.
   */
  tick(deltaMs) {
    if (this._disposed) return;
    const prevState = this._state;
    this._state = reduceDecay(this._state, deltaMs);
    if (prevState.errorStress !== this._state.errorStress || prevState.timePressure !== this._state.timePressure || prevState.planDrift !== this._state.planDrift) {
      this._emitEvent("change", this._state);
    }
  }
  /**
   * Subscribe to controller events
   * Returns unsubscribe function
   */
  on(event, handler) {
    if (!this._listeners.has(event)) {
      this._listeners.set(event, /* @__PURE__ */ new Set());
    }
    this._listeners.get(event).add(handler);
    return () => {
      this._listeners.get(event)?.delete(handler);
    };
  }
  /**
   * Dispose controller and clean up resources
   * Permanent: a disposed controller ignores further events and ticks.
   */
  dispose() {
    this._disposed = true;
    this._listeners.clear();
  }
  /**
   * Adjust AVO values based on current signal levels
   * Workload/time pressure/error stress raise arousal; error stress and
   * uncertainty lower valence (confidence offsets it); persona drift risk
   * lowers openness. All outputs clamped to [0, 1].
   */
  _adjustAVOBySignals(baseAVO) {
    const { errorStress, workload, timePressure, uncertainty, confidence, personaDriftRisk } = this._state;
    const arousalBoost = workload * 0.2 + timePressure * 0.15 + errorStress * 0.1;
    const valenceDrops = errorStress * 0.3 + uncertainty * 0.15;
    const valenceBoost = (confidence - 0.5) * 0.2;
    const opennessDrop = personaDriftRisk * 0.3;
    return {
      arousal: Math.max(0, Math.min(1, baseAVO.arousal + arousalBoost)),
      valence: Math.max(0, Math.min(1, baseAVO.valence - valenceDrops + valenceBoost)),
      openness: Math.max(0, Math.min(1, baseAVO.openness - opennessDrop))
    };
  }
  // Invoke all handlers registered for `event` with the payload.
  _emitEvent(event, data) {
    this._listeners.get(event)?.forEach((handler) => handler(data));
  }
};
|
|
477
|
+
|
|
478
|
+
// src/cognition/hooks/useCognition.ts
|
|
479
|
+
/**
 * React binding for CognitionController.
 * Owns a controller instance for the component's lifetime, mirrors its
 * state and emotion target into React state, and (by default) drives
 * time-based decay each animation frame.
 *
 * @param options.initial  Overrides for the initial cognition state.
 * @param options.onChange Called with the new state after every change.
 * @param options.autoTick When true (default), run a rAF loop calling tick().
 * @returns { state, emotion, handleEvent, tick, emit } — emit aliases handleEvent.
 */
function useCognition(options = {}) {
  const { initial, onChange, autoTick = true } = options;
  // Lazily create the controller once; the ref keeps it stable across renders.
  const controllerRef = useRef(null);
  if (!controllerRef.current) {
    controllerRef.current = new CognitionController({ initial });
  }
  const controller = controllerRef.current;
  const [state, setState] = useState(controller.getState());
  const [emotion, setEmotion] = useState(
    controller.getEmotionTarget()
  );
  // Track the latest onChange without resubscribing the "change" listener.
  const onChangeRef = useRef(onChange);
  onChangeRef.current = onChange;
  // Mirror controller changes into React state.
  useEffect(() => {
    const unsub = controller.on("change", (newState) => {
      setState({ ...newState });
      setEmotion(controller.getEmotionTarget());
      onChangeRef.current?.(newState);
    });
    return () => {
      unsub();
    };
  }, [controller]);
  // Optional rAF loop feeding elapsed milliseconds into controller.tick().
  useEffect(() => {
    if (!autoTick) return;
    let lastTime = performance.now();
    let rafId;
    const tickFn = (time) => {
      const delta = time - lastTime;
      lastTime = time;
      controller.tick(delta);
      rafId = requestAnimationFrame(tickFn);
    };
    rafId = requestAnimationFrame(tickFn);
    return () => {
      cancelAnimationFrame(rafId);
    };
  }, [controller, autoTick]);
  // Dispose the controller on unmount.
  // NOTE(review): dispose() is permanent and the ref'd controller is reused
  // on remount (e.g. React StrictMode double-mount) — confirm intended.
  useEffect(() => {
    return () => {
      controller.dispose();
    };
  }, [controller]);
  const handleEvent = useCallback(
    (event) => {
      controller.handleEvent(event);
    },
    [controller]
  );
  const tick = useCallback(
    (deltaMs) => {
      controller.tick(deltaMs);
    },
    [controller]
  );
  // Backward-compatible alias for handleEvent.
  const emit = handleEvent;
  return {
    state,
    emotion,
    handleEvent,
    tick,
    emit
  };
}
|
|
543
|
+
|
|
544
|
+
// src/audio/types.ts
|
|
545
|
+
// Fallback target affect for speech planning when the caller supplies none.
var DEFAULT_TARGET_AVO = {
  // Match the emotion system's "idle" anchor for a stable neutral baseline.
  arousal: 0.25,
  valence: 0.6,
  openness: 0.35
};
|
|
551
|
+
/**
 * Clamp a numeric value into [0, 1].
 * (Bundler-renamed duplicate of the cognition module's clamp01.)
 */
function clamp012(value) {
  const atMostOne = Math.min(1, value);
  return Math.max(0, atMostOne);
}
|
|
554
|
+
/**
 * Generate a unique trace identifier.
 * Prefers crypto.randomUUID(); falls back to a timestamp + random hex
 * suffix in environments without the Web Crypto API.
 */
function createTraceId() {
  const hasRandomUuid = typeof crypto !== "undefined" && "randomUUID" in crypto;
  if (hasRandomUuid) {
    return crypto.randomUUID();
  }
  const suffix = Math.random().toString(16).slice(2);
  return `trace_${Date.now()}_${suffix}`;
}
|
|
560
|
+
|
|
561
|
+
// src/audio/planner.ts
|
|
562
|
+
/**
 * Return the first catalog voice permitted by the allow-list, or null when
 * none qualifies. A nullish allow-list permits every voice.
 */
function pickFirstAllowedVoiceId(voices, allowedVoiceIds) {
  const isAllowed = (id) => !allowedVoiceIds || allowedVoiceIds.includes(id);
  const match = voices.list().find((entry) => isAllowed(entry.voiceId));
  return match ? match.voiceId : null;
}
|
|
571
|
+
/**
 * Pick a voice id honoring, in order: the preferred voice (when present in
 * the catalog, allowed, and carrying the required tag), then the first
 * catalog entry passing the allow-list and tag filters.
 * Returns null when nothing fits.
 */
function pickVoiceId(args) {
  const { voices, allowedVoiceIds, preferredVoiceId, requiredTag } = args;
  const passesAllowList = (entry) => !allowedVoiceIds || allowedVoiceIds.includes(entry.voiceId);
  const passesTag = (entry) => !requiredTag || (entry.tags ?? []).includes(requiredTag);
  if (preferredVoiceId) {
    const preferred = voices.get(preferredVoiceId);
    if (preferred && passesAllowList(preferred) && passesTag(preferred)) {
      return preferred.voiceId;
    }
  }
  const fallback = voices.list().find((entry) => passesAllowList(entry) && passesTag(entry));
  return fallback ? fallback.voiceId : null;
}
|
|
588
|
+
/**
 * Soften over-punctuated text: collapse runs of `!`/`?` (2+) to the run's
 * first character, and runs of 3+ dots to a single ellipsis character.
 * @param {string} text
 * @returns {string}
 */
function stripExcessPunctuation(text) {
  const collapsedBangs = text.replace(/([!?])[!?]+/g, "$1");
  return collapsedBangs.replace(/\.\.\.+/g, "\u2026");
}
/**
 * Stronger normalization for safety mode: after stripping excess punctuation,
 * convert every remaining exclamation mark into a period.
 * @param {string} text
 * @returns {string}
 */
function clampTextForSafetyMode(text) {
  return stripExcessPunctuation(text).replaceAll("!", ".");
}
|
|
594
|
+
/**
 * Build a speech-synthesis request from raw text plus policy/cognition signals.
 *
 * Grounded mode is forced when safety mode is on, persona-drift risk is high
 * (>= 0.6), or risk is high while confidence is low (risk >= 0.7 && confidence <= 0.5).
 * Grounded mode prefers voices tagged `groundedVoiceTag` and caps temperature
 * at 0.25; otherwise the `defaultVoiceTag` is preferred. If no tagged voice
 * matches, selection falls back to any allowed voice.
 *
 * @param {object} input - { text, language, runId, targetAffect, policy, voices, signals, defaults }
 * @returns {object} Planned request: traceId, runId, text, language, voiceId,
 *   targetAffect, controls, and a policy snapshot.
 * @throws {Error} When the catalog is empty or policy blocks every voice.
 */
function planSpeech(input) {
  const { text, language, runId, targetAffect, policy, voices, signals, defaults } = input;
  const driftRisk = clamp012(signals?.personaDriftRisk ?? 0);
  const confidence = clamp012(signals?.confidence ?? 0.8);
  // Risk defaults to the complement of confidence when not reported.
  const risk = clamp012(signals?.risk ?? 1 - confidence);
  const groundedTag = defaults?.groundedVoiceTag ?? "grounded";
  const defaultTag = defaults?.defaultVoiceTag ?? "default";
  const requireGrounded =
    policy.safetyMode || driftRisk >= 0.6 || (risk >= 0.7 && confidence <= 0.5);
  const baseArgs = {
    voices,
    allowedVoiceIds: policy.allowedVoiceIds,
    preferredVoiceId: defaults?.voiceId
  };
  // Try the mode-appropriate tag first, then any allowed voice honoring the
  // preferred id, then the first allowed catalog entry.
  const voiceId =
    pickVoiceId({ ...baseArgs, requiredTag: requireGrounded ? groundedTag : defaultTag }) ??
    pickVoiceId(baseArgs) ??
    pickFirstAllowedVoiceId(voices, policy.allowedVoiceIds);
  if (!voiceId) {
    throw new Error("No available voices (voice catalog is empty or policy blocks all voices)");
  }
  const baseTemperature = clamp012(defaults?.temperature ?? 0.65);
  // Grounded speech stays deterministic: temperature is capped hard at 0.25.
  const temperature = requireGrounded ? Math.min(baseTemperature, 0.25) : baseTemperature;
  // Safety mode (or very high drift risk, >= 0.7) gets the stronger text clamp.
  const plannedText =
    policy.safetyMode || driftRisk >= 0.7 ? clampTextForSafetyMode(text) : stripExcessPunctuation(text);
  return {
    traceId: createTraceId(),
    runId,
    text: plannedText,
    language,
    voiceId,
    targetAffect: targetAffect ?? DEFAULT_TARGET_AVO,
    controls: { temperature },
    policy: {
      safetyMode: policy.safetyMode,
      trustTier: policy.trustTier,
      voiceCloningAllowed: policy.voiceCloningAllowed
    }
  };
}
|
|
650
|
+
/**
 * Adapter over `planSpeech` that derives the signal bundle from a cognition
 * state object. An explicitly supplied `targetAffect` wins; otherwise the
 * cognition state's `emotionAVO` is used.
 * @param {object} input - planSpeech input plus a `cognition` state.
 * @returns {object} The planned speech request.
 */
function planSpeechFromCognition(input) {
  const { cognition, ...rest } = input;
  return planSpeech({
    ...rest,
    targetAffect: rest.targetAffect ?? cognition.emotionAVO,
    signals: {
      mode: cognition.mode,
      personaDriftRisk: cognition.personaDriftRisk,
      confidence: cognition.confidence,
      risk: cognition.risk
    }
  });
}
|
|
665
|
+
// --- AudioProof zod schemas ---
// Accepted AudioProof document versions.
var AudioProofVersionSchema = z.enum(["1.0"]);
// Container/encoding formats a synthesis provider may emit.
var AudioFormatSchema = z.enum(["wav", "pcm_s16le", "opus", "mp3", "flac"]);
// License bucket for the voice used in synthesis.
var VoiceLicenseCategorySchema = z.enum(["cc0", "cc-by", "cc-by-nc", "custom", "unknown"]);
// Affect vector (arousal/valence/openness), each component normalized to [0, 1].
var AvoSchema = z.object({
  valence: z.number().min(0).max(1),
  arousal: z.number().min(0).max(1),
  openness: z.number().min(0).max(1)
});
// One produced audio artifact with integrity metadata; sha256 is mandatory.
var AudioArtifactSchema = z.object({
  id: z.string().min(1),
  uri: z.string().optional(),
  format: AudioFormatSchema,
  sha256: z.string().min(32),
  sampleRateHz: z.number().int().min(8e3).optional(),
  channels: z.number().int().min(1).max(8).optional(),
  durationMs: z.number().int().min(1)
});
// Outcome of a single verification gate: pass/fail plus optional metrics/reason.
var AudioGateResultSchema = z.object({
  passed: z.boolean(),
  metrics: z.record(z.string(), z.unknown()).optional(),
  reason: z.string().optional()
});
// Bundle of gate results. quality/semantic/affect are required; all others optional.
var AudioGatesSchema = z.object({
  quality: AudioGateResultSchema,
  semantic: AudioGateResultSchema,
  affect: AudioGateResultSchema,
  multimodalConsistency: AudioGateResultSchema.optional(),
  watermark: AudioGateResultSchema.optional(),
  speakerConsistency: AudioGateResultSchema.optional(),
  antiSpoof: AudioGateResultSchema.optional(),
  mos: AudioGateResultSchema.optional(),
  safetyText: AudioGateResultSchema.optional(),
  safetyAudio: AudioGateResultSchema.optional()
});
// Pointer to supporting evidence: a run, a run receipt, an artifact, or a UI component.
var EvidenceRefSchema = z.object({
  type: z.enum(["run", "run_receipt", "artifact", "ui"]),
  runId: z.string().optional(),
  receiptHash: z.string().optional(),
  path: z.string().optional(),
  digest: z.string().optional(),
  componentId: z.string().optional(),
  note: z.string().optional()
});
// Top-level AudioProof document:
//   manifest - what was requested (text, affect, policy snapshot);
//   proof    - what was produced and how it was checked (synthesis details,
//              attempts, artifacts, gate results, evidence);
//   verdict  - overall pass/fail with optional reason and score.
var AudioProofSchema = z.object({
  version: AudioProofVersionSchema,
  createdAt: z.string().datetime(),
  manifest: z.object({
    traceId: z.string().optional(),
    runId: z.string().optional(),
    text: z.string().min(1),
    language: z.string().optional(),
    targetAffect: AvoSchema,
    policy: z.object({
      safetyMode: z.boolean(),
      trustTier: z.string().optional(),
      voiceCloningAllowed: z.boolean()
    }),
    cognitionSnapshot: z.record(z.string(), z.unknown()).optional()
  }),
  proof: z.object({
    synthesis: z.object({
      providerId: z.string().min(1),
      model: z.object({
        id: z.string().min(1),
        revision: z.string().optional(),
        sha256: z.string().optional()
      }),
      voice: z.object({
        voiceId: z.string().min(1),
        licenseCategory: VoiceLicenseCategorySchema,
        licenseText: z.string().optional(),
        source: z.string().optional()
      }),
      controls: z.record(z.string(), z.unknown()).optional(),
      seed: z.number().int().min(0).optional()
    }),
    attempts: z.array(
      z.object({
        attempt: z.number().int().min(1),
        artifactRef: z.string().min(1),
        notes: z.string().optional(),
        gates: AudioGatesSchema
      })
    ).optional(),
    // At least one artifact must be present for a proof to be meaningful.
    artifacts: z.array(AudioArtifactSchema).min(1),
    gates: AudioGatesSchema,
    evidence: z.array(EvidenceRefSchema).optional()
  }),
  verdict: z.object({
    passed: z.boolean(),
    reason: z.string().optional(),
    score: z.number().optional()
  })
});
|
|
759
|
+
/**
 * Validate an arbitrary value against `AudioProofSchema`.
 * @param {unknown} audioProof - Candidate AudioProof document.
 * @returns {{success: true, data: object} | {success: false, errors: object}}
 *   Parsed data on success, or the zod error object on failure.
 */
function validateAudioProof(audioProof) {
  const parsed = AudioProofSchema.safeParse(audioProof);
  return parsed.success
    ? { success: true, data: parsed.data }
    : { success: false, errors: parsed.error };
}
|
|
766
|
+
// React hook wrapping a single lazily-created HTMLAudioElement.
// Returns { isPlaying, error, play, stop, audioElement }.
// `play(source)` accepts either a URL string or a Blob-like object (anything
// URL.createObjectURL accepts); object URLs are revoked before the next play
// and on unmount.
function useAudioPlayer(options = {}) {
  const { volume = 1, onEnded, onError } = options;
  const audioRef = useRef(null);
  // Holds the currently live object URL (only set for non-string sources).
  const objectUrlRef = useRef(null);
  const [isPlaying, setIsPlaying] = useState(false);
  const [error, setError] = useState(null);
  // Lazy init during render; the `typeof Audio` guard keeps this safe in
  // non-browser environments (SSR), where audioElement stays null.
  if (!audioRef.current && typeof Audio !== "undefined") {
    audioRef.current = new Audio();
  }
  // Pause and rewind. Always clears isPlaying, even if pause()/seek throws.
  const stop = useCallback(() => {
    const audio = audioRef.current;
    if (!audio) return;
    try {
      audio.pause();
      audio.currentTime = 0;
    } catch {
    } finally {
      setIsPlaying(false);
    }
  }, []);
  // Start playback of a URL string or blob source. Rejects (and reports via
  // onError + state) when the element is unavailable or play() fails.
  const play = useCallback(
    async (source) => {
      const audio = audioRef.current;
      if (!audio) {
        throw new Error("Audio playback not supported in this environment");
      }
      setError(null);
      // Release the previous object URL before creating a new one.
      if (objectUrlRef.current) {
        URL.revokeObjectURL(objectUrlRef.current);
        objectUrlRef.current = null;
      }
      const src = typeof source === "string" ? source : URL.createObjectURL(source);
      if (typeof source !== "string") {
        objectUrlRef.current = src;
      }
      audio.volume = volume;
      audio.src = src;
      try {
        await audio.play();
        setIsPlaying(true);
      } catch (err) {
        const message = err instanceof Error ? err.message : "Failed to play audio";
        setIsPlaying(false);
        setError(message);
        onError?.(err instanceof Error ? err : new Error(message));
        // Re-throw so callers awaiting play() observe the failure.
        throw err instanceof Error ? err : new Error(message);
      }
    },
    [volume, onError]
  );
  // Keep isPlaying/error in sync with the element's "ended"/"error" events.
  useEffect(() => {
    const audio = audioRef.current;
    if (!audio) return;
    const handleEnded = () => {
      setIsPlaying(false);
      onEnded?.();
    };
    const handleError = () => {
      setIsPlaying(false);
      const message = "Audio element error";
      setError(message);
      onError?.(new Error(message));
    };
    audio.addEventListener("ended", handleEnded);
    audio.addEventListener("error", handleError);
    return () => {
      audio.removeEventListener("ended", handleEnded);
      audio.removeEventListener("error", handleError);
    };
  }, [onEnded, onError]);
  // Unmount cleanup: stop playback and release any outstanding object URL.
  useEffect(() => {
    return () => {
      stop();
      if (objectUrlRef.current) {
        URL.revokeObjectURL(objectUrlRef.current);
        objectUrlRef.current = null;
      }
    };
  }, [stop]);
  return {
    isPlaying,
    error,
    play,
    stop,
    // null in environments without an Audio constructor.
    audioElement: audioRef.current
  };
}
|
|
853
|
+
// React hook: voice-activity detection on a MediaStream via Web Audio RMS.
// Reports { isUserSpeaking, levelRms } and fires `onBargeIn` once on each
// silence->speech transition. A "hangover" window keeps isUserSpeaking true
// for `hangoverMs` after the level last exceeded `threshold`, avoiding
// flicker between words.
function useBargeIn(options) {
  const { stream, enabled = true, threshold = 0.02, hangoverMs = 250, onBargeIn } = options;
  const [isUserSpeaking, setIsUserSpeaking] = useState(false);
  const [levelRms, setLevelRms] = useState(0);
  // requestAnimationFrame handle for the sampling loop.
  const rafRef = useRef(null);
  // Timestamp (performance.now()) when RMS last exceeded the threshold.
  const lastAboveRef = useRef(0);
  // Previous loop iteration's speaking flag, for edge detection.
  const prevSpeakingRef = useRef(false);
  useEffect(() => {
    // Disabled or no stream: reset outputs and do not build an audio graph.
    if (!enabled || !stream) {
      setIsUserSpeaking(false);
      setLevelRms(0);
      return;
    }
    // No Web Audio in this environment (e.g. SSR): silently no-op.
    if (typeof AudioContext === "undefined") {
      return;
    }
    const audioContext = new AudioContext();
    const source = audioContext.createMediaStreamSource(stream);
    const analyser = audioContext.createAnalyser();
    analyser.fftSize = 2048;
    source.connect(analyser);
    // Reused time-domain sample buffer (one float per sample, fftSize long).
    const buffer = new Float32Array(analyser.fftSize);
    const tick = () => {
      analyser.getFloatTimeDomainData(buffer);
      // Root-mean-square level of the current window.
      let sum = 0;
      for (let i = 0; i < buffer.length; i++) {
        const x = buffer[i];
        sum += x * x;
      }
      const rms = Math.sqrt(sum / buffer.length);
      setLevelRms(rms);
      const now = performance.now();
      if (rms >= threshold) {
        lastAboveRef.current = now;
      }
      // Speaking while above threshold, or within the hangover window after it.
      const speaking = rms >= threshold || now - lastAboveRef.current <= hangoverMs;
      setIsUserSpeaking(speaking);
      // Rising edge only: fire the callback once per speech onset.
      if (!prevSpeakingRef.current && speaking) {
        onBargeIn?.();
      }
      prevSpeakingRef.current = speaking;
      rafRef.current = requestAnimationFrame(tick);
    };
    rafRef.current = requestAnimationFrame(tick);
    // Teardown: stop the loop, detach the graph, close the context.
    return () => {
      if (rafRef.current) cancelAnimationFrame(rafRef.current);
      rafRef.current = null;
      try {
        source.disconnect();
        analyser.disconnect();
      } catch {
      }
      // close() may reject if the context is already closed; best-effort.
      audioContext.close().catch(() => {
      });
    };
  }, [stream, enabled, threshold, hangoverMs, onBargeIn]);
  return { isUserSpeaking, levelRms };
}
|
|
911
|
+
|
|
912
|
+
// src/audio/hooks/useSpeechSynthesis.ts
// React hook orchestrating the full speech pipeline:
//   plan (planSpeech) -> synthesize (provider) -> validate AudioProof ->
//   policy gate -> play (useAudioPlayer) -> optional async verification.
// Verification timing resolves to "before_playback", "after_playback", or
// "never" (see effectiveVerificationMode below). Barge-in on a supplied mic
// stream cancels in-flight synthesis and playback.
function useSpeechSynthesis(options) {
  const {
    provider,
    verifier,
    voices,
    policy,
    signals,
    defaults,
    bargeIn,
    volume = 1,
    verificationMode,
    onProof,
    onBargeIn,
    onError
  } = options;
  // Explicit mode wins; else policy forces pre-playback verification; else
  // a present verifier implies post-playback; else verification is skipped.
  const effectiveVerificationMode = useMemo(() => {
    if (verificationMode) return verificationMode;
    if (policy.requireProofBeforePlayback) return "before_playback";
    if (verifier) return "after_playback";
    return "never";
  }, [verificationMode, policy.requireProofBeforePlayback, verifier]);
  // AbortController for the in-flight synthesis request, if any.
  const abortRef = useRef(null);
  const [isSynthesizing, setIsSynthesizing] = useState(false);
  const [error, setError] = useState(null);
  const [lastRequest, setLastRequest] = useState(null);
  const [lastResult, setLastResult] = useState(null);
  const [lastProof, setLastProof] = useState(null);
  const player = useAudioPlayer({
    volume,
    onError: (err) => {
      setError(err.message);
      onError?.(err);
    }
  });
  // Voice-activity detection on the optional barge-in mic stream.
  const bargeInState = useBargeIn({
    stream: bargeIn?.stream ?? null,
    enabled: !!bargeIn?.stream,
    threshold: bargeIn?.threshold,
    hangoverMs: bargeIn?.hangoverMs,
    onBargeIn: () => {
      onBargeIn?.();
    }
  });
  // Abort synthesis and stop playback; safe to call when idle.
  const cancel = useCallback(() => {
    abortRef.current?.abort();
    abortRef.current = null;
    setIsSynthesizing(false);
    player.stop();
  }, [player]);
  // User speech on the barge-in stream cancels the current utterance.
  useEffect(() => {
    if (!bargeIn?.stream) return;
    if (!bargeInState.isUserSpeaking) return;
    cancel();
  }, [bargeIn?.stream, bargeInState.isUserSpeaking, cancel]);
  const speak = useCallback(
    async (text, speakOptions) => {
      // Preempt any utterance already in flight.
      cancel();
      setError(null);
      setIsSynthesizing(true);
      const controller = new AbortController();
      abortRef.current = controller;
      try {
        const request = planSpeech({
          text,
          language: speakOptions?.language,
          runId: speakOptions?.runId,
          targetAffect: speakOptions?.targetAffect,
          policy,
          voices,
          signals,
          defaults
        });
        setLastRequest(request);
        const result = await provider.synthesizeSpeech(request, { signal: controller.signal });
        setLastResult(result);
        let proof = result.proof;
        // Any proof supplied by the provider must be schema-valid.
        if (proof) {
          const parsed = validateAudioProof(proof);
          if (!parsed.success) {
            throw new Error("Provider returned invalid AudioProof");
          }
          proof = parsed.data;
        }
        // Pre-playback verification runs when required and the provider did
        // not already supply a passing proof.
        const verifyNow = effectiveVerificationMode === "before_playback" && (!proof || !proof.verdict?.passed) && !!verifier;
        if (verifyNow && verifier) {
          proof = await verifier.verifySpeech({ request, result, policy });
          const parsed = validateAudioProof(proof);
          if (!parsed.success) {
            throw new Error("Verifier returned invalid AudioProof");
          }
          proof = parsed.data;
        }
        // Hard policy gate: no playback without a passing proof.
        if (policy.requireProofBeforePlayback && (!proof || proof.verdict.passed !== true)) {
          throw new Error("Playback blocked by policy: missing or failing AudioProof");
        }
        await player.play(result.audio);
        // Post-playback verification is fire-and-forget; failures are ignored
        // (playback has already happened) but a valid proof is still recorded.
        if (effectiveVerificationMode === "after_playback" && verifier && !proof) {
          void (async () => {
            try {
              const p = await verifier.verifySpeech({ request, result, policy });
              const parsed = validateAudioProof(p);
              if (!parsed.success) return;
              setLastProof(parsed.data);
              onProof?.(parsed.data);
            } catch {
            }
          })();
        }
        if (proof) {
          setLastProof(proof);
          onProof?.(proof);
        }
      } catch (err) {
        const message = err instanceof Error ? err.message : "Speech synthesis failed";
        setError(message);
        onError?.(err instanceof Error ? err : new Error(message));
        // Re-throw so callers awaiting speak() observe the failure.
        throw err instanceof Error ? err : new Error(message);
      } finally {
        setIsSynthesizing(false);
        abortRef.current = null;
      }
    },
    [
      cancel,
      policy,
      voices,
      signals,
      defaults,
      provider,
      verifier,
      effectiveVerificationMode,
      player,
      onProof,
      onError
    ]
  );
  return {
    isSynthesizing,
    isSpeaking: player.isPlaying,
    error,
    lastRequest,
    lastResult,
    lastProof,
    speak,
    cancel
  };
}
|
|
1060
|
+
|
|
1061
|
+
// src/audio/hooks/useCognitionSpeech.ts
// React hook composing `useCognition` with `useSpeechSynthesis`: the live
// cognition state feeds the speech planner's signals, and a barge-in both
// emits a "ui.interrupt" cognition event and forwards to the caller.
function useCognitionSpeech(options) {
  const {
    provider,
    verifier,
    voices,
    policy,
    volume,
    verificationMode,
    initialCognition,
    autoTick = true,
    onCognitionChange,
    bargeIn,
    defaults,
    onProof,
    onBargeIn,
    onError
  } = options;
  const cognitionResult = useCognition({
    initial: initialCognition,
    autoTick,
    onChange: onCognitionChange
  });
  const speechResult = useSpeechSynthesis({
    provider,
    verifier,
    voices,
    policy,
    volume,
    verificationMode,
    // Derive planner signals from the current cognition state each render.
    signals: {
      mode: cognitionResult.state.mode,
      personaDriftRisk: cognitionResult.state.personaDriftRisk,
      confidence: cognitionResult.state.confidence,
      risk: cognitionResult.state.risk
    },
    defaults,
    bargeIn,
    onProof,
    onBargeIn: () => {
      // Interruptions are also surfaced to the cognition state machine.
      cognitionResult.handleEvent({ type: "ui.interrupt" });
      onBargeIn?.();
    },
    onError
  });
  const speak = useCallback(
    async (text, speakOptions) => {
      // Plan from cognition only to resolve the effective targetAffect
      // (explicit option wins, else the cognition state's emotionAVO);
      // the underlying speak() re-plans with that affect applied.
      const request = planSpeechFromCognition({
        text,
        language: speakOptions?.language,
        runId: speakOptions?.runId,
        targetAffect: speakOptions?.targetAffect,
        cognition: cognitionResult.state,
        policy,
        voices,
        defaults
      });
      await speechResult.speak(text, {
        ...speakOptions,
        targetAffect: request.targetAffect
      });
    },
    [cognitionResult.state, policy, voices, defaults, speechResult]
  );
  return {
    // Cognition
    cognition: cognitionResult.state,
    emotion: cognitionResult.emotion,
    handleCognition: cognitionResult.handleEvent,
    emitCognition: cognitionResult.handleEvent,
    // deprecated alias — `emitCognition` duplicates `handleCognition`; prefer the latter
    tickCognition: cognitionResult.tick,
    // Speech
    isSynthesizing: speechResult.isSynthesizing,
    isSpeaking: speechResult.isSpeaking,
    error: speechResult.error,
    lastRequest: speechResult.lastRequest,
    lastResult: speechResult.lastResult,
    lastProof: speechResult.lastProof,
    speak,
    cancel: speechResult.cancel
  };
}
|
|
1144
|
+
|
|
1145
|
+
export { DEFAULT_RUN_SPEECH_CUES, useCognitionSpeech, useRunSpeechCues };
|
|
6
1146
|
//# sourceMappingURL=index.js.map
|
|
7
1147
|
//# sourceMappingURL=index.js.map
|