@myscheme/voice-navigation-sdk 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +359 -0
- package/dist/actions.d.ts +8 -0
- package/dist/actions.d.ts.map +1 -0
- package/dist/actions.js +478 -0
- package/dist/constants.d.ts +2 -0
- package/dist/constants.d.ts.map +1 -0
- package/dist/constants.js +1 -0
- package/dist/index.d.ts +11 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +156 -0
- package/dist/microphone-handler.d.ts +47 -0
- package/dist/microphone-handler.d.ts.map +1 -0
- package/dist/microphone-handler.js +341 -0
- package/dist/navigation-controller.d.ts +50 -0
- package/dist/navigation-controller.d.ts.map +1 -0
- package/dist/navigation-controller.js +782 -0
- package/dist/server/index.d.ts +3 -0
- package/dist/server/index.d.ts.map +1 -0
- package/dist/server/index.js +1 -0
- package/dist/server/opensearch-handler.d.ts +52 -0
- package/dist/server/opensearch-handler.d.ts.map +1 -0
- package/dist/server/opensearch-handler.js +279 -0
- package/dist/services/azure-speech.d.ts +13 -0
- package/dist/services/azure-speech.d.ts.map +1 -0
- package/dist/services/azure-speech.js +33 -0
- package/dist/services/bedrock.d.ts +18 -0
- package/dist/services/bedrock.d.ts.map +1 -0
- package/dist/services/bedrock.js +132 -0
- package/dist/services/schemes.d.ts +2 -0
- package/dist/services/schemes.d.ts.map +1 -0
- package/dist/services/schemes.js +1 -0
- package/dist/services/vector-search.d.ts +21 -0
- package/dist/services/vector-search.d.ts.map +1 -0
- package/dist/services/vector-search.js +181 -0
- package/dist/types.d.ts +107 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +1 -0
- package/dist/ui.d.ts +10 -0
- package/dist/ui.d.ts.map +1 -0
- package/dist/ui.js +225 -0
- package/package.json +55 -0
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import * as sdk from "microsoft-cognitiveservices-speech-sdk";
|
|
2
|
+
import type { NavigationCallbacks, AgentActionResponse } from "./types.js";
|
|
3
|
+
import { AzureSpeechService } from "./services/azure-speech.js";
|
|
4
|
+
import { BedrockService } from "./services/bedrock.js";
|
|
5
|
+
/**
 * Construction options for the microphone handler.
 */
interface MicrophoneConfig {
    /** Service used to obtain Azure Speech authorization tokens. */
    azureSpeechService: AzureSpeechService;
    /** Service used to turn a finished transcript into an agent action. */
    bedrockService: BedrockService;
    /** BCP-47 recognition language tag; the implementation defaults to "en-IN". */
    language?: string;
    /** Milliseconds of silence before the silence callback fires; the implementation defaults to 2000. */
    silenceTimeout?: number;
}
|
|
11
|
+
/**
 * Browser-side microphone capture built on the Azure Speech SDK:
 * permission handling, AudioContext unlocking (autoplay policy),
 * continuous recognition into a transcript buffer with silence
 * detection, and hand-off of the finished text to a Bedrock agent.
 */
export declare class MicrophoneHandler {
    // Injected services (see MicrophoneConfig).
    private azureSpeechService;
    private bedrockService;
    // Recognition language tag and silence window in milliseconds.
    private language;
    private silenceTimeout;
    // Azure authorization token and its paired region, fetched before recording.
    private token;
    private region;
    // Active Speech SDK recognizer, if any.
    private recognizer;
    // Finalized transcript segments, joined by getAggregatedText().
    private transcriptionBuffer;
    // Pending silence-detection timer handle.
    private silenceTimerId;
    private isRecording;
    private hasPermission;
    // AudioContext used to satisfy browser user-gesture/autoplay policies.
    private audioContext;
    private audioContextUnlocked;
    constructor(config: MicrophoneConfig);
    /** True once getUserMedia has been granted in this session. */
    hasMicrophonePermission(): boolean;
    /** True once the AudioContext has been unlocked (or is unnecessary). */
    hasUnlockedAudioContext(): boolean;
    /** Permissions API state for the microphone, or "unknown" where unsupported. */
    getPermissionState(): Promise<PermissionState | "unknown">;
    /** Whether recording can start without a gesture: permission granted AND AudioContext unlocked. */
    canStartAutomatically(): Promise<boolean>;
    /** Creates/resumes the AudioContext; no-op unless invoked from a user gesture. */
    unlockAudioContext(isUserGesture: boolean): Promise<void>;
    /** Prompts via getUserMedia and records whether access was granted. */
    requestMicrophonePermission(): Promise<boolean>;
    /** Sets the recognition language used by subsequently created recognizers. */
    setLanguage(language: string): void;
    /** Discards all accumulated transcript segments. */
    resetTranscriptionBuffer(): void;
    /** Fetches an Azure Speech token/region pair; throws on failure. */
    fetchAzureToken(): Promise<boolean>;
    /** Verifies the Speech SDK is usable in this (browser) environment. */
    ensureSpeechSdk(): Promise<boolean>;
    /** Cancels any pending silence-detection timer. */
    clearSilenceTimer(): void;
    /** Space-joined, trimmed concatenation of all recognized segments. */
    getAggregatedText(): string;
    /** (Re)arms the silence timer; fires callbacks.onSilence after the timeout. */
    scheduleSilenceCheck(callbacks?: NavigationCallbacks): void;
    /** Sends a transcript to the Bedrock agent; returns { action: "unknown" } for blank text. */
    sendToBedrockAgent(text: string): Promise<AgentActionResponse>;
    /** Stops and closes the current recognizer, if any. */
    disposeRecognizer(): void;
    /** Builds a SpeechRecognizer from the fetched token/region; throws if the token is missing. */
    createRecognizer(): Promise<sdk.SpeechRecognizer>;
    /** Starts continuous recognition; resolves false (after onError) when a prerequisite fails. */
    startRecording(callbacks?: NavigationCallbacks): Promise<boolean>;
    /** Stops recognition and resolves with the aggregated transcript. */
    stopRecording(): Promise<string>;
    /** Whether any "audioinput" device is present (via enumerateDevices). */
    static isMicrophoneAvailable(): Promise<boolean>;
}
|
|
46
|
+
export {};
|
|
47
|
+
//# sourceMappingURL=microphone-handler.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"microphone-handler.d.ts","sourceRoot":"","sources":["../src/microphone-handler.ts"],"names":[],"mappings":"AAKA,OAAO,KAAK,GAAG,MAAM,wCAAwC,CAAC;AAC9D,OAAO,KAAK,EAAE,mBAAmB,EAAE,mBAAmB,EAAE,MAAM,YAAY,CAAC;AAC3E,OAAO,EAAE,kBAAkB,EAAE,MAAM,4BAA4B,CAAC;AAChE,OAAO,EAAE,cAAc,EAAE,MAAM,uBAAuB,CAAC;AAEvD,UAAU,gBAAgB;IACxB,kBAAkB,EAAE,kBAAkB,CAAC;IACvC,cAAc,EAAE,cAAc,CAAC;IAC/B,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,cAAc,CAAC,EAAE,MAAM,CAAC;CACzB;AAED,qBAAa,iBAAiB;IAC5B,OAAO,CAAC,kBAAkB,CAAqB;IAC/C,OAAO,CAAC,cAAc,CAAiB;IACvC,OAAO,CAAC,QAAQ,CAAS;IACzB,OAAO,CAAC,cAAc,CAAS;IAC/B,OAAO,CAAC,KAAK,CAAuB;IACpC,OAAO,CAAC,MAAM,CAAuB;IACrC,OAAO,CAAC,UAAU,CAAqC;IACvD,OAAO,CAAC,mBAAmB,CAAgB;IAC3C,OAAO,CAAC,cAAc,CAA8C;IACpE,OAAO,CAAC,WAAW,CAAkB;IACrC,OAAO,CAAC,aAAa,CAAkB;IACvC,OAAO,CAAC,YAAY,CAA6B;IACjD,OAAO,CAAC,oBAAoB,CAAkB;gBAElC,MAAM,EAAE,gBAAgB;IAUpC,uBAAuB,IAAI,OAAO;IAIlC,uBAAuB,IAAI,OAAO;IAO5B,kBAAkB,IAAI,OAAO,CAAC,eAAe,GAAG,SAAS,CAAC;IAoB1D,qBAAqB,IAAI,OAAO,CAAC,OAAO,CAAC;IAgBzC,kBAAkB,CAAC,aAAa,EAAE,OAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAwDzD,2BAA2B,IAAI,OAAO,CAAC,OAAO,CAAC;IA2BrD,WAAW,CAAC,QAAQ,EAAE,MAAM,GAAG,IAAI;IAYnC,wBAAwB,IAAI,IAAI;IAO1B,eAAe,IAAI,OAAO,CAAC,OAAO,CAAC;IAgBnC,eAAe,IAAI,OAAO,CAAC,OAAO,CAAC;IAkBzC,iBAAiB,IAAI,IAAI;IAUzB,iBAAiB,IAAI,MAAM;IAO3B,oBAAoB,CAAC,SAAS,GAAE,mBAAwB,GAAG,IAAI;IAczD,kBAAkB,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,mBAAmB,CAAC;IAiBpE,iBAAiB,IAAI,IAAI;IAgCnB,gBAAgB,IAAI,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC;IAmBjD,cAAc,CAAC,SAAS,GAAE,mBAAwB,GAAG,OAAO,CAAC,OAAO,CAAC;IAiHrE,aAAa,IAAI,OAAO,CAAC,MAAM,CAAC;WAoCzB,qBAAqB,IAAI,OAAO,CAAC,OAAO,CAAC;CAYvD"}
|
|
@@ -0,0 +1,341 @@
|
|
|
1
|
+
import * as sdk from "microsoft-cognitiveservices-speech-sdk";
|
|
2
|
+
export class MicrophoneHandler {
|
|
3
|
+
constructor(config) {
|
|
4
|
+
this.token = null;
|
|
5
|
+
this.region = null;
|
|
6
|
+
this.recognizer = null;
|
|
7
|
+
this.transcriptionBuffer = [];
|
|
8
|
+
this.silenceTimerId = null;
|
|
9
|
+
this.isRecording = false;
|
|
10
|
+
this.hasPermission = false;
|
|
11
|
+
this.audioContext = null;
|
|
12
|
+
this.audioContextUnlocked = false;
|
|
13
|
+
this.azureSpeechService = config.azureSpeechService;
|
|
14
|
+
this.bedrockService = config.bedrockService;
|
|
15
|
+
this.language = config.language || "en-IN";
|
|
16
|
+
this.silenceTimeout = config.silenceTimeout || 2000;
|
|
17
|
+
}
|
|
18
|
+
hasMicrophonePermission() {
|
|
19
|
+
return this.hasPermission;
|
|
20
|
+
}
|
|
21
|
+
hasUnlockedAudioContext() {
|
|
22
|
+
return this.audioContextUnlocked;
|
|
23
|
+
}
|
|
24
|
+
async getPermissionState() {
|
|
25
|
+
if (typeof navigator === "undefined" || !navigator.permissions) {
|
|
26
|
+
return "unknown";
|
|
27
|
+
}
|
|
28
|
+
try {
|
|
29
|
+
const result = await navigator.permissions.query({
|
|
30
|
+
name: "microphone",
|
|
31
|
+
});
|
|
32
|
+
return result.state;
|
|
33
|
+
}
|
|
34
|
+
catch (error) {
|
|
35
|
+
console.warn("Failed to query microphone permission state:", error);
|
|
36
|
+
return "unknown";
|
|
37
|
+
}
|
|
38
|
+
}
|
|
39
|
+
async canStartAutomatically() {
|
|
40
|
+
if (this.hasPermission) {
|
|
41
|
+
return true;
|
|
42
|
+
}
|
|
43
|
+
const state = await this.getPermissionState();
|
|
44
|
+
if (state !== "granted") {
|
|
45
|
+
return false;
|
|
46
|
+
}
|
|
47
|
+
return this.audioContextUnlocked;
|
|
48
|
+
}
|
|
49
|
+
async unlockAudioContext(isUserGesture) {
|
|
50
|
+
if (this.audioContextUnlocked) {
|
|
51
|
+
return;
|
|
52
|
+
}
|
|
53
|
+
if (!isUserGesture) {
|
|
54
|
+
return;
|
|
55
|
+
}
|
|
56
|
+
if (typeof window === "undefined") {
|
|
57
|
+
return;
|
|
58
|
+
}
|
|
59
|
+
const globalWindow = window;
|
|
60
|
+
const AudioContextCtor = globalWindow.AudioContext || globalWindow.webkitAudioContext;
|
|
61
|
+
if (!AudioContextCtor) {
|
|
62
|
+
this.audioContextUnlocked = true;
|
|
63
|
+
return;
|
|
64
|
+
}
|
|
65
|
+
if (!this.audioContext) {
|
|
66
|
+
try {
|
|
67
|
+
this.audioContext = new AudioContextCtor();
|
|
68
|
+
}
|
|
69
|
+
catch (error) {
|
|
70
|
+
console.warn("Failed to create AudioContext:", error);
|
|
71
|
+
throw error;
|
|
72
|
+
}
|
|
73
|
+
}
|
|
74
|
+
try {
|
|
75
|
+
if (this.audioContext.state === "suspended") {
|
|
76
|
+
await this.audioContext.resume();
|
|
77
|
+
}
|
|
78
|
+
else if (this.audioContext.state === "closed") {
|
|
79
|
+
this.audioContext = new AudioContextCtor();
|
|
80
|
+
if (this.audioContext.state === "suspended") {
|
|
81
|
+
await this.audioContext.resume();
|
|
82
|
+
}
|
|
83
|
+
}
|
|
84
|
+
if (this.audioContext.state === "running") {
|
|
85
|
+
this.audioContextUnlocked = true;
|
|
86
|
+
}
|
|
87
|
+
}
|
|
88
|
+
catch (error) {
|
|
89
|
+
console.warn("Failed to resume AudioContext:", error);
|
|
90
|
+
throw error;
|
|
91
|
+
}
|
|
92
|
+
}
|
|
93
|
+
async requestMicrophonePermission() {
|
|
94
|
+
if (typeof navigator === "undefined" || !navigator.mediaDevices) {
|
|
95
|
+
console.error("MediaDevices not supported");
|
|
96
|
+
return false;
|
|
97
|
+
}
|
|
98
|
+
try {
|
|
99
|
+
const stream = await navigator.mediaDevices.getUserMedia({
|
|
100
|
+
audio: true,
|
|
101
|
+
});
|
|
102
|
+
stream.getTracks().forEach((track) => track.stop());
|
|
103
|
+
this.hasPermission = true;
|
|
104
|
+
console.log("Microphone permission granted");
|
|
105
|
+
return true;
|
|
106
|
+
}
|
|
107
|
+
catch (error) {
|
|
108
|
+
console.error("Microphone permission denied:", error);
|
|
109
|
+
this.hasPermission = false;
|
|
110
|
+
return false;
|
|
111
|
+
}
|
|
112
|
+
}
|
|
113
|
+
setLanguage(language) {
|
|
114
|
+
if (!language) {
|
|
115
|
+
console.warn("Invalid language code");
|
|
116
|
+
return;
|
|
117
|
+
}
|
|
118
|
+
this.language = language;
|
|
119
|
+
console.log(`Speech recognition language set to: ${language}`);
|
|
120
|
+
}
|
|
121
|
+
resetTranscriptionBuffer() {
|
|
122
|
+
this.transcriptionBuffer = [];
|
|
123
|
+
}
|
|
124
|
+
async fetchAzureToken() {
|
|
125
|
+
try {
|
|
126
|
+
const response = await this.azureSpeechService.fetchToken();
|
|
127
|
+
this.token = response.token;
|
|
128
|
+
this.region = response.region;
|
|
129
|
+
console.log("Azure token fetched successfully");
|
|
130
|
+
return true;
|
|
131
|
+
}
|
|
132
|
+
catch (error) {
|
|
133
|
+
console.error("Failed to fetch Azure token:", error);
|
|
134
|
+
throw error;
|
|
135
|
+
}
|
|
136
|
+
}
|
|
137
|
+
async ensureSpeechSdk() {
|
|
138
|
+
if (typeof window === "undefined") {
|
|
139
|
+
console.error("Speech SDK requires browser environment");
|
|
140
|
+
return false;
|
|
141
|
+
}
|
|
142
|
+
if (typeof sdk !== "undefined") {
|
|
143
|
+
return true;
|
|
144
|
+
}
|
|
145
|
+
console.error("Speech SDK not available");
|
|
146
|
+
return false;
|
|
147
|
+
}
|
|
148
|
+
clearSilenceTimer() {
|
|
149
|
+
if (this.silenceTimerId) {
|
|
150
|
+
clearTimeout(this.silenceTimerId);
|
|
151
|
+
this.silenceTimerId = null;
|
|
152
|
+
}
|
|
153
|
+
}
|
|
154
|
+
getAggregatedText() {
|
|
155
|
+
return this.transcriptionBuffer.join(" ").trim();
|
|
156
|
+
}
|
|
157
|
+
scheduleSilenceCheck(callbacks = {}) {
|
|
158
|
+
this.clearSilenceTimer();
|
|
159
|
+
this.silenceTimerId = setTimeout(() => {
|
|
160
|
+
console.log("Silence detected - triggering callback");
|
|
161
|
+
if (callbacks.onSilence) {
|
|
162
|
+
callbacks.onSilence();
|
|
163
|
+
}
|
|
164
|
+
}, this.silenceTimeout);
|
|
165
|
+
}
|
|
166
|
+
async sendToBedrockAgent(text) {
|
|
167
|
+
if (!text || !text.trim()) {
|
|
168
|
+
return { action: "unknown" };
|
|
169
|
+
}
|
|
170
|
+
try {
|
|
171
|
+
const response = await this.bedrockService.extractAction(text);
|
|
172
|
+
return response;
|
|
173
|
+
}
|
|
174
|
+
catch (error) {
|
|
175
|
+
console.error("Bedrock agent error:", error);
|
|
176
|
+
throw error;
|
|
177
|
+
}
|
|
178
|
+
}
|
|
179
|
+
disposeRecognizer() {
|
|
180
|
+
if (this.recognizer) {
|
|
181
|
+
try {
|
|
182
|
+
this.recognizer.stopContinuousRecognitionAsync(() => {
|
|
183
|
+
console.log("Recognizer stopped");
|
|
184
|
+
if (this.recognizer) {
|
|
185
|
+
this.recognizer.close();
|
|
186
|
+
this.recognizer = null;
|
|
187
|
+
}
|
|
188
|
+
}, (error) => {
|
|
189
|
+
console.error("Error stopping recognizer:", error);
|
|
190
|
+
if (this.recognizer) {
|
|
191
|
+
this.recognizer.close();
|
|
192
|
+
this.recognizer = null;
|
|
193
|
+
}
|
|
194
|
+
});
|
|
195
|
+
}
|
|
196
|
+
catch (error) {
|
|
197
|
+
console.error("Error disposing recognizer:", error);
|
|
198
|
+
if (this.recognizer) {
|
|
199
|
+
this.recognizer.close();
|
|
200
|
+
this.recognizer = null;
|
|
201
|
+
}
|
|
202
|
+
}
|
|
203
|
+
}
|
|
204
|
+
}
|
|
205
|
+
async createRecognizer() {
|
|
206
|
+
if (!this.token || !this.region) {
|
|
207
|
+
throw new Error("Azure token not available");
|
|
208
|
+
}
|
|
209
|
+
const speechConfig = sdk.SpeechConfig.fromAuthorizationToken(this.token, this.region);
|
|
210
|
+
speechConfig.speechRecognitionLanguage = this.language;
|
|
211
|
+
const audioConfig = sdk.AudioConfig.fromDefaultMicrophoneInput();
|
|
212
|
+
return new sdk.SpeechRecognizer(speechConfig, audioConfig);
|
|
213
|
+
}
|
|
214
|
+
async startRecording(callbacks = {}) {
|
|
215
|
+
if (this.isRecording) {
|
|
216
|
+
console.warn("Already recording");
|
|
217
|
+
return false;
|
|
218
|
+
}
|
|
219
|
+
if (!this.hasPermission) {
|
|
220
|
+
const granted = await this.requestMicrophonePermission();
|
|
221
|
+
if (!granted) {
|
|
222
|
+
if (callbacks.onError) {
|
|
223
|
+
callbacks.onError(new Error("Microphone permission denied"));
|
|
224
|
+
}
|
|
225
|
+
return false;
|
|
226
|
+
}
|
|
227
|
+
}
|
|
228
|
+
const sdkReady = await this.ensureSpeechSdk();
|
|
229
|
+
if (!sdkReady) {
|
|
230
|
+
if (callbacks.onError) {
|
|
231
|
+
callbacks.onError(new Error("Speech SDK not available"));
|
|
232
|
+
}
|
|
233
|
+
return false;
|
|
234
|
+
}
|
|
235
|
+
try {
|
|
236
|
+
await this.fetchAzureToken();
|
|
237
|
+
}
|
|
238
|
+
catch (error) {
|
|
239
|
+
if (callbacks.onError) {
|
|
240
|
+
callbacks.onError(error);
|
|
241
|
+
}
|
|
242
|
+
return false;
|
|
243
|
+
}
|
|
244
|
+
this.resetTranscriptionBuffer();
|
|
245
|
+
try {
|
|
246
|
+
this.recognizer = await this.createRecognizer();
|
|
247
|
+
}
|
|
248
|
+
catch (error) {
|
|
249
|
+
console.error("Failed to create recognizer:", error);
|
|
250
|
+
if (callbacks.onError) {
|
|
251
|
+
callbacks.onError(error);
|
|
252
|
+
}
|
|
253
|
+
return false;
|
|
254
|
+
}
|
|
255
|
+
this.recognizer.recognizing = (_s, e) => {
|
|
256
|
+
if (e.result.text && callbacks.onPartial) {
|
|
257
|
+
callbacks.onPartial(e.result.text);
|
|
258
|
+
}
|
|
259
|
+
};
|
|
260
|
+
this.recognizer.recognized = (_s, e) => {
|
|
261
|
+
if (e.result.reason === sdk.ResultReason.RecognizedSpeech) {
|
|
262
|
+
const text = e.result.text;
|
|
263
|
+
if (text) {
|
|
264
|
+
this.transcriptionBuffer.push(text);
|
|
265
|
+
if (callbacks.onSegment) {
|
|
266
|
+
callbacks.onSegment(text, {
|
|
267
|
+
duration: e.result.duration,
|
|
268
|
+
offset: e.result.offset,
|
|
269
|
+
});
|
|
270
|
+
}
|
|
271
|
+
this.scheduleSilenceCheck(callbacks);
|
|
272
|
+
}
|
|
273
|
+
}
|
|
274
|
+
else if (e.result.reason === sdk.ResultReason.NoMatch) {
|
|
275
|
+
console.log("No speech recognized");
|
|
276
|
+
}
|
|
277
|
+
};
|
|
278
|
+
this.recognizer.canceled = (_s, e) => {
|
|
279
|
+
console.error("Recognition canceled:", e.reason, e.errorDetails);
|
|
280
|
+
this.isRecording = false;
|
|
281
|
+
if (callbacks.onError) {
|
|
282
|
+
callbacks.onError(new Error(`Recognition canceled: ${e.reason} - ${e.errorDetails}`));
|
|
283
|
+
}
|
|
284
|
+
};
|
|
285
|
+
this.recognizer.sessionStopped = (_s, _e) => {
|
|
286
|
+
console.log("Session stopped");
|
|
287
|
+
this.isRecording = false;
|
|
288
|
+
};
|
|
289
|
+
this.recognizer.startContinuousRecognitionAsync(() => {
|
|
290
|
+
console.log("Continuous recognition started");
|
|
291
|
+
this.isRecording = true;
|
|
292
|
+
this.audioContextUnlocked = true;
|
|
293
|
+
this.scheduleSilenceCheck(callbacks);
|
|
294
|
+
}, (error) => {
|
|
295
|
+
console.error("Failed to start recognition:", error);
|
|
296
|
+
this.isRecording = false;
|
|
297
|
+
if (callbacks.onError) {
|
|
298
|
+
callbacks.onError(new Error(`Failed to start recognition: ${error}`));
|
|
299
|
+
}
|
|
300
|
+
});
|
|
301
|
+
return true;
|
|
302
|
+
}
|
|
303
|
+
async stopRecording() {
|
|
304
|
+
this.clearSilenceTimer();
|
|
305
|
+
if (!this.isRecording || !this.recognizer) {
|
|
306
|
+
console.warn("Not currently recording");
|
|
307
|
+
return this.getAggregatedText();
|
|
308
|
+
}
|
|
309
|
+
return new Promise((resolve) => {
|
|
310
|
+
if (!this.recognizer) {
|
|
311
|
+
resolve(this.getAggregatedText());
|
|
312
|
+
return;
|
|
313
|
+
}
|
|
314
|
+
this.recognizer.stopContinuousRecognitionAsync(() => {
|
|
315
|
+
console.log("Recognition stopped successfully");
|
|
316
|
+
this.isRecording = false;
|
|
317
|
+
const finalText = this.getAggregatedText();
|
|
318
|
+
this.disposeRecognizer();
|
|
319
|
+
resolve(finalText);
|
|
320
|
+
}, (error) => {
|
|
321
|
+
console.error("Error stopping recognition:", error);
|
|
322
|
+
this.isRecording = false;
|
|
323
|
+
const finalText = this.getAggregatedText();
|
|
324
|
+
this.disposeRecognizer();
|
|
325
|
+
resolve(finalText);
|
|
326
|
+
});
|
|
327
|
+
});
|
|
328
|
+
}
|
|
329
|
+
static async isMicrophoneAvailable() {
|
|
330
|
+
if (typeof navigator === "undefined" || !navigator.mediaDevices) {
|
|
331
|
+
return false;
|
|
332
|
+
}
|
|
333
|
+
try {
|
|
334
|
+
const devices = await navigator.mediaDevices.enumerateDevices();
|
|
335
|
+
return devices.some((device) => device.kind === "audioinput");
|
|
336
|
+
}
|
|
337
|
+
catch {
|
|
338
|
+
return false;
|
|
339
|
+
}
|
|
340
|
+
}
|
|
341
|
+
}
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
import type { NavigationConfig } from "./types.js";
|
|
2
|
+
/**
 * Public surface of the voice navigation controller, which wires the
 * microphone handler, Azure Speech, Bedrock and vector-search services
 * together.
 *
 * NOTE(review): the implementation (navigation-controller.js) is not
 * visible in this chunk, so comments below are limited to what the
 * signatures themselves establish — confirm details against the source.
 */
export declare class VoiceNavigationController {
    private config;
    private microphoneHandler;
    private azureSpeechService;
    private bedrockService;
    private ui;
    private lastTranscript;
    private isProcessing;
    // Resume/auto-start bookkeeping across page navigations.
    private resumeAfterNavigation;
    private lastStartReason;
    private autoStartPendingUserInteraction;
    private removeAutoStartListeners;
    private vectorSearchService;
    constructor(config: NavigationConfig);
    private shouldAutoStart;
    /** Enables or disables automatic start behavior. */
    setAutoStart(enabled: boolean): void;
    /** Called before navigating away; `target` presumably identifies the destination — TODO confirm. */
    prepareForNavigation(options?: {
        target?: string;
    }): void;
    private setupEventHandlers;
    private toggleRecording;
    /** Starts listening; `reason` distinguishes automatic from user-initiated starts. */
    start(options?: {
        reason?: "auto" | "user";
    }): Promise<void>;
    /** Stops listening. */
    stop(): Promise<void>;
    // Speech-pipeline callbacks (partial/segment/silence/error).
    private handlePartial;
    private handleSegment;
    private handleSilence;
    private processPendingTranscript;
    private handleError;
    private processTranscript;
    private handleAgentResult;
    /** Sets the speech-recognition language. */
    setLanguage(language: string): void;
    // Search/navigation helpers — semantics live in the implementation.
    private executeVectorSearchAction;
    private normalizeSearchQuery;
    private normalizeIndianSchemeName;
    private resolveUrl;
    private isSameDomain;
    private safeHostname;
    private pauseAfterExternalNavigation;
    /** Tears down the controller. */
    destroy(): void;
    // Persistence of resume state across navigations — verify storage mechanism in source.
    private persistResumeState;
    private consumeResumeState;
    private isActiveState;
    private handleAutoStartFallback;
    private clearAutoStartFallback;
    private isPermissionRelatedError;
}
|
|
50
|
+
//# sourceMappingURL=navigation-controller.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"navigation-controller.d.ts","sourceRoot":"","sources":["../src/navigation-controller.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,EACV,gBAAgB,EAKjB,MAAM,YAAY,CAAC;AAuDpB,qBAAa,yBAAyB;IACpC,OAAO,CAAC,MAAM,CAAmB;IACjC,OAAO,CAAC,iBAAiB,CAAoB;IAC7C,OAAO,CAAC,kBAAkB,CAAqB;IAC/C,OAAO,CAAC,cAAc,CAAiB;IACvC,OAAO,CAAC,EAAE,CAAa;IACvB,OAAO,CAAC,cAAc,CAAc;IACpC,OAAO,CAAC,YAAY,CAAkB;IACtC,OAAO,CAAC,qBAAqB,CAAkB;IAC/C,OAAO,CAAC,eAAe,CAA2B;IAClD,OAAO,CAAC,+BAA+B,CAAkB;IACzD,OAAO,CAAC,wBAAwB,CAA6B;IAC7D,OAAO,CAAC,mBAAmB,CAAoC;gBAEnD,MAAM,EAAE,gBAAgB;IA0EpC,OAAO,CAAC,eAAe;IAoBhB,YAAY,CAAC,OAAO,EAAE,OAAO,GAAG,IAAI;IAepC,oBAAoB,CAAC,OAAO,GAAE;QAAE,MAAM,CAAC,EAAE,MAAM,CAAA;KAAO,GAAG,IAAI;IAOpE,OAAO,CAAC,kBAAkB;YASZ,eAAe;IAkBhB,KAAK,CAAC,OAAO,GAAE;QAAE,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,CAAA;KAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAiEhE,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;IA2BlC,OAAO,CAAC,aAAa;IAUrB,OAAO,CAAC,aAAa;YAWP,aAAa;YAOb,wBAAwB;IAuCtC,OAAO,CAAC,WAAW;YAiBL,iBAAiB;YAwBjB,iBAAiB;IA0HxB,WAAW,CAAC,QAAQ,EAAE,MAAM,GAAG,IAAI;YAI5B,yBAAyB;IAgMvC,OAAO,CAAC,oBAAoB;IAqD5B,OAAO,CAAC,yBAAyB;IAoCjC,OAAO,CAAC,UAAU;IAQlB,OAAO,CAAC,YAAY;IAUpB,OAAO,CAAC,YAAY;IASpB,OAAO,CAAC,4BAA4B;IAiB7B,OAAO,IAAI,IAAI;IAgBtB,OAAO,CAAC,kBAAkB;IA+B1B,OAAO,CAAC,kBAAkB;IAwD1B,OAAO,CAAC,aAAa;IAYrB,OAAO,CAAC,uBAAuB;IAoD/B,OAAO,CAAC,sBAAsB;IAW9B,OAAO,CAAC,wBAAwB;CAgBjC"}
|