voice-router-dev 0.1.8 → 0.1.9
This diff shows the changes between two publicly released versions of the package, as they appear in the supported public registries. It is provided for informational purposes only.
- package/README.md +138 -4
- package/dist/index.d.mts +8917 -7571
- package/dist/index.d.ts +8917 -7571
- package/dist/index.js +115 -60
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +108 -60
- package/dist/index.mjs.map +1 -1
- package/package.json +5 -4
package/dist/index.mjs
CHANGED
@@ -210,6 +210,69 @@ function createVoiceRouter(config, adapters) {
   return router;
 }
 
+// src/generated/deepgram/schema/listenV1EncodingParameter.ts
+var ListenV1EncodingParameter = {
+  linear16: "linear16",
+  flac: "flac",
+  mulaw: "mulaw",
+  opus: "opus",
+  speex: "speex",
+  g729: "g729"
+};
+
+// src/generated/deepgram/schema/speakV1EncodingParameter.ts
+var SpeakV1EncodingParameter = {
+  linear16: "linear16",
+  aac: "aac",
+  opus: "opus",
+  mp3: "mp3",
+  flac: "flac",
+  mulaw: "mulaw",
+  alaw: "alaw"
+};
+
+// src/generated/deepgram/schema/speakV1ContainerParameter.ts
+var SpeakV1ContainerParameter = {
+  none: "none",
+  wav: "wav",
+  ogg: "ogg"
+};
+
+// src/generated/deepgram/schema/speakV1SampleRateParameter.ts
+var SpeakV1SampleRateParameter = {
+  NUMBER_16000: 16e3,
+  NUMBER_24000: 24e3,
+  NUMBER_32000: 32e3,
+  NUMBER_48000: 48e3,
+  null: null,
+  NUMBER_8000: 8e3,
+  NUMBER_22050: 22050
+};
+
+// src/generated/gladia/schema/streamingSupportedEncodingEnum.ts
+var StreamingSupportedEncodingEnum = {
+  "wav/pcm": "wav/pcm",
+  "wav/alaw": "wav/alaw",
+  "wav/ulaw": "wav/ulaw"
+};
+
+// src/generated/gladia/schema/streamingSupportedSampleRateEnum.ts
+var StreamingSupportedSampleRateEnum = {
+  NUMBER_8000: 8e3,
+  NUMBER_16000: 16e3,
+  NUMBER_32000: 32e3,
+  NUMBER_44100: 44100,
+  NUMBER_48000: 48e3
+};
+
+// src/generated/gladia/schema/streamingSupportedBitDepthEnum.ts
+var StreamingSupportedBitDepthEnum = {
+  NUMBER_8: 8,
+  NUMBER_16: 16,
+  NUMBER_24: 24,
+  NUMBER_32: 32
+};
+
 // src/constants/defaults.ts
 var DEFAULT_TIMEOUTS = {
   /** Standard HTTP request timeout for API calls (60 seconds) */
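
The blocks added above are plain lookup objects generated from the Deepgram and Gladia schemas. A minimal usage sketch, assuming these constants are imported from the package root as voice-router-dev (the export hunk at the bottom of this diff adds them to the public export list); everything else here is illustrative:

// Illustrative sketch — only the constant names and values come from this diff.
import { ListenV1EncodingParameter, SpeakV1SampleRateParameter } from "voice-router-dev";

// Reference provider-accepted values by name instead of hard-coding strings and numbers.
const listenEncoding = ListenV1EncodingParameter.opus;           // "opus"
const speakSampleRate = SpeakV1SampleRateParameter.NUMBER_24000; // 24000

console.log(listenEncoding, speakSampleRate);
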
@@ -1097,21 +1160,6 @@ var StreamingResponseStatus = {
   error: "error"
 };
 
-// src/generated/gladia/schema/streamingSupportedBitDepthEnum.ts
-var StreamingSupportedBitDepthEnum = {
-  NUMBER_8: 8,
-  NUMBER_16: 16,
-  NUMBER_24: 24,
-  NUMBER_32: 32
-};
-
-// src/generated/gladia/schema/streamingSupportedEncodingEnum.ts
-var StreamingSupportedEncodingEnum = {
-  "wav/pcm": "wav/pcm",
-  "wav/alaw": "wav/alaw",
-  "wav/ulaw": "wav/ulaw"
-};
-
 // src/generated/gladia/schema/streamingSupportedModels.ts
 var StreamingSupportedModels = {
   "solaria-1": "solaria-1"

@@ -1123,15 +1171,6 @@ var StreamingSupportedRegions = {
   "eu-west": "eu-west"
 };
 
-// src/generated/gladia/schema/streamingSupportedSampleRateEnum.ts
-var StreamingSupportedSampleRateEnum = {
-  NUMBER_8000: 8e3,
-  NUMBER_16000: 16e3,
-  NUMBER_32000: 32e3,
-  NUMBER_44100: 44100,
-  NUMBER_48000: 48e3
-};
-
 // src/generated/gladia/schema/subtitlesFormatEnum.ts
 var SubtitlesFormatEnum = {
   srt: "srt",

@@ -2434,9 +2473,6 @@ var createTranscript = (transcriptParams, options) => {
 var getTranscript = (transcriptId, options) => {
   return axios2.get(`/v2/transcript/${transcriptId}`, options);
 };
-var createTemporaryToken = (createRealtimeTemporaryTokenParams, options) => {
-  return axios2.post("/v2/realtime/token", createRealtimeTemporaryTokenParams, options);
-};
 
 // src/adapters/assemblyai-adapter.ts
 var AssemblyAIAdapter = class extends BaseAdapter {

@@ -2456,8 +2492,9 @@ var AssemblyAIAdapter = class extends BaseAdapter {
     };
     this.baseUrl = "https://api.assemblyai.com";
     // Generated functions already include /v2 path
-    this.wsBaseUrl = "wss://
+    this.wsBaseUrl = "wss://streaming.assemblyai.com/v3/ws";
   }
+  // v3 Universal Streaming endpoint
   /**
    * Get axios config for generated API client functions
    * Configures headers and base URL using authorization header

@@ -2781,14 +2818,17 @@ var AssemblyAIAdapter = class extends BaseAdapter {
    */
   async transcribeStream(options, callbacks) {
     this.validateConfig();
-
-
-
-
-
-    const
-    const
-
+    if (!this.config?.apiKey) {
+      throw new Error("API key is required for streaming");
+    }
+    const sampleRate = options?.sampleRate || 16e3;
+    const encoding = options?.encoding || "pcm_s16le";
+    const wsUrl = `${this.wsBaseUrl}?sample_rate=${sampleRate}&encoding=${encoding}`;
+    const ws = new WebSocket2(wsUrl, {
+      headers: {
+        Authorization: this.config.apiKey
+      }
+    });
     let sessionStatus = "connecting";
     const sessionId = `assemblyai-${Date.now()}-${Math.random().toString(36).substring(7)}`;
     ws.on("open", () => {
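
The rewritten block above now rejects a missing API key and derives the v3 WebSocket URL from the caller's options, falling back to 16000 Hz and "pcm_s16le". The key itself travels in the Authorization header of the handshake rather than in the URL. A rough sketch of the URL the adapter ends up opening under those defaults; the standalone variables below are illustrative only and mirror the values in the hunk above:

// Mirrors the defaults shown in the hunk above; not part of the package itself.
const wsBaseUrl = "wss://streaming.assemblyai.com/v3/ws";
const sampleRate = 16000;      // options?.sampleRate || 16e3
const encoding = "pcm_s16le";  // options?.encoding || "pcm_s16le"
const wsUrl = `${wsBaseUrl}?sample_rate=${sampleRate}&encoding=${encoding}`;
// => "wss://streaming.assemblyai.com/v3/ws?sample_rate=16000&encoding=pcm_s16le"
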
@@ -2798,41 +2838,42 @@ var AssemblyAIAdapter = class extends BaseAdapter {
     ws.on("message", (data) => {
       try {
         const message = JSON.parse(data.toString());
-        if (
+        if ("error" in message) {
+          callbacks?.onError?.({
+            code: "API_ERROR",
+            message: message.error
+          });
+          return;
+        }
+        if (message.type === "Begin") {
+          const beginMsg = message;
           callbacks?.onMetadata?.({
-            sessionId:
-            expiresAt:
+            sessionId: beginMsg.id,
+            expiresAt: new Date(beginMsg.expires_at).toISOString()
           });
-        } else if (message.
+        } else if (message.type === "Turn") {
+          const turnMsg = message;
           callbacks?.onTranscript?.({
             type: "transcript",
-            text:
-            isFinal:
-            confidence:
-            words:
+            text: turnMsg.transcript,
+            isFinal: turnMsg.end_of_turn,
+            confidence: turnMsg.end_of_turn_confidence,
+            words: turnMsg.words.map((word) => ({
               text: word.text,
               start: word.start / 1e3,
+              // Convert ms to seconds
               end: word.end / 1e3,
               confidence: word.confidence
             })),
-            data:
+            data: turnMsg
           });
-        } else if (message.
-
-
-
-
-
-            words: message.words.map((word) => ({
-              text: word.text,
-              start: word.start / 1e3,
-              end: word.end / 1e3,
-              confidence: word.confidence
-            })),
-            data: message
+        } else if (message.type === "Termination") {
+          const termMsg = message;
+          callbacks?.onMetadata?.({
+            terminated: true,
+            audioDurationSeconds: termMsg.audio_duration_seconds,
+            sessionDurationSeconds: termMsg.session_duration_seconds
           });
-        } else if (message.message_type === "SessionTerminated") {
-          callbacks?.onMetadata?.({ terminated: true });
         }
       } catch (error) {
         callbacks?.onError?.({
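
The message handler above maps the v3 Begin / Turn / Termination messages onto the adapter's existing callback surface. A hedged consumer-side sketch: createAssemblyAIAdapter appears in the export list below, but its exact config shape is an assumption, and the callback payload fields simply mirror what the hunk above constructs:

// Sketch only — the { apiKey } config shape is assumed, not confirmed by this diff.
import { createAssemblyAIAdapter } from "voice-router-dev";

const adapter = createAssemblyAIAdapter({ apiKey: process.env.ASSEMBLYAI_API_KEY });

await adapter.transcribeStream(
  { sampleRate: 16000, encoding: "pcm_s16le" },
  {
    // Begin and Termination messages both arrive here (sessionId/expiresAt, or
    // terminated/audioDurationSeconds/sessionDurationSeconds).
    onMetadata: (meta) => console.log("metadata:", meta),
    // Turn messages: text, isFinal (end_of_turn), confidence, and word timings in seconds.
    onTranscript: (t) => {
      if (t.isFinal) console.log(`turn (${t.confidence}):`, t.text);
    },
    // Protocol-level errors surface as { code: "API_ERROR", message }.
    onError: (err) => console.error(err.code, err.message)
  }
);
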
@@ -5071,9 +5112,16 @@ export {
   GladiaAdapter,
   schema_exports as GladiaTypes,
   GladiaWebhookHandler,
+  ListenV1EncodingParameter,
   OpenAIWhisperAdapter,
+  SpeakV1ContainerParameter,
+  SpeakV1EncodingParameter,
+  SpeakV1SampleRateParameter,
   SpeechmaticsAdapter,
   SpeechmaticsWebhookHandler,
+  StreamingSupportedBitDepthEnum,
+  StreamingSupportedEncodingEnum,
+  StreamingSupportedSampleRateEnum,
   VoiceRouter,
   WebhookRouter,
   createAssemblyAIAdapter,
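
With the export list above, the Gladia streaming enums and the Deepgram parameter constants become importable straight from the package entry point. A short sketch using only names that appear in this hunk; the printed values come from the definitions added earlier in the file:

// Only the identifiers below are taken from the export hunk above.
import {
  StreamingSupportedEncodingEnum,
  StreamingSupportedSampleRateEnum,
  StreamingSupportedBitDepthEnum
} from "voice-router-dev";

console.log(StreamingSupportedEncodingEnum["wav/pcm"]);     // "wav/pcm"
console.log(StreamingSupportedSampleRateEnum.NUMBER_44100); // 44100
console.log(StreamingSupportedBitDepthEnum.NUMBER_16);      // 16
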