@voquill/voice-ai 0.1.0

Files changed (43)
  1. package/dist/aldea.utils.d.ts +16 -0
  2. package/dist/aldea.utils.d.ts.map +1 -0
  3. package/dist/aldea.utils.js +42 -0
  4. package/dist/assemblyai.utils.d.ts +6 -0
  5. package/dist/assemblyai.utils.d.ts.map +1 -0
  6. package/dist/assemblyai.utils.js +24 -0
  7. package/dist/azure-openai.utils.d.ts +29 -0
  8. package/dist/azure-openai.utils.d.ts.map +1 -0
  9. package/dist/azure-openai.utils.js +67 -0
  10. package/dist/azure.utils.d.ts +30 -0
  11. package/dist/azure.utils.d.ts.map +1 -0
  12. package/dist/azure.utils.js +253 -0
  13. package/dist/claude.utils.d.ts +26 -0
  14. package/dist/claude.utils.d.ts.map +1 -0
  15. package/dist/claude.utils.js +229 -0
  16. package/dist/deepgram.utils.d.ts +5 -0
  17. package/dist/deepgram.utils.d.ts.map +1 -0
  18. package/dist/deepgram.utils.js +25 -0
  19. package/dist/deepseek.utils.d.ts +26 -0
  20. package/dist/deepseek.utils.d.ts.map +1 -0
  21. package/dist/deepseek.utils.js +102 -0
  22. package/dist/elevenlabs.utils.d.ts +6 -0
  23. package/dist/elevenlabs.utils.d.ts.map +1 -0
  24. package/dist/elevenlabs.utils.js +29 -0
  25. package/dist/gemini.utils.d.ts +41 -0
  26. package/dist/gemini.utils.d.ts.map +1 -0
  27. package/dist/gemini.utils.js +271 -0
  28. package/dist/groq.utils.d.ts +42 -0
  29. package/dist/groq.utils.d.ts.map +1 -0
  30. package/dist/groq.utils.js +141 -0
  31. package/dist/index.d.ts +14 -0
  32. package/dist/index.d.ts.map +1 -0
  33. package/dist/index.js +13 -0
  34. package/dist/openai.utils.d.ts +55 -0
  35. package/dist/openai.utils.d.ts.map +1 -0
  36. package/dist/openai.utils.js +275 -0
  37. package/dist/openrouter.utils.d.ts +69 -0
  38. package/dist/openrouter.utils.d.ts.map +1 -0
  39. package/dist/openrouter.utils.js +148 -0
  40. package/dist/speaches.utils.d.ts +18 -0
  41. package/dist/speaches.utils.d.ts.map +1 -0
  42. package/dist/speaches.utils.js +38 -0
  43. package/package.json +35 -0
package/dist/claude.utils.js
@@ -0,0 +1,229 @@
+ import Anthropic from "@anthropic-ai/sdk";
+ import { retry, countWords } from "@voquill/utilities";
+ export const CLAUDE_MODELS = [
+   "claude-opus-4-5-20251101",
+   "claude-opus-4-5",
+   "claude-3-7-sonnet-latest",
+   "claude-3-7-sonnet-20250219",
+   "claude-3-5-haiku-latest",
+   "claude-3-5-haiku-20241022",
+   "claude-haiku-4-5",
+   "claude-haiku-4-5-20251001",
+   "claude-sonnet-4-20250514",
+   "claude-sonnet-4-0",
+   "claude-4-sonnet-20250514",
+   "claude-sonnet-4-5",
+   "claude-sonnet-4-5-20250929",
+   "claude-opus-4-0",
+   "claude-opus-4-20250514",
+   "claude-4-opus-20250514",
+   "claude-opus-4-1-20250805",
+   "claude-3-opus-latest",
+   "claude-3-opus-20240229",
+   "claude-3-haiku-20240307",
+ ];
+ const createClient = (apiKey) => {
+   return new Anthropic({
+     apiKey: apiKey.trim(),
+     dangerouslyAllowBrowser: true,
+   });
+ };
+ export const claudeGenerateTextResponse = async ({ apiKey, model = "claude-sonnet-4-20250514", system, prompt, jsonResponse, }) => {
+   return retry({
+     retries: 3,
+     fn: async () => {
+       const client = createClient(apiKey);
+       let finalPrompt = prompt;
+       if (jsonResponse) {
+         finalPrompt = `${prompt}\n\nRespond with valid JSON matching this schema: ${JSON.stringify(jsonResponse.schema)}`;
+       }
+       const response = await client.messages.create({
+         model,
+         max_tokens: 1024,
+         system: system ?? undefined,
+         messages: [{ role: "user", content: finalPrompt }],
+       });
+       console.log("claude llm usage:", response.usage);
+       const textBlock = response.content.find((block) => block.type === "text");
+       if (!textBlock || textBlock.type !== "text") {
+         throw new Error("No text response from Claude");
+       }
+       const content = textBlock.text;
+       const tokensUsed = (response.usage?.input_tokens ?? 0) +
+         (response.usage?.output_tokens ?? 0);
+       return {
+         text: content,
+         tokensUsed: tokensUsed || countWords(content),
+       };
+     },
+   });
+ };
+ export const claudeTestIntegration = async ({ apiKey, }) => {
+   const client = createClient(apiKey);
+   const response = await client.messages.create({
+     model: "claude-3-haiku-20240307",
+     max_tokens: 32,
+     messages: [
+       {
+         role: "user",
+         content: 'Reply with the single word "Hello."',
+       },
+     ],
+   });
+   const textBlock = response.content.find((block) => block.type === "text");
+   if (!textBlock || textBlock.type !== "text") {
+     throw new Error("No text response from Claude");
+   }
+   return textBlock.text.toLowerCase().includes("hello");
+ };
+ // ============================================================================
+ // Streaming Chat
+ // ============================================================================
+ function llmMessagesToClaude(messages) {
+   let system;
+   const out = [];
+   for (const msg of messages) {
+     if (msg.role === "system") {
+       system = msg.content;
+       continue;
+     }
+     if (msg.role === "user") {
+       out.push({ role: "user", content: msg.content });
+       continue;
+     }
+     if (msg.role === "assistant") {
+       const content = [];
+       if (msg.content) {
+         content.push({ type: "text", text: msg.content });
+       }
+       for (const tc of msg.toolCalls ?? []) {
+         let parsedInput;
+         try {
+           parsedInput = JSON.parse(tc.arguments);
+         }
+         catch {
+           parsedInput = {};
+         }
+         content.push({
+           type: "tool_use",
+           id: tc.id,
+           name: tc.name,
+           input: parsedInput,
+         });
+       }
+       if (content.length > 0) {
+         out.push({ role: "assistant", content });
+       }
+       continue;
+     }
+     if (msg.role === "tool") {
+       out.push({
+         role: "user",
+         content: [
+           {
+             type: "tool_result",
+             tool_use_id: msg.toolCallId,
+             content: msg.content,
+           },
+         ],
+       });
+     }
+   }
+   return { system, messages: out };
+ }
+ function claudeFinishReason(raw) {
+   switch (raw) {
+     case "end_turn":
+       return "stop";
+     case "max_tokens":
+       return "length";
+     case "tool_use":
+       return "tool-calls";
+     default:
+       return "other";
+   }
+ }
+ export async function* claudeStreamChat({ apiKey, model, input, }) {
+   const client = createClient(apiKey);
+   const { system, messages } = llmMessagesToClaude(input.messages);
+   const tools = input.tools && input.tools.length > 0
+     ? input.tools.map((t) => ({
+       name: t.name,
+       description: t.description ?? "",
+       input_schema: (t.parameters ?? {
+         type: "object",
+         properties: {},
+       }),
+     }))
+     : undefined;
+   let toolChoice;
+   if (input.toolChoice && tools) {
+     if (typeof input.toolChoice === "string") {
+       switch (input.toolChoice) {
+         case "auto":
+           toolChoice = { type: "auto" };
+           break;
+         case "required":
+           toolChoice = { type: "any" };
+           break;
+         case "none":
+           toolChoice = undefined;
+           break;
+       }
+     }
+     else {
+       toolChoice = { type: "tool", name: input.toolChoice.name };
+     }
+   }
+   const stream = client.messages.stream({
+     model,
+     max_tokens: input.maxTokens ?? 4096,
+     system,
+     messages,
+     tools,
+     tool_choice: toolChoice,
+     temperature: input.temperature,
+     top_p: input.topP,
+     stop_sequences: input.stopSequences,
+   });
+   const pendingToolCalls = [];
+   for await (const event of stream) {
+     if (event.type === "content_block_delta" &&
+       event.delta.type === "text_delta") {
+       yield { type: "text-delta", text: event.delta.text };
+     }
+     if (event.type === "content_block_delta" &&
+       event.delta.type === "input_json_delta") {
+       const last = pendingToolCalls[pendingToolCalls.length - 1];
+       if (last) {
+         last.arguments += event.delta.partial_json;
+       }
+     }
+     if (event.type === "content_block_start" &&
+       event.content_block.type === "tool_use") {
+       pendingToolCalls.push({
+         id: event.content_block.id,
+         name: event.content_block.name,
+         arguments: "",
+       });
+     }
+   }
+   for (const tc of pendingToolCalls) {
+     yield {
+       type: "tool-call",
+       id: tc.id,
+       name: tc.name,
+       arguments: tc.arguments,
+     };
+   }
+   const finalMessage = await stream.finalMessage();
+   yield {
+     type: "finish",
+     finishReason: claudeFinishReason(finalMessage.stop_reason),
+     usage: {
+       promptTokens: finalMessage.usage?.input_tokens,
+       completionTokens: finalMessage.usage?.output_tokens,
+     },
+     modelId: finalMessage.model,
+   };
+ }
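
For orientation, a minimal consumption sketch of the two exports above. Everything here is illustrative: the import path presumes the package index re-exports these helpers, and ANTHROPIC_API_KEY is a hypothetical place to keep the key.

import { claudeGenerateTextResponse, claudeStreamChat } from "@voquill/voice-ai";

const apiKey = process.env.ANTHROPIC_API_KEY ?? ""; // hypothetical key source

// One-shot generation; model defaults to "claude-sonnet-4-20250514" per the code above.
const { text, tokensUsed } = await claudeGenerateTextResponse({
  apiKey,
  prompt: "Say hello in five words.",
});

// Streaming: text deltas arrive first; buffered tool calls and a "finish" event follow.
for await (const event of claudeStreamChat({
  apiKey,
  model: "claude-3-5-haiku-latest",
  input: { messages: [{ role: "user", content: "Hello" }] },
})) {
  if (event.type === "text-delta") process.stdout.write(event.text);
  if (event.type === "finish") console.log("\n", event.finishReason, event.usage);
}
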
package/dist/deepgram.utils.d.ts
@@ -0,0 +1,5 @@
+ export type DeepgramTestIntegrationArgs = {
+   apiKey: string;
+ };
+ export declare const deepgramTestIntegration: ({ apiKey, }: DeepgramTestIntegrationArgs) => Promise<boolean>;
+ //# sourceMappingURL=deepgram.utils.d.ts.map
package/dist/deepgram.utils.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"deepgram.utils.d.ts","sourceRoot":"","sources":["../src/deepgram.utils.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,2BAA2B,GAAG;IACxC,MAAM,EAAE,MAAM,CAAC;CAChB,CAAC;AAEF,eAAO,MAAM,uBAAuB,GAAI,aAErC,2BAA2B,KAAG,OAAO,CAAC,OAAO,CA4B/C,CAAC"}
package/dist/deepgram.utils.js
@@ -0,0 +1,25 @@
+ export const deepgramTestIntegration = ({ apiKey, }) => {
+   return new Promise((resolve) => {
+     const wsUrl = "wss://api.deepgram.com/v1/listen?encoding=linear16&sample_rate=16000&model=nova-3";
+     const ws = new WebSocket(wsUrl, ["token", apiKey]);
+     const timeout = setTimeout(() => {
+       ws.close();
+       resolve(false);
+     }, 5000);
+     ws.onopen = () => {
+       clearTimeout(timeout);
+       ws.close();
+       resolve(true);
+     };
+     ws.onerror = () => {
+       clearTimeout(timeout);
+       resolve(false);
+     };
+     ws.onclose = (event) => {
+       clearTimeout(timeout);
+       if (event.code === 1008 || event.code === 4001 || event.code === 4003) {
+         resolve(false);
+       }
+     };
+   });
+ };
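
A quick sanity-check sketch for the helper above (assumed index re-export, hypothetical env var). It resolves true once the authenticated WebSocket opens, and false on error, on auth-related close codes (1008/4001/4003), or after the 5-second timeout.

import { deepgramTestIntegration } from "@voquill/voice-ai"; // assumed re-export

const ok = await deepgramTestIntegration({
  apiKey: process.env.DEEPGRAM_API_KEY ?? "", // hypothetical key source
});
console.log(ok ? "Deepgram key accepted" : "Deepgram key rejected or unreachable");
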
package/dist/deepseek.utils.d.ts
@@ -0,0 +1,26 @@
+ import type { JsonResponse, LlmChatInput, LlmStreamEvent } from "@voquill/types";
+ export declare const DEEPSEEK_MODELS: readonly ["deepseek-chat", "deepseek-reasoner"];
+ export type DeepseekModel = (typeof DEEPSEEK_MODELS)[number];
+ export type DeepseekGenerateTextArgs = {
+   apiKey: string;
+   model?: DeepseekModel;
+   system?: string;
+   prompt: string;
+   jsonResponse?: JsonResponse;
+ };
+ export type DeepseekGenerateResponseOutput = {
+   text: string;
+   tokensUsed: number;
+ };
+ export declare const deepseekGenerateTextResponse: ({ apiKey, model, system, prompt, jsonResponse, }: DeepseekGenerateTextArgs) => Promise<DeepseekGenerateResponseOutput>;
+ export type DeepseekTestIntegrationArgs = {
+   apiKey: string;
+ };
+ export declare const deepseekTestIntegration: ({ apiKey, }: DeepseekTestIntegrationArgs) => Promise<boolean>;
+ export type DeepseekStreamChatArgs = {
+   apiKey: string;
+   model: string;
+   input: LlmChatInput;
+ };
+ export declare function deepseekStreamChat({ apiKey, model, input, }: DeepseekStreamChatArgs): AsyncGenerator<LlmStreamEvent>;
+ //# sourceMappingURL=deepseek.utils.d.ts.map
package/dist/deepseek.utils.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"deepseek.utils.d.ts","sourceRoot":"","sources":["../src/deepseek.utils.ts"],"names":[],"mappings":"AAMA,OAAO,KAAK,EAAE,YAAY,EAAE,YAAY,EAAE,cAAc,EAAE,MAAM,gBAAgB,CAAC;AAGjF,eAAO,MAAM,eAAe,iDAAkD,CAAC;AAC/E,MAAM,MAAM,aAAa,GAAG,CAAC,OAAO,eAAe,CAAC,CAAC,MAAM,CAAC,CAAC;AAkC7D,MAAM,MAAM,wBAAwB,GAAG;IACrC,MAAM,EAAE,MAAM,CAAC;IACf,KAAK,CAAC,EAAE,aAAa,CAAC;IACtB,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,MAAM,EAAE,MAAM,CAAC;IACf,YAAY,CAAC,EAAE,YAAY,CAAC;CAC7B,CAAC;AAEF,MAAM,MAAM,8BAA8B,GAAG;IAC3C,IAAI,EAAE,MAAM,CAAC;IACb,UAAU,EAAE,MAAM,CAAC;CACpB,CAAC;AAEF,eAAO,MAAM,4BAA4B,GAAU,kDAMhD,wBAAwB,KAAG,OAAO,CAAC,8BAA8B,CA8CnE,CAAC;AAEF,MAAM,MAAM,2BAA2B,GAAG;IACxC,MAAM,EAAE,MAAM,CAAC;CAChB,CAAC;AAEF,eAAO,MAAM,uBAAuB,GAAU,aAE3C,2BAA2B,KAAG,OAAO,CAAC,OAAO,CAgC/C,CAAC;AAMF,MAAM,MAAM,sBAAsB,GAAG;IACnC,MAAM,EAAE,MAAM,CAAC;IACf,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,YAAY,CAAC;CACrB,CAAC;AAEF,wBAAuB,kBAAkB,CAAC,EACxC,MAAM,EACN,KAAK,EACL,KAAK,GACN,EAAE,sBAAsB,GAAG,cAAc,CAAC,cAAc,CAAC,CAGzD"}
package/dist/deepseek.utils.js
@@ -0,0 +1,102 @@
+ import OpenAI from "openai";
+ import { retry, countWords } from "@voquill/utilities";
+ import { openaiCompatibleStreamChat } from "./openai.utils";
+ export const DEEPSEEK_MODELS = ["deepseek-chat", "deepseek-reasoner"];
+ const DEEPSEEK_BASE_URL = "https://api.deepseek.com";
+ const contentToString = (content) => {
+   if (!content) {
+     return "";
+   }
+   if (typeof content === "string") {
+     return content;
+   }
+   return content
+     .map((part) => {
+       if (part.type === "text") {
+         return part.text ?? "";
+       }
+       return "";
+     })
+     .join("")
+     .trim();
+ };
+ const createClient = (apiKey) => {
+   return new OpenAI({
+     apiKey: apiKey.trim(),
+     baseURL: DEEPSEEK_BASE_URL,
+     dangerouslyAllowBrowser: true, // This is safe because Voquill runs natively on desktop
+   });
+ };
+ export const deepseekGenerateTextResponse = async ({ apiKey, model = "deepseek-chat", system, prompt, jsonResponse, }) => {
+   return retry({
+     retries: 3,
+     fn: async () => {
+       const client = createClient(apiKey);
+       const messages = [];
+       if (system) {
+         messages.push({ role: "system", content: system });
+       }
+       let finalPrompt = prompt;
+       if (jsonResponse) {
+         finalPrompt = `${prompt}\n\nRespond with valid JSON matching this schema: ${JSON.stringify(jsonResponse.schema)}`;
+       }
+       const userParts = [];
+       userParts.push({ type: "text", text: finalPrompt });
+       messages.push({ role: "user", content: userParts });
+       const response = await client.chat.completions.create({
+         messages,
+         model,
+         temperature: 1,
+         max_tokens: 1024,
+         top_p: 1,
+         response_format: jsonResponse ? { type: "json_object" } : undefined,
+       });
+       console.log("deepseek llm usage:", response.usage);
+       if (!response.choices || response.choices.length === 0) {
+         throw new Error("No response from DeepSeek");
+       }
+       const result = response.choices[0].message.content;
+       if (!result) {
+         throw new Error("Content is empty");
+       }
+       const content = contentToString(result);
+       return {
+         text: content,
+         tokensUsed: response.usage?.total_tokens ?? countWords(content),
+       };
+     },
+   });
+ };
+ export const deepseekTestIntegration = async ({ apiKey, }) => {
+   const client = createClient(apiKey);
+   const response = await client.chat.completions.create({
+     messages: [
+       {
+         role: "user",
+         content: [
+           {
+             type: "text",
+             text: `Reply with the single word "Hello."`,
+           },
+         ],
+       },
+     ],
+     model: "deepseek-chat",
+     temperature: 0,
+     max_tokens: 32,
+     top_p: 1,
+   });
+   if (!response.choices || response.choices.length === 0) {
+     throw new Error("No response from DeepSeek");
+   }
+   const first = response.choices[0];
+   const content = contentToString(first?.message?.content);
+   if (!content) {
+     throw new Error("Response content is empty");
+   }
+   return content.toLowerCase().includes("hello");
+ };
+ export async function* deepseekStreamChat({ apiKey, model, input, }) {
+   const client = createClient(apiKey);
+   yield* openaiCompatibleStreamChat(client, model, input);
+ }
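
As above, a hedged usage sketch (assumed index re-export, hypothetical DEEPSEEK_API_KEY). Streaming delegates to the shared OpenAI-compatible helper, so the event shape matches the other providers' streams.

import { deepseekGenerateTextResponse, deepseekStreamChat } from "@voquill/voice-ai";

const apiKey = process.env.DEEPSEEK_API_KEY ?? ""; // hypothetical key source

// One-shot completion against the OpenAI-compatible DeepSeek endpoint.
const { text } = await deepseekGenerateTextResponse({ apiKey, prompt: "Name one prime number." });

for await (const event of deepseekStreamChat({
  apiKey,
  model: "deepseek-chat",
  input: { messages: [{ role: "user", content: "Hello" }] },
})) {
  if (event.type === "text-delta") process.stdout.write(event.text);
}
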
package/dist/elevenlabs.utils.d.ts
@@ -0,0 +1,6 @@
+ export type ElevenLabsTestIntegrationArgs = {
+   apiKey: string;
+ };
+ export declare const elevenlabsTestIntegration: ({ apiKey, }: ElevenLabsTestIntegrationArgs) => Promise<boolean>;
+ export declare const convertFloat32ToBase64PCM16: (float32Array: Float32Array | number[]) => string;
+ //# sourceMappingURL=elevenlabs.utils.d.ts.map
package/dist/elevenlabs.utils.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"elevenlabs.utils.d.ts","sourceRoot":"","sources":["../src/elevenlabs.utils.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,6BAA6B,GAAG;IAC1C,MAAM,EAAE,MAAM,CAAC;CAChB,CAAC;AAEF,eAAO,MAAM,yBAAyB,GAAU,aAE7C,6BAA6B,KAAG,OAAO,CAAC,OAAO,CAUjD,CAAC;AAEF,eAAO,MAAM,2BAA2B,GACtC,cAAc,YAAY,GAAG,MAAM,EAAE,KACpC,MAmBF,CAAC"}
package/dist/elevenlabs.utils.js
@@ -0,0 +1,29 @@
+ export const elevenlabsTestIntegration = async ({ apiKey, }) => {
+   try {
+     const response = await fetch("https://api.elevenlabs.io/v1/user", {
+       method: "GET",
+       headers: { "xi-api-key": apiKey },
+     });
+     return response.ok;
+   }
+   catch {
+     return false;
+   }
+ };
+ export const convertFloat32ToBase64PCM16 = (float32Array) => {
+   const samples = Array.isArray(float32Array)
+     ? float32Array
+     : Array.from(float32Array);
+   const buffer = new ArrayBuffer(samples.length * 2);
+   const view = new DataView(buffer);
+   for (let i = 0; i < samples.length; i++) {
+     const s = Math.max(-1, Math.min(1, samples[i]));
+     view.setInt16(i * 2, s < 0 ? s * 0x8000 : s * 0x7fff, true);
+   }
+   const bytes = new Uint8Array(buffer);
+   let binary = "";
+   for (let i = 0; i < bytes.length; i++) {
+     binary += String.fromCharCode(bytes[i]);
+   }
+   return btoa(binary);
+ };
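
To make the little-endian PCM16 packing concrete, a small sketch (the 16 kHz frame size and sine input are illustrative choices, not part of the package):

import { convertFloat32ToBase64PCM16 } from "@voquill/voice-ai"; // assumed re-export

// 480 samples = 30 ms at 16 kHz; a 440 Hz sine stands in for real microphone input.
const frame = Float32Array.from({ length: 480 }, (_, i) =>
  Math.sin((2 * Math.PI * 440 * i) / 16000));
const base64Pcm16 = convertFloat32ToBase64PCM16(frame);
// 480 samples -> 960 bytes -> (960 / 3) * 4 = 1280 base64 characters.
console.log(base64Pcm16.length); // 1280
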
package/dist/gemini.utils.d.ts
@@ -0,0 +1,41 @@
+ import type { JsonResponse, LlmChatInput, LlmStreamEvent } from "@voquill/types";
+ export declare const GEMINI_GENERATE_TEXT_MODELS: readonly ["gemini-2.5-flash", "gemini-2.5-pro", "gemini-3-flash-preview", "gemini-3-pro-preview", "gemini-2.5-flash-lite"];
+ export type GeminiGenerateTextModel = (typeof GEMINI_GENERATE_TEXT_MODELS)[number];
+ export declare const GEMINI_TRANSCRIPTION_MODELS: readonly ["gemini-2.5-flash", "gemini-2.5-pro", "gemini-3-flash-preview"];
+ export type GeminiTranscriptionModel = (typeof GEMINI_TRANSCRIPTION_MODELS)[number];
+ export type GeminiTranscriptionArgs = {
+   apiKey: string;
+   model?: GeminiTranscriptionModel;
+   blob: ArrayBuffer | Buffer;
+   mimeType?: string;
+   prompt?: string;
+   language?: string;
+ };
+ export type GeminiTranscribeAudioOutput = {
+   text: string;
+   wordsUsed: number;
+ };
+ export declare const geminiTranscribeAudio: ({ apiKey, model, blob, mimeType, prompt, language, }: GeminiTranscriptionArgs) => Promise<GeminiTranscribeAudioOutput>;
+ export type GeminiGenerateTextArgs = {
+   apiKey: string;
+   model?: GeminiGenerateTextModel;
+   system?: string;
+   prompt: string;
+   jsonResponse?: JsonResponse;
+ };
+ export type GeminiGenerateResponseOutput = {
+   text: string;
+   tokensUsed: number;
+ };
+ export declare const geminiGenerateTextResponse: ({ apiKey, model, system, prompt, jsonResponse, }: GeminiGenerateTextArgs) => Promise<GeminiGenerateResponseOutput>;
+ export type GeminiTestIntegrationArgs = {
+   apiKey: string;
+ };
+ export declare const geminiTestIntegration: ({ apiKey, }: GeminiTestIntegrationArgs) => Promise<boolean>;
+ export type GeminiStreamChatArgs = {
+   apiKey: string;
+   model: string;
+   input: LlmChatInput;
+ };
+ export declare function geminiStreamChat({ apiKey, model, input, }: GeminiStreamChatArgs): AsyncGenerator<LlmStreamEvent>;
+ //# sourceMappingURL=gemini.utils.d.ts.map
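
The implementation behind these declarations is not part of this excerpt, so the following transcription sketch rests purely on the signatures above (file name and env var are hypothetical):

import { readFile } from "node:fs/promises";
import { geminiTranscribeAudio } from "@voquill/voice-ai"; // assumed re-export

const blob = await readFile("clip.wav"); // hypothetical local recording (Buffer)
const { text, wordsUsed } = await geminiTranscribeAudio({
  apiKey: process.env.GEMINI_API_KEY ?? "", // hypothetical key source
  blob,
  mimeType: "audio/wav",
  language: "en",
});
console.log(text, wordsUsed);
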
package/dist/gemini.utils.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"gemini.utils.d.ts","sourceRoot":"","sources":["../src/gemini.utils.ts"],"names":[],"mappings":"AAQA,OAAO,KAAK,EACV,YAAY,EACZ,YAAY,EAGZ,cAAc,EACf,MAAM,gBAAgB,CAAC;AAExB,eAAO,MAAM,2BAA2B,4HAM9B,CAAC;AACX,MAAM,MAAM,uBAAuB,GACjC,CAAC,OAAO,2BAA2B,CAAC,CAAC,MAAM,CAAC,CAAC;AAE/C,eAAO,MAAM,2BAA2B,2EAI9B,CAAC;AACX,MAAM,MAAM,wBAAwB,GAClC,CAAC,OAAO,2BAA2B,CAAC,CAAC,MAAM,CAAC,CAAC;AAgD/C,MAAM,MAAM,uBAAuB,GAAG;IACpC,MAAM,EAAE,MAAM,CAAC;IACf,KAAK,CAAC,EAAE,wBAAwB,CAAC;IACjC,IAAI,EAAE,WAAW,GAAG,MAAM,CAAC;IAC3B,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB,CAAC;AAEF,MAAM,MAAM,2BAA2B,GAAG;IACxC,IAAI,EAAE,MAAM,CAAC;IACb,SAAS,EAAE,MAAM,CAAC;CACnB,CAAC;AAEF,eAAO,MAAM,qBAAqB,GAAU,sDAOzC,uBAAuB,KAAG,OAAO,CAAC,2BAA2B,CA0C/D,CAAC;AAEF,MAAM,MAAM,sBAAsB,GAAG;IACnC,MAAM,EAAE,MAAM,CAAC;IACf,KAAK,CAAC,EAAE,uBAAuB,CAAC;IAChC,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,MAAM,EAAE,MAAM,CAAC;IACf,YAAY,CAAC,EAAE,YAAY,CAAC;CAC7B,CAAC;AAEF,MAAM,MAAM,4BAA4B,GAAG;IACzC,IAAI,EAAE,MAAM,CAAC;IACb,UAAU,EAAE,MAAM,CAAC;CACpB,CAAC;AAEF,eAAO,MAAM,0BAA0B,GAAU,kDAM9C,sBAAsB,KAAG,OAAO,CAAC,4BAA4B,CA4C/D,CAAC;AAEF,MAAM,MAAM,yBAAyB,GAAG;IACtC,MAAM,EAAE,MAAM,CAAC;CAChB,CAAC;AAEF,eAAO,MAAM,qBAAqB,GAAU,aAEzC,yBAAyB,KAAG,OAAO,CAAC,OAAO,CAc7C,CAAC;AA6EF,MAAM,MAAM,oBAAoB,GAAG;IACjC,MAAM,EAAE,MAAM,CAAC;IACf,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,YAAY,CAAC;CACrB,CAAC;AAEF,wBAAuB,gBAAgB,CAAC,EACtC,MAAM,EACN,KAAK,EACL,KAAK,GACN,EAAE,oBAAoB,GAAG,cAAc,CAAC,cAAc,CAAC,CAuFvD"}