krisspy-ai 1.1.17

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,3099 @@
1
"use strict";
// ---- esbuild CommonJS-interop helpers (machine-generated bundler preamble) ----
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Installs a lazy, enumerable getter on `target` for every key in `all`;
// used below to populate the module's export object.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties of `from` onto `to` as getters, skipping `except`
// and keys already present; enumerability is taken from the source descriptor.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Re-exports everything (except "default") from `mod` onto one or two targets.
var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default"));
// Wraps a CommonJS module so it can be consumed like an ES module.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
// Marks the export object as an ES module and copies the bundle's exports onto it.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
30
+
31
// src/index.ts
// Public export map for the package: providers, media services, registries,
// and top-level convenience functions. Populated via lazy getters (__export).
var index_exports = {};
__export(index_exports, {
  AzureDalleService: () => AzureDalleService,
  AzureProvider: () => AzureProvider,
  AzureSoraService: () => AzureSoraService,
  AzureTTSService: () => AzureTTSService,
  AzureTranscriptionService: () => AzureTranscriptionService,
  BaseGenerativeService: () => BaseGenerativeService,
  BaseImageService: () => BaseImageService,
  BaseProvider: () => BaseProvider,
  BaseTTSService: () => BaseTTSService,
  BaseTranscriptionService: () => BaseTranscriptionService,
  BaseVideoService: () => BaseVideoService,
  BedrockProvider: () => BedrockProvider,
  GeminiProvider: () => GeminiProvider,
  KrisspyProvider: () => KrisspyProvider,
  OpenAIImageService: () => OpenAIImageService,
  OpenAIProvider: () => OpenAIProvider,
  OpenAISoraService: () => OpenAISoraService,
  OpenAITTSService: () => OpenAITTSService,
  OpenAITranscriptionService: () => OpenAITranscriptionService,
  ZAIProvider: () => ZAIProvider,
  cleanup: () => cleanup,
  generateImage: () => generateImage,
  generateVideo: () => generateVideo,
  getAvailableImageServices: () => getAvailableImageServices,
  getAvailableProviders: () => getAvailableProviders,
  getAvailableTTSServices: () => getAvailableTTSServices,
  getAvailableTranscriptionServices: () => getAvailableTranscriptionServices,
  getAvailableVideoServices: () => getAvailableVideoServices,
  getImageService: () => getImageService,
  getProvider: () => getProvider,
  getTTSService: () => getTTSService,
  getTranscriptionService: () => getTranscriptionService,
  getVideoService: () => getVideoService,
  query: () => query,
  registerImageService: () => registerImageService,
  registerProvider: () => registerProvider,
  registerTTSService: () => registerTTSService,
  registerTranscriptionService: () => registerTranscriptionService,
  registerVideoService: () => registerVideoService,
  synthesize: () => synthesize,
  transcribe: () => transcribe
});
module.exports = __toCommonJS(index_exports);
77
+ var import_claude_agent_sdk = require("@anthropic-ai/claude-agent-sdk");
78
+ var import_node_fetch13 = __toESM(require("node-fetch"));
79
+ var import_form_data6 = __toESM(require("form-data"));
80
+ var import_http = require("http");
81
+
82
// src/providers/base.ts
// Abstract base class for chat providers. Concrete providers (OpenAI, ZAI,
// Gemini, Bedrock, ...) extend this and implement getName/mapModel/
// buildRequest/getEndpoint/getHeaders/handleStreamingResponse/convertResponse.
var BaseProvider = class {
  /**
   * @param {object} [config] - provider-specific configuration; stored as-is
   *   for subclasses to read (apiKey, baseUrl, defaultModel, ...).
   */
  constructor(config = {}) {
    this.config = config;
  }
};
88
+
89
+ // src/providers/openai.ts
90
+ var OpenAIProvider = class extends BaseProvider {
91
constructor(config = {}) {
  super(config);
  // Endpoint + credentials; defaults target the public OpenAI API.
  this.baseUrl = config.baseUrl || "https://api.openai.com/v1";
  this.apiKey = config.apiKey || "";
  this.defaultModel = config.defaultModel || "gpt-4o";
  // Anthropic model id -> OpenAI model id. Unknown ids fall back to
  // `defaultModel` in mapModel(). Haiku-tier maps to the cheap model,
  // sonnet/opus tiers to the larger ones.
  this.modelMap = {
    "claude-3-5-haiku-20241022": "gpt-4o-mini",
    "claude-3-5-haiku-latest": "gpt-4o-mini",
    "claude-haiku-4-5-20251001": "gpt-4o-mini",
    "claude-3-5-sonnet-20241022": "gpt-5",
    "claude-sonnet-4-20250514": "gpt-5",
    "claude-opus-4-20250514": "gpt-5.2",
    "claude-opus-4-5-20250514": "gpt-5.2",
    "claude-opus-4-5-20251101": "gpt-5.2"
  };
}
107
+ getName() {
108
+ return "openai";
109
+ }
110
+ mapModel(anthropicModel) {
111
+ return this.modelMap[anthropicModel] || this.defaultModel;
112
+ }
113
/**
 * Convert Anthropic-format messages to OpenAI chat-completions messages.
 * Handles plain-string content, text/image/document blocks, tool_use
 * (assistant tool calls) and tool_result (role:"tool" replies).
 * NOTE: tool_result blocks are pushed into the output immediately while the
 * rest of the same message is still being accumulated, so a message mixing
 * tool_result with other blocks emits the tool reply *before* the remainder.
 */
convertMessages(anthropicMessages) {
  const openaiMessages = [];
  for (const msg of anthropicMessages) {
    if (typeof msg.content === "string") {
      openaiMessages.push({
        role: msg.role === "assistant" ? "assistant" : "user",
        content: msg.content
      });
    } else if (Array.isArray(msg.content)) {
      const contentParts = [];
      const toolCalls = [];
      // When true, content is sent as a multimodal parts array instead of a
      // flattened string (required by OpenAI for image/file parts).
      let hasImages = false;
      for (const block of msg.content) {
        if (block.type === "text") {
          contentParts.push({ type: "text", text: block.text });
        } else if (block.type === "image") {
          hasImages = true;
          if (block.source?.type === "url") {
            contentParts.push({
              type: "image_url",
              image_url: { url: block.source.url }
            });
          } else if (block.source?.type === "base64") {
            contentParts.push({
              type: "image_url",
              image_url: {
                url: `data:${block.source.media_type};base64,${block.source.data}`
              }
            });
          }
        } else if (block.type === "document") {
          hasImages = true;
          const source = block.source;
          const data = source?.data;
          // NOTE(review): assumes every document is a base64 PDF — the
          // filename and media type are hard-coded; confirm against callers.
          contentParts.push({
            type: "file",
            file: {
              filename: "document.pdf",
              file_data: `data:application/pdf;base64,${data}`
            }
          });
        } else if (block.type === "tool_result") {
          openaiMessages.push({
            role: "tool",
            tool_call_id: block.tool_use_id,
            content: typeof block.content === "string" ? block.content : JSON.stringify(block.content)
          });
        } else if (block.type === "tool_use") {
          toolCalls.push({
            id: block.id,
            type: "function",
            function: {
              name: block.name,
              arguments: JSON.stringify(block.input)
            }
          });
        }
      }
      if (msg.role === "assistant" && toolCalls.length > 0) {
        // Assistant turn with tool calls: text (if any) plus tool_calls array.
        const textContent = contentParts.filter((p) => p.type === "text").map((p) => p.text).join("");
        openaiMessages.push({
          role: "assistant",
          content: textContent || null,
          tool_calls: toolCalls
        });
      } else if (contentParts.length > 0) {
        if (hasImages) {
          openaiMessages.push({
            role: msg.role === "assistant" ? "assistant" : "user",
            content: contentParts
          });
        } else {
          // Text-only: collapse parts into a single string; empty text is dropped.
          const textContent = contentParts.map((p) => p.text).join("");
          if (textContent) {
            openaiMessages.push({
              role: msg.role === "assistant" ? "assistant" : "user",
              content: textContent
            });
          }
        }
      }
    }
  }
  return openaiMessages;
}
198
+ convertTools(anthropicTools) {
199
+ if (!anthropicTools || anthropicTools.length === 0) return void 0;
200
+ return anthropicTools.map((tool) => {
201
+ const params = tool.input_schema || { type: "object", properties: {} };
202
+ return {
203
+ type: "function",
204
+ function: {
205
+ name: tool.name,
206
+ description: tool.description || "",
207
+ parameters: params
208
+ }
209
+ };
210
+ });
211
+ }
212
/**
 * Build an OpenAI chat-completions request from an Anthropic messages request.
 * Always streams (stream: true) and asks for usage in the final chunk.
 */
buildRequest(anthropicRequest) {
  const { model, messages, max_tokens, system, tools, tool_choice } = anthropicRequest;
  const mappedModel = this.mapModel(model);
  const openaiMessages = [];
  if (system) {
    // Anthropic `system` may be a string or an array of {text} blocks.
    let systemContent;
    if (typeof system === "string") {
      systemContent = system;
    } else if (Array.isArray(system)) {
      systemContent = system.map((s) => s.text || s).join("\n");
    } else {
      systemContent = "";
    }
    openaiMessages.push({ role: "system", content: systemContent });
  }
  openaiMessages.push(...this.convertMessages(messages));
  const openaiRequest = {
    model: mappedModel,
    messages: openaiMessages,
    stream: true,
    stream_options: { include_usage: true }
  };
  if (max_tokens) {
    // Reasoning-style models (o1/o3/gpt-5) reject `max_tokens` and require
    // `max_completion_tokens` instead.
    if (mappedModel.startsWith("o1") || mappedModel.startsWith("o3") || mappedModel.startsWith("gpt-5")) {
      openaiRequest.max_completion_tokens = max_tokens;
    } else {
      openaiRequest.max_tokens = max_tokens;
    }
  }
  const openaiTools = this.convertTools(tools);
  if (openaiTools && openaiTools.length > 0) {
    openaiRequest.tools = openaiTools;
    // Map Anthropic tool_choice onto the OpenAI equivalents:
    // auto -> "auto", any -> "required", tool -> forced function.
    if (tool_choice) {
      if (tool_choice.type === "auto") {
        openaiRequest.tool_choice = "auto";
      } else if (tool_choice.type === "any") {
        openaiRequest.tool_choice = "required";
      } else if (tool_choice.type === "tool" && tool_choice.name) {
        openaiRequest.tool_choice = {
          type: "function",
          function: { name: tool_choice.name }
        };
      }
    }
  }
  return openaiRequest;
}
259
+ getEndpoint() {
260
+ return `${this.baseUrl}/chat/completions`;
261
+ }
262
+ getHeaders() {
263
+ return {
264
+ "Content-Type": "application/json",
265
+ Authorization: `Bearer ${this.apiKey}`
266
+ };
267
+ }
268
/**
 * Pipe an OpenAI SSE stream to the client re-encoded as Anthropic SSE events
 * (message_start -> content_block_* -> message_delta -> message_stop).
 * Text deltas are forwarded as they arrive; tool-call fragments are buffered
 * in `toolCallsBuffer` and emitted as tool_use blocks when "[DONE]" arrives.
 */
async handleStreamingResponse(providerResponse, res, messageId, requestedModel) {
  res.setHeader("Content-Type", "text/event-stream");
  res.setHeader("Cache-Control", "no-cache");
  res.setHeader("Connection", "keep-alive");
  res.setHeader("X-Accel-Buffering", "no");
  res.flushHeaders();
  let contentBlockIndex = 0;
  // Rough token counter: incremented once per text chunk, then overwritten
  // with the real completion_tokens if the provider reports usage.
  let outputTokens = 0;
  // index -> {id, name, arguments} accumulated from streamed tool_call deltas.
  const toolCallsBuffer = {};
  const writeAndFlush = (data) => {
    res.write(data);
    if (res.flush) res.flush();
  };
  const messageStart = {
    type: "message_start",
    message: {
      id: messageId,
      type: "message",
      role: "assistant",
      content: [],
      model: requestedModel,
      stop_reason: null,
      stop_sequence: null,
      usage: { input_tokens: 0, output_tokens: 0 }
    }
  };
  writeAndFlush(`event: message_start
data: ${JSON.stringify(messageStart)}

`);
  // Open the text content block (index 0) up front.
  writeAndFlush(
    `event: content_block_start
data: ${JSON.stringify({
      type: "content_block_start",
      index: contentBlockIndex,
      content_block: { type: "text", text: "" }
    })}

`
  );
  const reader = providerResponse.body;
  let buffer = "";
  return new Promise((resolve, reject) => {
    reader.on("data", (chunk) => {
      // Line-buffer the SSE stream; keep any trailing partial line.
      buffer += chunk.toString();
      const lines = buffer.split("\n");
      buffer = lines.pop() || "";
      for (const line of lines) {
        if (line.startsWith("data: ")) {
          const data = line.slice(6).trim();
          if (data === "[DONE]") {
            // Close the text block, flush buffered tool calls, then finish.
            writeAndFlush(
              `event: content_block_stop
data: ${JSON.stringify({
                type: "content_block_stop",
                index: contentBlockIndex
              })}

`
            );
            const toolCalls = Object.values(toolCallsBuffer);
            if (toolCalls.length > 0) {
              for (const tc of toolCalls) {
                contentBlockIndex++;
                writeAndFlush(
                  `event: content_block_start
data: ${JSON.stringify({
                    type: "content_block_start",
                    index: contentBlockIndex,
                    content_block: {
                      type: "tool_use",
                      id: tc.id,
                      name: tc.name,
                      input: {}
                    }
                  })}

`
                );
                if (tc.arguments) {
                  writeAndFlush(
                    `event: content_block_delta
data: ${JSON.stringify({
                      type: "content_block_delta",
                      index: contentBlockIndex,
                      delta: { type: "input_json_delta", partial_json: tc.arguments }
                    })}

`
                  );
                }
                writeAndFlush(
                  `event: content_block_stop
data: ${JSON.stringify({
                    type: "content_block_stop",
                    index: contentBlockIndex
                  })}

`
                );
              }
            }
            const stopReason = toolCalls.length > 0 ? "tool_use" : "end_turn";
            writeAndFlush(
              `event: message_delta
data: ${JSON.stringify({
                type: "message_delta",
                delta: { stop_reason: stopReason, stop_sequence: null },
                usage: { output_tokens: outputTokens }
              })}

`
            );
            writeAndFlush(
              `event: message_stop
data: ${JSON.stringify({ type: "message_stop" })}

`
            );
            resolve();
            return;
          }
          try {
            const parsed = JSON.parse(data);
            const choice = parsed.choices?.[0];
            const textContent = choice?.delta?.content;
            if (textContent) {
              outputTokens++;
              writeAndFlush(
                `event: content_block_delta
data: ${JSON.stringify({
                  type: "content_block_delta",
                  index: contentBlockIndex,
                  delta: { type: "text_delta", text: textContent }
                })}

`
              );
            }
            if (choice?.delta?.tool_calls) {
              // Tool-call deltas arrive fragmented; merge by stream index.
              for (const toolCall of choice.delta.tool_calls) {
                const idx = toolCall.index;
                if (!toolCallsBuffer[idx]) {
                  toolCallsBuffer[idx] = {
                    id: toolCall.id || `toolu_${Date.now()}_${idx}`,
                    name: "",
                    arguments: ""
                  };
                }
                if (toolCall.id) {
                  toolCallsBuffer[idx].id = toolCall.id;
                }
                if (toolCall.function?.name) {
                  toolCallsBuffer[idx].name = toolCall.function.name;
                }
                if (toolCall.function?.arguments) {
                  toolCallsBuffer[idx].arguments += toolCall.function.arguments;
                }
              }
            }
            if (parsed.usage) {
              outputTokens = parsed.usage.completion_tokens || 0;
            }
          } catch {
            // Non-JSON keep-alive/partial lines are ignored deliberately.
          }
        }
      }
    });
    // FIXME(review): if the upstream stream ends without a "[DONE]" sentinel,
    // this resolves without emitting content_block_stop / message_delta /
    // message_stop and silently drops any buffered tool calls — the client
    // receives a truncated, malformed Anthropic event stream.
    reader.on("end", () => resolve());
    reader.on("error", reject);
  });
}
440
+ convertResponse(providerData, messageId, requestedModel) {
441
+ const data = providerData;
442
+ const content = [];
443
+ if (data.choices[0].message.content) {
444
+ content.push({ type: "text", text: data.choices[0].message.content });
445
+ }
446
+ if (data.choices[0].message.tool_calls) {
447
+ for (const tc of data.choices[0].message.tool_calls) {
448
+ let parsedInput = {};
449
+ try {
450
+ parsedInput = JSON.parse(tc.function.arguments);
451
+ } catch {
452
+ parsedInput = {};
453
+ }
454
+ content.push({
455
+ type: "tool_use",
456
+ id: tc.id,
457
+ name: tc.function.name,
458
+ input: parsedInput
459
+ });
460
+ }
461
+ }
462
+ const stopReason = data.choices[0].message.tool_calls ? "tool_use" : "end_turn";
463
+ return {
464
+ id: messageId,
465
+ type: "message",
466
+ role: "assistant",
467
+ content,
468
+ model: requestedModel,
469
+ stop_reason: stopReason,
470
+ stop_sequence: null,
471
+ usage: {
472
+ input_tokens: data.usage?.prompt_tokens || 0,
473
+ output_tokens: data.usage?.completion_tokens || 0
474
+ }
475
+ };
476
+ }
477
+ };
478
+
479
+ // src/providers/zai.ts
480
+ var ZAIProvider = class extends BaseProvider {
481
constructor(config = {}) {
  super(config);
  // Z.AI coding endpoint (OpenAI-compatible wire format).
  this.baseUrl = config.baseUrl || "https://api.z.ai/api/coding/paas/v4";
  this.apiKey = config.apiKey || "";
  this.defaultModel = config.defaultModel || "glm-4.7";
  // Every Anthropic tier maps to the same GLM model; the map mainly exists
  // so mapModel() has a uniform shape across providers.
  this.modelMap = {
    "claude-3-5-haiku-20241022": "glm-4.7",
    "claude-3-5-haiku-latest": "glm-4.7",
    "claude-haiku-4-5-20251001": "glm-4.7",
    "claude-3-5-sonnet-20241022": "glm-4.7",
    "claude-sonnet-4-20250514": "glm-4.7",
    "claude-opus-4-20250514": "glm-4.7",
    "claude-opus-4-5-20250514": "glm-4.7",
    "claude-opus-4-5-20251101": "glm-4.7"
  };
}
497
+ getName() {
498
+ return "zai";
499
+ }
500
+ mapModel(anthropicModel) {
501
+ return this.modelMap[anthropicModel] || this.defaultModel;
502
+ }
503
/**
 * Convert Anthropic-format messages to OpenAI-style messages for Z.AI.
 * Mirrors OpenAIProvider.convertMessages (text/image/document blocks,
 * tool_use and tool_result handling).
 * NOTE: tool_result blocks are pushed into the output immediately while the
 * rest of the same message is still being accumulated, so ordering can
 * differ from the original message when block types are mixed.
 */
convertMessages(anthropicMessages) {
  const openaiMessages = [];
  for (const msg of anthropicMessages) {
    if (typeof msg.content === "string") {
      openaiMessages.push({
        role: msg.role === "assistant" ? "assistant" : "user",
        content: msg.content
      });
    } else if (Array.isArray(msg.content)) {
      const contentParts = [];
      const toolCalls = [];
      // When true, content is sent as a multimodal parts array rather than
      // a flattened string.
      let hasMultimodal = false;
      for (const block of msg.content) {
        if (block.type === "text") {
          contentParts.push({ type: "text", text: block.text });
        } else if (block.type === "image") {
          hasMultimodal = true;
          if (block.source?.type === "url") {
            contentParts.push({
              type: "image_url",
              image_url: { url: block.source.url }
            });
          } else if (block.source?.type === "base64") {
            contentParts.push({
              type: "image_url",
              image_url: {
                url: `data:${block.source.media_type};base64,${block.source.data}`
              }
            });
          }
        } else if (block.type === "document") {
          hasMultimodal = true;
          const source = block.source;
          const data = source?.data;
          // NOTE(review): assumes every document is a base64 PDF — filename
          // and media type are hard-coded; confirm against callers.
          contentParts.push({
            type: "file",
            file: {
              filename: "document.pdf",
              file_data: `data:application/pdf;base64,${data}`
            }
          });
        } else if (block.type === "tool_result") {
          openaiMessages.push({
            role: "tool",
            tool_call_id: block.tool_use_id,
            content: typeof block.content === "string" ? block.content : JSON.stringify(block.content)
          });
        } else if (block.type === "tool_use") {
          toolCalls.push({
            id: block.id,
            type: "function",
            function: {
              name: block.name,
              arguments: JSON.stringify(block.input)
            }
          });
        }
      }
      if (msg.role === "assistant" && toolCalls.length > 0) {
        // Assistant turn with tool calls: text (if any) plus tool_calls array.
        const textContent = contentParts.filter((p) => p.type === "text").map((p) => p.text).join("");
        openaiMessages.push({
          role: "assistant",
          content: textContent || null,
          tool_calls: toolCalls
        });
      } else if (contentParts.length > 0) {
        if (hasMultimodal) {
          openaiMessages.push({
            role: msg.role === "assistant" ? "assistant" : "user",
            content: contentParts
          });
        } else {
          // Text-only: collapse parts to one string; empty text is dropped.
          const textContent = contentParts.map((p) => p.text).join("");
          if (textContent) {
            openaiMessages.push({
              role: msg.role === "assistant" ? "assistant" : "user",
              content: textContent
            });
          }
        }
      }
    }
  }
  return openaiMessages;
}
588
+ convertTools(anthropicTools) {
589
+ if (!anthropicTools) return void 0;
590
+ return anthropicTools.map((tool) => ({
591
+ type: "function",
592
+ function: {
593
+ name: tool.name,
594
+ description: tool.description,
595
+ parameters: tool.input_schema
596
+ }
597
+ }));
598
+ }
599
/**
 * Build an OpenAI-compatible request for Z.AI. Always streams, defaults
 * max_tokens to 4096 and temperature to 0.7. Unlike OpenAIProvider,
 * tool_choice is not forwarded.
 */
buildRequest(anthropicRequest) {
  const { model, messages, max_tokens, system, tools } = anthropicRequest;
  const mappedModel = this.mapModel(model);
  const openaiMessages = [];
  if (system) {
    // NOTE(review): assumes non-string `system` is an array of {text}
    // blocks; a plain object here would throw — confirm against callers.
    const systemContent = typeof system === "string" ? system : system.map((s) => s.text).join("\n");
    openaiMessages.push({ role: "system", content: systemContent });
  }
  openaiMessages.push(...this.convertMessages(messages));
  const openaiRequest = {
    model: mappedModel,
    messages: openaiMessages,
    max_tokens: max_tokens || 4096,
    stream: true,
    temperature: 0.7
  };
  const openaiTools = this.convertTools(tools);
  if (openaiTools) {
    openaiRequest.tools = openaiTools;
  }
  return openaiRequest;
}
621
+ getEndpoint() {
622
+ return `${this.baseUrl}/chat/completions`;
623
+ }
624
+ getHeaders() {
625
+ return {
626
+ "Content-Type": "application/json",
627
+ Authorization: `Bearer ${this.apiKey}`,
628
+ "Accept-Language": "en-US,en"
629
+ };
630
+ }
631
/**
 * Pipe a Z.AI SSE stream to the client re-encoded as Anthropic SSE events.
 * Text AND reasoning_content deltas are forwarded as text; tool-call
 * fragments are buffered in `toolCallsBuffer`.
 * FIXME(review): unlike OpenAIProvider, buffered tool calls are NOT flushed
 * in the "[DONE]" branch — they are only written in the "end" handler, which
 * fires AFTER message_stop has already been sent, and stop_reason is always
 * "end_turn". A tool-calling turn therefore produces a malformed Anthropic
 * event stream.
 */
async handleStreamingResponse(providerResponse, res, messageId, requestedModel) {
  res.setHeader("Content-Type", "text/event-stream");
  res.setHeader("Cache-Control", "no-cache");
  res.setHeader("Connection", "keep-alive");
  res.setHeader("X-Accel-Buffering", "no");
  res.flushHeaders();
  let contentBlockIndex = 0;
  // Rough token counter: one per text chunk, replaced by reported usage.
  let outputTokens = 0;
  // index -> {id, name, arguments} accumulated from streamed tool_call deltas.
  const toolCallsBuffer = {};
  const writeAndFlush = (data) => {
    res.write(data);
    if (res.flush) res.flush();
  };
  const messageStart = {
    type: "message_start",
    message: {
      id: messageId,
      type: "message",
      role: "assistant",
      content: [],
      model: requestedModel,
      stop_reason: null,
      stop_sequence: null,
      usage: { input_tokens: 0, output_tokens: 0 }
    }
  };
  writeAndFlush(`event: message_start
data: ${JSON.stringify(messageStart)}

`);
  writeAndFlush(
    `event: content_block_start
data: ${JSON.stringify({
      type: "content_block_start",
      index: contentBlockIndex,
      content_block: { type: "text", text: "" }
    })}

`
  );
  const reader = providerResponse.body;
  let buffer = "";
  return new Promise((resolve, reject) => {
    reader.on("data", (chunk) => {
      buffer += chunk.toString();
      const lines = buffer.split("\n");
      buffer = lines.pop() || "";
      for (const line of lines) {
        if (line.startsWith("data: ")) {
          // FIXME(review): no .trim() here (OpenAIProvider trims) — a CRLF
          // stream would leave "[DONE]\r" unmatched and skip the close path.
          const data = line.slice(6);
          if (data === "[DONE]") {
            writeAndFlush(
              `event: content_block_stop
data: ${JSON.stringify({
                type: "content_block_stop",
                index: contentBlockIndex
              })}

`
            );
            writeAndFlush(
              `event: message_delta
data: ${JSON.stringify({
                type: "message_delta",
                delta: { stop_reason: "end_turn", stop_sequence: null },
                usage: { output_tokens: outputTokens }
              })}

`
            );
            writeAndFlush(
              `event: message_stop
data: ${JSON.stringify({ type: "message_stop" })}

`
            );
            resolve();
            return;
          }
          try {
            const parsed = JSON.parse(data);
            const choice = parsed.choices?.[0];
            // GLM models stream chain-of-thought in reasoning_content;
            // both are surfaced to the client as plain text deltas.
            const textContent = choice?.delta?.content || choice?.delta?.reasoning_content;
            if (textContent) {
              outputTokens++;
              writeAndFlush(
                `event: content_block_delta
data: ${JSON.stringify({
                  type: "content_block_delta",
                  index: contentBlockIndex,
                  delta: { type: "text_delta", text: textContent }
                })}

`
              );
            }
            if (choice?.delta?.tool_calls) {
              for (const toolCall of choice.delta.tool_calls) {
                const idx = toolCall.index;
                if (!toolCallsBuffer[idx]) {
                  toolCallsBuffer[idx] = {
                    id: toolCall.id || `tool_${idx}`,
                    name: "",
                    arguments: ""
                  };
                }
                if (toolCall.function?.name) {
                  toolCallsBuffer[idx].name = toolCall.function.name;
                }
                if (toolCall.function?.arguments) {
                  toolCallsBuffer[idx].arguments += toolCall.function.arguments;
                }
              }
            }
            if (parsed.usage) {
              outputTokens = parsed.usage.completion_tokens || 0;
            }
          } catch {
            // Non-JSON keep-alive/partial lines are ignored deliberately.
          }
        }
      }
    });
    // See FIXME above: these tool_use events are written after resolve()
    // has normally already fired in the [DONE] branch (message_stop sent).
    reader.on("end", () => {
      const toolCalls = Object.values(toolCallsBuffer);
      if (toolCalls.length > 0) {
        for (const tc of toolCalls) {
          contentBlockIndex++;
          writeAndFlush(
            `event: content_block_start
data: ${JSON.stringify({
              type: "content_block_start",
              index: contentBlockIndex,
              content_block: {
                type: "tool_use",
                id: tc.id,
                name: tc.name,
                input: {}
              }
            })}

`
          );
          if (tc.arguments) {
            writeAndFlush(
              `event: content_block_delta
data: ${JSON.stringify({
                type: "content_block_delta",
                index: contentBlockIndex,
                delta: { type: "input_json_delta", partial_json: tc.arguments }
              })}

`
            );
          }
          writeAndFlush(
            `event: content_block_stop
data: ${JSON.stringify({
              type: "content_block_stop",
              index: contentBlockIndex
            })}

`
          );
        }
      }
      resolve();
    });
    reader.on("error", reject);
  });
}
801
+ convertResponse(providerData, messageId, requestedModel) {
802
+ const data = providerData;
803
+ return {
804
+ id: messageId,
805
+ type: "message",
806
+ role: "assistant",
807
+ content: [{ type: "text", text: data.choices[0].message.content || "" }],
808
+ model: requestedModel,
809
+ stop_reason: "end_turn",
810
+ stop_sequence: null,
811
+ usage: {
812
+ input_tokens: data.usage?.prompt_tokens || 0,
813
+ output_tokens: data.usage?.completion_tokens || 0
814
+ }
815
+ };
816
+ }
817
+ };
818
+
819
// src/providers/gemini.ts
// Gemini via Google's OpenAI-compatible endpoint: inherits all request
// building and stream handling from OpenAIProvider, overriding only the
// base URL, the model map, and the max-token field naming.
var GeminiProvider = class extends OpenAIProvider {
  constructor(config = {}) {
    super(config);
    this.baseUrl = config.baseUrl || "https://generativelanguage.googleapis.com/v1beta/openai";
    this.apiKey = config.apiKey || "";
    this.defaultModel = config.defaultModel || "gemini-2.5-flash";
    // Haiku-tier -> flash; sonnet/opus tiers -> pro.
    this.modelMap = {
      "claude-3-5-haiku-20241022": "gemini-2.5-flash",
      "claude-3-5-haiku-latest": "gemini-2.5-flash",
      "claude-haiku-4-5-20251001": "gemini-2.5-flash",
      "claude-3-5-sonnet-20241022": "gemini-2.5-pro",
      "claude-sonnet-4-20250514": "gemini-2.5-pro",
      "claude-opus-4-20250514": "gemini-2.5-pro",
      "claude-opus-4-5-20250514": "gemini-2.5-pro",
      "claude-opus-4-5-20251101": "gemini-2.5-pro"
    };
  }
  /** Registry key for this provider. */
  getName() {
    return "gemini";
  }
  /**
   * Build the request via the parent, then undo the max_completion_tokens
   * renaming the parent applies for o1/o3/gpt-5 model names — this endpoint
   * expects plain `max_tokens`.
   */
  buildRequest(anthropicRequest) {
    const request = super.buildRequest(anthropicRequest);
    const cap = request.max_completion_tokens;
    if (cap) {
      delete request.max_completion_tokens;
      request.max_tokens = cap;
    }
    return request;
  }
  /** Same bearer-token scheme as OpenAI. */
  getHeaders() {
    const headers = {
      "Content-Type": "application/json",
      Authorization: `Bearer ${this.apiKey}`
    };
    return headers;
  }
};
855
+
856
+ // src/providers/bedrock.ts
857
+ var crypto = __toESM(require("crypto"));
858
+ var BedrockProvider = class extends BaseProvider {
859
// Track model for current request
constructor(config = {}) {
  super(config);
  // AWS region + credentials; `apiKey` doubles as the access key id so the
  // generic provider config shape still works.
  this.region = config.region || "us-west-2";
  this.accessKeyId = config.accessKeyId || config.apiKey || "";
  this.secretAccessKey = config.secretAccessKey || "";
  this.defaultModel = config.defaultModel || "anthropic.claude-3-5-sonnet-20241022-v2:0";
  // Anthropic model id -> Bedrock model id. Claude 4 ids intentionally
  // downgrade to 3.5 because on-demand Bedrock requires inference profiles
  // for Claude 4 (see inline comments).
  this.modelMap = {
    "claude-3-5-haiku-20241022": "anthropic.claude-3-5-haiku-20241022-v1:0",
    "claude-3-5-haiku-latest": "anthropic.claude-3-5-haiku-20241022-v1:0",
    "claude-haiku-4-5-20251001": "anthropic.claude-3-5-haiku-20241022-v1:0",
    "claude-3-5-sonnet-20241022": "anthropic.claude-3-5-sonnet-20241022-v2:0",
    "claude-3-5-sonnet-latest": "anthropic.claude-3-5-sonnet-20241022-v2:0",
    // Claude 4 models - these require inference profiles, fallback to 3.5 for on-demand
    "claude-sonnet-4-20250514": "anthropic.claude-3-5-sonnet-20241022-v2:0",
    "claude-sonnet-4-5-20250514": "anthropic.claude-3-5-sonnet-20241022-v2:0",
    "claude-sonnet-4-5": "anthropic.claude-3-5-sonnet-20241022-v2:0",
    "claude-opus-4-20250514": "anthropic.claude-3-5-sonnet-20241022-v2:0",
    "claude-opus-4-5-20250514": "anthropic.claude-3-5-sonnet-20241022-v2:0",
    "claude-opus-4-5-20251101": "anthropic.claude-3-5-sonnet-20241022-v2:0",
    // Short aliases
    "sonnet": "anthropic.claude-3-5-sonnet-20241022-v2:0",
    "haiku": "anthropic.claude-3-5-haiku-20241022-v1:0",
    "opus": "anthropic.claude-3-5-sonnet-20241022-v2:0"
  };
  // Mutated by buildRequest(); read by getEndpoint()/getSignedHeaders().
  this.currentModel = this.defaultModel;
}
886
+ getName() {
887
+ return "bedrock";
888
+ }
889
+ mapModel(anthropicModel) {
890
+ return this.modelMap[anthropicModel] || this.defaultModel;
891
+ }
892
// Bedrock uses native Anthropic format - pass through
convertMessages(anthropicMessages) {
  return anthropicMessages;
}
// Bedrock uses native Anthropic format - pass through
convertTools(anthropicTools) {
  return anthropicTools;
}
900
// AWS Signature V4 signing
// HMAC-SHA256 of `msg` keyed by `key`; returns the raw digest Buffer.
sign(key, msg) {
  return crypto.createHmac("sha256", key).update(msg, "utf8").digest();
}
// Derives the SigV4 signing key via the standard HMAC chain:
// "AWS4"+secret -> date -> region -> service -> "aws4_request".
getSignatureKey(key, dateStamp, region, service) {
  const kDate = this.sign(Buffer.from("AWS4" + key, "utf8"), dateStamp);
  const kRegion = this.sign(kDate, region);
  const kService = this.sign(kRegion, service);
  const kSigning = this.sign(kService, "aws4_request");
  return kSigning;
}
911
/**
 * Builds AWS SigV4 request headers (Content-Type, X-Amz-Date, Authorization)
 * for a Bedrock call. The canonical request / string-to-sign construction is
 * byte-sensitive — the embedded newlines in `canonicalHeaders` are part of
 * the signed payload; do not reformat.
 * NOTE(review): the signing `service` is "bedrock" while the target host is
 * bedrock-runtime — confirm this matches AWS's expected credential scope.
 */
createAuthHeaders(method, host, uri, queryString, payload, amzDate, dateStamp) {
  const service = "bedrock";
  const algorithm = "AWS4-HMAC-SHA256";
  const contentType = "application/json";
  const payloadHash = crypto.createHash("sha256").update(payload, "utf8").digest("hex");
  const canonicalHeaders = `content-type:${contentType}
host:${host}
x-amz-date:${amzDate}
`;
  const signedHeaders = "content-type;host;x-amz-date";
  const canonicalRequest = [
    method,
    uri,
    queryString,
    canonicalHeaders,
    signedHeaders,
    payloadHash
  ].join("\n");
  const credentialScope = `${dateStamp}/${this.region}/${service}/aws4_request`;
  const stringToSign = [
    algorithm,
    amzDate,
    credentialScope,
    crypto.createHash("sha256").update(canonicalRequest, "utf8").digest("hex")
  ].join("\n");
  const signingKey = this.getSignatureKey(this.secretAccessKey, dateStamp, this.region, service);
  const signature = crypto.createHmac("sha256", signingKey).update(stringToSign, "utf8").digest("hex");
  const authorizationHeader = `${algorithm} Credential=${this.accessKeyId}/${credentialScope}, SignedHeaders=${signedHeaders}, Signature=${signature}`;
  return {
    "Content-Type": contentType,
    "X-Amz-Date": amzDate,
    "Authorization": authorizationHeader
  };
}
945
+ buildRequest(anthropicRequest) {
946
+ const { model, messages, max_tokens, system, tools, tool_choice } = anthropicRequest;
947
+ this.currentModel = this.mapModel(model);
948
+ const bedrockRequest = {
949
+ anthropic_version: "bedrock-2023-05-31",
950
+ model: "",
951
+ // Will be removed below
952
+ messages,
953
+ max_tokens: max_tokens || 4096
954
+ };
955
+ if (system) {
956
+ bedrockRequest.system = system;
957
+ }
958
+ if (tools && tools.length > 0) {
959
+ bedrockRequest.tools = tools;
960
+ if (tool_choice) {
961
+ bedrockRequest.tool_choice = tool_choice;
962
+ }
963
+ }
964
+ delete bedrockRequest.model;
965
+ return bedrockRequest;
966
+ }
967
/**
 * Streaming invoke URL for the model resolved by the latest buildRequest().
 * NOTE(review): the model id is NOT percent-encoded here, while
 * getSignedHeaders() signs over the %3A-encoded path — verify the signature
 * still validates when the id contains ":".
 */
getEndpoint() {
  const modelId = this.currentModel || this.defaultModel;
  return `https://bedrock-runtime.${this.region}.amazonaws.com/model/${modelId}/invoke-with-response-stream`;
}
/**
 * Generic (UNSIGNED) headers — no Authorization. Callers performing real
 * requests must use getSignedHeaders(payload) instead, since SigV4 signs
 * the request body.
 */
getHeaders() {
  const now = /* @__PURE__ */ new Date();
  // ISO timestamp compressed to the SigV4 form YYYYMMDD'T'HHMMSS'Z'.
  const amzDate = now.toISOString().replace(/[:-]|\.\d{3}/g, "");
  const host = `bedrock-runtime.${this.region}.amazonaws.com`;
  return {
    "Content-Type": "application/json",
    "X-Amz-Date": amzDate,
    "Host": host
  };
}
// Custom method to get signed headers with payload
getSignedHeaders(payload) {
  const now = /* @__PURE__ */ new Date();
  const amzDate = now.toISOString().replace(/[:-]|\.\d{3}/g, "");
  const dateStamp = amzDate.slice(0, 8);
  const host = `bedrock-runtime.${this.region}.amazonaws.com`;
  const modelId = this.currentModel || this.defaultModel;
  // The canonical URI must carry the ":"-encoded form for signing.
  const encodedModelId = modelId.replace(/:/g, "%3A");
  const uri = `/model/${encodedModelId}/invoke-with-response-stream`;
  return this.createAuthHeaders("POST", host, uri, "", payload, amzDate, dateStamp);
}
992
  /**
   * Proxy a Bedrock `invoke-with-response-stream` reply to the client as
   * Anthropic-style SSE (message_start / content_block_* / message_delta /
   * message_stop). Decodes AWS's binary event-stream framing, unwraps the
   * base64 `bytes` envelope, and re-emits text deltas.
   *
   * @param providerResponse - node-fetch response whose body is a readable stream.
   * @param res - Express-style response the SSE events are written to.
   * @param messageId - id to report in message_start.
   * @param requestedModel - model name to echo back to the client.
   * @returns {Promise<void>} resolves when the upstream stream ends.
   */
  async handleStreamingResponse(providerResponse, res, messageId, requestedModel) {
    // Standard SSE headers; X-Accel-Buffering disables reverse-proxy buffering.
    res.setHeader("Content-Type", "text/event-stream");
    res.setHeader("Cache-Control", "no-cache");
    res.setHeader("Connection", "keep-alive");
    res.setHeader("X-Accel-Buffering", "no");
    res.flushHeaders();
    let contentBlockIndex = 0;
    // NOTE(review): incremented once per delta CHUNK below, so this is only
    // an approximation of token count unless the upstream reports real usage
    // via a message_delta event (which then overwrites it).
    let outputTokens = 0;
    const writeAndFlush = (data) => {
      res.write(data);
      if (res.flush) res.flush();
    };
    const messageStart = {
      type: "message_start",
      message: {
        id: messageId,
        type: "message",
        role: "assistant",
        content: [],
        model: requestedModel,
        stop_reason: null,
        stop_sequence: null,
        usage: { input_tokens: 0, output_tokens: 0 }
      }
    };
    writeAndFlush(`event: message_start
data: ${JSON.stringify(messageStart)}

`);
    writeAndFlush(
      `event: content_block_start
data: ${JSON.stringify({
        type: "content_block_start",
        index: contentBlockIndex,
        content_block: { type: "text", text: "" }
      })}

`
    );
    const reader = providerResponse.body;
    let buffer = Buffer.alloc(0);
    let hasStartedContent = false;
    // Parse one AWS event-stream frame:
    //   [4B total length][4B headers length][4B prelude CRC]
    //   [headers][payload][4B message CRC]
    // Returns { eventType, payload } or null if the frame is incomplete.
    const parseEventStreamMessage = (data) => {
      if (data.length < 16) return null;
      const totalLength = data.readUInt32BE(0);
      const headersLength = data.readUInt32BE(4);
      if (data.length < totalLength) return null;
      const headersStart = 12;
      const headersEnd = headersStart + headersLength;
      const payloadStart = headersEnd;
      const payloadEnd = totalLength - 4; // strip trailing message CRC
      let eventType = "";
      let pos = headersStart;
      // Walk headers looking for the string-valued ":event-type" header.
      // NOTE(review): only header type 7 (string) advances past its value;
      // a non-string header would desync this scan — in practice Bedrock
      // event headers appear to be strings, but confirm against the spec.
      while (pos < headersEnd && pos < data.length) {
        const nameLen = data.readUInt8(pos);
        pos++;
        if (pos + nameLen > data.length) break;
        const name = data.slice(pos, pos + nameLen).toString("utf8");
        pos += nameLen;
        if (pos >= data.length) break;
        const headerType = data.readUInt8(pos);
        pos++;
        if (headerType === 7) {
          if (pos + 2 > data.length) break;
          const valueLen = data.readUInt16BE(pos);
          pos += 2;
          if (pos + valueLen > data.length) break;
          const value = data.slice(pos, pos + valueLen).toString("utf8");
          pos += valueLen;
          if (name === ":event-type") eventType = value;
        }
      }
      const payload = data.slice(payloadStart, payloadEnd).toString("utf8");
      return { eventType, payload };
    };
    return new Promise((resolve, reject) => {
      let chunkCount = 0;
      reader.on("data", (chunk) => {
        buffer = Buffer.concat([buffer, chunk]);
        chunkCount++;
        // Drain every complete frame currently in the buffer.
        while (buffer.length >= 16) {
          const totalLength = buffer.readUInt32BE(0);
          if (buffer.length < totalLength) break;
          const message = buffer.slice(0, totalLength);
          buffer = buffer.slice(totalLength);
          const event = parseEventStreamMessage(message);
          if (!event || !event.payload) continue;
          // Skip any non-JSON prefix in the payload.
          const jsonStart = event.payload.indexOf("{");
          if (jsonStart === -1) continue;
          try {
            const jsonStr = event.payload.slice(jsonStart);
            const wrapper = JSON.parse(jsonStr);
            let parsed;
            // Bedrock wraps the actual chunk as base64 under "bytes".
            if (wrapper.bytes) {
              const decodedPayload = Buffer.from(wrapper.bytes, "base64").toString("utf8");
              parsed = JSON.parse(decodedPayload);
              if (process.env.DEBUG_BEDROCK) {
                console.log(`[Bedrock] Decoded: ${decodedPayload.substring(0, 100)}`);
              }
            } else {
              parsed = wrapper;
            }
            if (parsed.type === "content_block_delta" && parsed.delta?.text) {
              hasStartedContent = true;
              outputTokens++;
              writeAndFlush(
                `event: content_block_delta
data: ${JSON.stringify({
                  type: "content_block_delta",
                  index: contentBlockIndex,
                  delta: { type: "text_delta", text: parsed.delta.text }
                })}

`
              );
            }
            if (parsed.type === "message_delta" && parsed.usage) {
              // Prefer the upstream's authoritative token count.
              outputTokens = parsed.usage.output_tokens || outputTokens;
            }
            if (parsed.type === "message_stop") {
              // NOTE(review): the 'end' handler below emits message_delta /
              // message_stop again, so a stream that sends message_stop
              // produces duplicated terminal events — verify clients tolerate this.
              writeAndFlush(
                `event: content_block_stop
data: ${JSON.stringify({
                  type: "content_block_stop",
                  index: contentBlockIndex
                })}

`
              );
              writeAndFlush(
                `event: message_delta
data: ${JSON.stringify({
                  type: "message_delta",
                  delta: { stop_reason: "end_turn", stop_sequence: null },
                  usage: { output_tokens: outputTokens }
                })}

`
              );
              writeAndFlush(
                `event: message_stop
data: ${JSON.stringify({ type: "message_stop" })}

`
              );
            }
          } catch {
            // Malformed frame payloads are skipped deliberately (best-effort stream).
          }
        }
      });
      reader.on("end", () => {
        if (process.env.DEBUG_BEDROCK) {
          console.log(`[Bedrock] Stream ended. Chunks: ${chunkCount}, hasContent: ${hasStartedContent}`);
        }
        // Close out the SSE conversation even if upstream never sent message_stop.
        if (hasStartedContent) {
          writeAndFlush(
            `event: content_block_stop
data: ${JSON.stringify({
              type: "content_block_stop",
              index: contentBlockIndex
            })}

`
          );
        }
        writeAndFlush(
          `event: message_delta
data: ${JSON.stringify({
            type: "message_delta",
            delta: { stop_reason: "end_turn", stop_sequence: null },
            usage: { output_tokens: outputTokens }
          })}

`
        );
        writeAndFlush(
          `event: message_stop
data: ${JSON.stringify({ type: "message_stop" })}

`
        );
        resolve();
      });
      reader.on("error", reject);
    });
  }
1178
+ convertResponse(providerData, messageId, requestedModel) {
1179
+ const data = providerData;
1180
+ return {
1181
+ id: messageId,
1182
+ type: "message",
1183
+ role: "assistant",
1184
+ content: data.content,
1185
+ model: requestedModel,
1186
+ stop_reason: data.stop_reason || "end_turn",
1187
+ stop_sequence: null,
1188
+ usage: {
1189
+ input_tokens: data.usage?.input_tokens || 0,
1190
+ output_tokens: data.usage?.output_tokens || 0
1191
+ }
1192
+ };
1193
+ }
1194
+ };
1195
+
1196
+ // src/providers/azure.ts
1197
var AzureProvider = class extends OpenAIProvider {
  /**
   * OpenAI-compatible provider targeting an Azure OpenAI deployment.
   * Inherits request/response handling from OpenAIProvider and only swaps
   * the endpoint shape and the api-key auth header.
   */
  constructor(config = {}) {
    super(config);
    this.baseUrl = config.baseUrl || "";
    this.apiKey = config.apiKey || "";
    this.deploymentName = config.deploymentName || config.defaultModel || "gpt-4o";
    this.apiVersion = config.apiVersion || "2024-10-21";
    this.defaultModel = this.deploymentName;
    // Anthropic model name -> Azure deployment fallback mapping.
    this.modelMap = {
      "claude-3-5-haiku-20241022": "gpt-4o-mini",
      "claude-3-5-haiku-latest": "gpt-4o-mini",
      "claude-haiku-4-5-20251001": "gpt-4o-mini",
      "claude-3-5-sonnet-20241022": "gpt-4o",
      "claude-sonnet-4-20250514": "gpt-4o",
      "claude-opus-4-20250514": "gpt-4o",
      "claude-opus-4-5-20250514": "gpt-4o",
      "claude-opus-4-5-20251101": "gpt-4o",
      "sonnet": "gpt-4o",
      "haiku": "gpt-4o-mini",
      "opus": "gpt-4o"
    };
  }
  getName() {
    return "azure";
  }
  /** An explicitly configured (non-default) deployment always wins over the map. */
  mapModel(anthropicModel) {
    const hasCustomDeployment = this.deploymentName && this.deploymentName !== "gpt-4o";
    if (hasCustomDeployment) {
      return this.deploymentName;
    }
    return this.modelMap[anthropicModel] || this.defaultModel;
  }
  buildRequest(anthropicRequest) {
    // No Azure-specific body changes; delegate to the OpenAI implementation.
    return super.buildRequest(anthropicRequest);
  }
  getEndpoint() {
    const deployment = this.mapModel(this.defaultModel);
    return `${this.baseUrl}/openai/deployments/${deployment}/chat/completions?api-version=${this.apiVersion}`;
  }
  /** Azure uses an `api-key` header rather than a Bearer token. */
  getHeaders() {
    return {
      "Content-Type": "application/json",
      "api-key": this.apiKey
    };
  }
};
1243
+
1244
+ // src/providers/krisspy.ts
1245
var KrisspyProvider = class extends BaseProvider {
  /**
   * Thin pass-through provider: the Krisspy backend speaks the Anthropic
   * Messages format natively, so requests, messages, tools, and responses
   * are forwarded with essentially no translation.
   */
  constructor(config) {
    super(config);
    this.krisspyConfig = config;
  }
  getName() {
    return "krisspy";
  }
  /** Model names are forwarded untouched; mapping happens server-side. */
  mapModel(model) {
    return model;
  }
  /** Messages are already Anthropic-format; no conversion needed. */
  convertMessages(messages) {
    return messages;
  }
  /** Tools are already Anthropic-format; no conversion needed. */
  convertTools(tools) {
    return tools;
  }
  /** Forward the request as-is, normalizing only the model field. */
  buildRequest(request) {
    return {
      ...request,
      model: this.mapModel(request.model)
    };
  }
  /** LLM proxy endpoint (`/messages`); agent logic runs client-side. */
  getEndpoint() {
    const root = this.krisspyConfig.baseUrl || "https://api.krisspy.ai";
    return `${root}/api/v1/ai/messages`;
  }
  /**
   * Auth + telemetry headers.
   * @throws {Error} when no API key is configured.
   */
  getHeaders() {
    const { apiKey, appId, userId } = this.krisspyConfig;
    if (!apiKey) {
      throw new Error("Krisspy API key is required");
    }
    const headers = {
      "Content-Type": "application/json",
      "Authorization": `Bearer ${apiKey}`,
      "X-Krisspy-Provider": "krisspy-ai-sdk"
    };
    if (appId) {
      headers["X-Krisspy-App-Id"] = appId;
    }
    if (userId) {
      headers["X-Krisspy-User-Id"] = userId;
    }
    return headers;
  }
  /**
   * Relay the backend's SSE stream to the client line by line.
   * NOTE(review): only `data:` lines are forwarded; any upstream `event:`
   * lines are dropped — confirm clients rely solely on the data payloads.
   */
  async handleStreamingResponse(providerResponse, res, messageId, requestedModel) {
    res.setHeader("Content-Type", "text/event-stream");
    res.setHeader("Cache-Control", "no-cache");
    res.setHeader("Connection", "keep-alive");
    res.flushHeaders();
    const upstream = providerResponse.body;
    let pending = "";
    return new Promise((resolve, reject) => {
      upstream.on("data", (chunk) => {
        pending += chunk.toString();
        const lines = pending.split("\n");
        // Keep the trailing partial line in the buffer for the next chunk.
        pending = lines.pop() || "";
        for (const line of lines) {
          if (!line.startsWith("data: ")) continue;
          const payload = line.slice(6).trim();
          res.write(`data: ${payload}\n\n`);
          if (res.flush) {
            res.flush();
          }
        }
      });
      upstream.on("end", () => resolve());
      upstream.on("error", (err) => {
        console.error("[krisspy provider] Stream error:", err);
        reject(err);
      });
    });
  }
  /** Backend already returns Anthropic format; just fill in defaults. */
  convertResponse(data, messageId, requestedModel) {
    const reply = data;
    return {
      id: reply.id || messageId,
      type: "message",
      role: "assistant",
      content: reply.content || [],
      model: reply.model || requestedModel,
      stop_reason: reply.stop_reason || null,
      stop_sequence: reply.stop_sequence || null,
      usage: reply.usage || { input_tokens: 0, output_tokens: 0 }
    };
  }
};
1366
+
1367
+ // src/providers/index.ts
1368
// Registry of built-in providers, keyed by lowercase name.
var providers = {
  openai: OpenAIProvider,
  zai: ZAIProvider,
  gemini: GeminiProvider,
  bedrock: BedrockProvider,
  azure: AzureProvider,
  krisspy: KrisspyProvider
};
/**
 * Instantiate a provider by (case-insensitive) name.
 * @throws {Error} when the name is not registered.
 */
function getProvider(name, config = {}) {
  const Ctor = providers[name.toLowerCase()];
  if (!Ctor) {
    throw new Error(`Unknown provider: ${name}. Available: ${Object.keys(providers).join(", ")}`);
  }
  return new Ctor(config);
}
/** Register (or override) a provider class under a case-insensitive name. */
function registerProvider(name, ProviderClass) {
  providers[name.toLowerCase()] = ProviderClass;
}
/** Names of every currently registered provider. */
function getAvailableProviders() {
  return Object.keys(providers);
}
1389
+
1390
+ // src/index.ts
1391
+ __reExport(index_exports, require("@anthropic-ai/claude-agent-sdk"), module.exports);
1392
+
1393
+ // src/services/image/azure-dalle.ts
1394
+ var import_node_fetch = __toESM(require("node-fetch"));
1395
+
1396
+ // src/services/base.ts
1397
// Root base class for generative services (image, video, ...): stores the
// caller-supplied configuration object for subclasses to read.
var BaseGenerativeService = class {
  /** @param {object} [config] - Provider-specific configuration, kept as-is. */
  constructor(config = {}) {
    this.config = config;
  }
};
1402
+
1403
+ // src/services/image/base.ts
1404
// Base class for image-generation services; tags the service type.
var BaseImageService = class extends BaseGenerativeService {
  constructor(config = {}) {
    super(config);
  }
  /** @returns {"image"} service-type discriminator */
  getServiceType() {
    return "image";
  }
};
1412
+
1413
+ // src/services/image/azure-dalle.ts
1414
var AzureDalleService = class extends BaseImageService {
  /** Image generation via an Azure OpenAI DALL-E (or gpt-image) deployment. */
  constructor(config = {}) {
    super(config);
    this.baseUrl = config.baseUrl || "";
    this.apiKey = config.apiKey || "";
    this.deploymentName = config.deploymentName || "dall-e-3";
    this.apiVersion = config.apiVersion || "2024-10-21";
  }
  getName() {
    return "azure-dalle";
  }
  /**
   * Generate image(s) for a prompt.
   * @param {object} options - prompt (required), n, size, quality, style,
   *   responseFormat, user.
   * @returns normalized { created, data: [{ url, b64_json, revisedPrompt }] }.
   * @throws {Error} when config is incomplete or the API call fails.
   */
  async generate(options) {
    if (!this.baseUrl) {
      throw new Error("[krisspy-ai] Azure baseUrl is required for image generation");
    }
    if (!this.apiKey) {
      throw new Error("[krisspy-ai] Azure apiKey is required for image generation");
    }
    const endpoint = `${this.baseUrl}/openai/deployments/${this.deploymentName}/images/generations?api-version=${this.apiVersion}`;
    // gpt-image deployments reject the DALL-E-only knobs below.
    const isGptImage = this.deploymentName.toLowerCase().includes("gpt-image");
    const body = {
      prompt: options.prompt,
      n: options.n || 1,
      size: options.size || "1024x1024"
    };
    if (!isGptImage) {
      if (options.quality) body.quality = options.quality;
      if (options.style) body.style = options.style;
      if (options.responseFormat) body.response_format = options.responseFormat;
      if (options.user) body.user = options.user;
    }
    console.log(`[krisspy-ai] Generating image with Azure DALL-E (deployment: ${this.deploymentName})...`);
    const response = await (0, import_node_fetch.default)(endpoint, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "api-key": this.apiKey
      },
      body: JSON.stringify(body)
    });
    if (!response.ok) {
      const raw = await response.text();
      // Prefer the structured API error message when the body is JSON.
      let detail = raw;
      try {
        detail = JSON.parse(raw).error?.message || raw;
      } catch {
      }
      throw new Error(`[krisspy-ai] Azure DALL-E generation failed: ${response.status} - ${detail}`);
    }
    const payload = await response.json();
    console.log(`[krisspy-ai] Image generation complete. Generated ${payload.data.length} image(s).`);
    return {
      created: payload.created,
      data: payload.data.map(({ url, b64_json, revised_prompt }) => ({
        url,
        b64_json,
        revisedPrompt: revised_prompt
      }))
    };
  }
};
1476
+
1477
+ // src/services/image/openai-image.ts
1478
+ var import_node_fetch2 = __toESM(require("node-fetch"));
1479
var OpenAIImageService = class extends BaseImageService {
  /** Image generation via the OpenAI Images API (gpt-image-1 by default). */
  constructor(config = {}) {
    super(config);
    this.apiKey = config.apiKey || "";
    this.baseUrl = config.baseUrl || "https://api.openai.com/v1";
    // "deploymentName" doubles as the model id for config parity with Azure.
    this.model = config.deploymentName || "gpt-image-1";
  }
  getName() {
    return "openai-image";
  }
  /**
   * Generate image(s) for a prompt.
   * @param {object} options - prompt (required), n, size, quality,
   *   responseFormat, user.
   * @returns normalized { created, data: [{ url, b64_json, revisedPrompt }] }.
   * @throws {Error} when the API key is missing or the API call fails.
   */
  async generate(options) {
    if (!this.apiKey) {
      throw new Error("[krisspy-ai] OpenAI apiKey is required for image generation");
    }
    const endpoint = `${this.baseUrl}/images/generations`;
    const body = {
      model: this.model,
      prompt: options.prompt,
      n: options.n || 1,
      size: options.size || "1024x1024"
    };
    if (options.quality) body.quality = options.quality;
    // gpt-image models reject response_format (they always return base64).
    if (options.responseFormat && !this.model.includes("gpt-image")) {
      body.response_format = options.responseFormat;
    }
    if (options.user) body.user = options.user;
    console.log(`[krisspy-ai] Generating image with OpenAI (model: ${this.model})...`);
    const response = await (0, import_node_fetch2.default)(endpoint, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "Authorization": `Bearer ${this.apiKey}`
      },
      body: JSON.stringify(body)
    });
    if (!response.ok) {
      const raw = await response.text();
      // Prefer the structured API error message when the body is JSON.
      let detail = raw;
      try {
        detail = JSON.parse(raw).error?.message || raw;
      } catch {
      }
      throw new Error(`[krisspy-ai] OpenAI image generation failed: ${response.status} - ${detail}`);
    }
    const payload = await response.json();
    console.log(`[krisspy-ai] Image generation complete. Generated ${payload.data.length} image(s).`);
    return {
      created: payload.created,
      data: payload.data.map(({ url, b64_json, revised_prompt }) => ({
        url,
        b64_json,
        revisedPrompt: revised_prompt
      }))
    };
  }
};
1536
+
1537
+ // src/services/image/krisspy-image.ts
1538
+ var import_node_fetch3 = __toESM(require("node-fetch"));
1539
var KrisspyImageService = class extends BaseImageService {
  getName() {
    return "krisspy";
  }
  /**
   * Generate one image via the Krisspy backend.
   * The backend returns a base64 image (no URL form).
   * @throws {Error} when no API key is configured or the call fails.
   */
  async generate(options) {
    const { prompt, size = "1024x1024" } = options;
    const root = this.config.baseUrl || "https://api.krisspy.ai";
    const key = this.config.apiKey;
    if (!key) {
      throw new Error("API key is required for Krisspy image service");
    }
    const response = await (0, import_node_fetch3.default)(`${root}/api/v1/ai/image`, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "Authorization": `Bearer ${key}`
      },
      body: JSON.stringify({ prompt, size })
    });
    if (!response.ok) {
      const error = await response.json();
      throw new Error(error.message || `Image generation failed: ${response.statusText}`);
    }
    const data = await response.json();
    return {
      created: Math.floor(Date.now() / 1e3),
      data: [{
        b64_json: data.image,
        url: void 0,
        revisedPrompt: data.revisedPrompt
      }]
    };
  }
};
1573
+
1574
+ // src/services/video/azure-sora.ts
1575
+ var import_node_fetch4 = __toESM(require("node-fetch"));
1576
+ var import_form_data = __toESM(require("form-data"));
1577
+
1578
+ // src/services/video/base.ts
1579
// Base class for async video-generation services (submit / poll / fetch).
var BaseVideoService = class extends BaseGenerativeService {
  constructor(config = {}) {
    super(config);
  }
  /** @returns {"video"} service-type discriminator */
  getServiceType() {
    return "video";
  }
  /**
   * Submit a job and poll until it leaves pending/running, yielding each
   * status snapshot along the way. Returns the final result.
   *
   * @param options - Video generation options.
   * @param pollInterval - ms between status checks (default 5000).
   * @param timeout - max total wait in ms (default 600000 = 10 min).
   * @throws {Error} on timeout, failure, or cancellation.
   */
  async *generateAndWait(options, pollInterval = 5e3, timeout = 6e5) {
    const submission = await this.submitJob(options);
    const startedAt = Date.now();
    console.log(`[krisspy-ai] Video job submitted: ${submission.jobId}`);
    let snapshot;
    while (true) {
      // Sleep first: the job was only just submitted.
      await new Promise((wake) => setTimeout(wake, pollInterval));
      snapshot = await this.getJobStatus(submission.jobId);
      console.log(`[krisspy-ai] Job ${submission.jobId} status: ${snapshot.status}`);
      yield snapshot;
      if (Date.now() - startedAt > timeout) {
        throw new Error(`[krisspy-ai] Video generation timed out after ${timeout}ms`);
      }
      const stillWorking = snapshot.status === "pending" || snapshot.status === "running";
      if (!stillWorking) break;
    }
    if (snapshot.status === "failed") {
      throw new Error(`[krisspy-ai] Video generation failed: ${snapshot.error || "Unknown error"}`);
    }
    if (snapshot.status === "canceled") {
      throw new Error(`[krisspy-ai] Video generation was canceled`);
    }
    return await this.getResult(submission.jobId);
  }
};
1617
+
1618
+ // src/services/video/azure-sora.ts
1619
/**
 * Video generation via the Azure OpenAI Sora preview API.
 * Lifecycle: submitJob() -> getJobStatus() (poll) -> getResult() ->
 * downloadVideo(); generateAndWait() on the base class drives the polling.
 */
var AzureSoraService = class extends BaseVideoService {
  constructor(config = {}) {
    super(config);
    this.baseUrl = config.baseUrl || "";
    this.apiKey = config.apiKey || "";
    this.deploymentName = config.deploymentName || "sora-2";
    this.apiVersion = config.apiVersion || "preview";
  }
  getName() {
    return "azure-sora";
  }
  // Map Azure job states onto the SDK's canonical status values;
  // unknown states fall back to "pending" (logged).
  mapStatus(azureStatus) {
    switch (azureStatus) {
      // Azure Sora uses lowercase snake_case status values
      case "pending":
      case "queued":
        return "pending";
      case "in_progress":
        return "running";
      case "completed":
        return "succeeded";
      case "failed":
        return "failed";
      case "canceled":
        return "canceled";
      default:
        console.log(`[krisspy-ai] Unknown status: ${azureStatus}`);
        return "pending";
    }
  }
  /**
   * Submit a video-generation job.
   * Clamps size to the supported set and snaps duration to the nearest of
   * 4/8/12 s. Sends multipart form-data when a reference image is supplied,
   * plain JSON otherwise.
   * @returns {{ jobId, status, createdAt }}
   * @throws {Error} when config is incomplete or submission fails.
   */
  async submitJob(options) {
    if (!this.baseUrl) {
      throw new Error("[krisspy-ai] Azure baseUrl is required for video generation");
    }
    if (!this.apiKey) {
      throw new Error("[krisspy-ai] Azure apiKey is required for video generation");
    }
    const endpoint = `${this.baseUrl}/openai/v1/videos`;
    // Unsupported sizes silently fall back to 1280x720.
    const validSizes = ["720x1280", "1280x720", "1024x1792", "1792x1024"];
    const requestedSize = options.width && options.height ? `${options.width}x${options.height}` : "1280x720";
    const size = validSizes.includes(requestedSize) ? requestedSize : "1280x720";
    const requestedDuration = options.duration || 4;
    const validDurations = [4, 8, 12];
    // Snap to the closest supported duration.
    const seconds = validDurations.includes(requestedDuration) ? requestedDuration : validDurations.reduce(
      (prev, curr) => Math.abs(curr - requestedDuration) < Math.abs(prev - requestedDuration) ? curr : prev
    );
    console.log(`[krisspy-ai] Submitting video job to Azure Sora (deployment: ${this.deploymentName})...`);
    let response;
    if (options.referenceImage) {
      // Image-conditioned generation requires multipart form-data.
      const formData = new import_form_data.default();
      formData.append("model", this.deploymentName);
      formData.append("prompt", options.prompt);
      formData.append("size", size);
      formData.append("seconds", String(seconds));
      if (options.nVariants && options.nVariants > 1) {
        formData.append("n_variants", String(options.nVariants));
      }
      // referenceImage may be an http(s) URL, a data: URI, or raw base64.
      const isUrl = options.referenceImage.startsWith("http://") || options.referenceImage.startsWith("https://");
      if (isUrl) {
        const imageResponse = await (0, import_node_fetch4.default)(options.referenceImage);
        const imageBuffer = await imageResponse.buffer();
        formData.append("input_reference", imageBuffer, { filename: "reference.png", contentType: "image/png" });
      } else {
        const base64Data = options.referenceImage.startsWith("data:") ? options.referenceImage.split(",")[1] : options.referenceImage;
        const imageBuffer = Buffer.from(base64Data, "base64");
        formData.append("input_reference", imageBuffer, { filename: "reference.png", contentType: "image/png" });
      }
      console.log(`[krisspy-ai] Using reference image for video generation (multipart form-data)`);
      response = await (0, import_node_fetch4.default)(endpoint, {
        method: "POST",
        headers: {
          "Authorization": `Bearer ${this.apiKey}`,
          // form-data supplies the multipart boundary header.
          ...formData.getHeaders()
        },
        body: formData
      });
    } else {
      const body = {
        prompt: options.prompt,
        seconds: String(seconds),
        size,
        model: this.deploymentName
      };
      if (options.nVariants && options.nVariants > 1) {
        body.n_variants = String(options.nVariants);
      }
      response = await (0, import_node_fetch4.default)(endpoint, {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          "Authorization": `Bearer ${this.apiKey}`
        },
        body: JSON.stringify(body)
      });
    }
    if (!response.ok) {
      const errorText = await response.text();
      // Prefer the structured API error message when the body is JSON.
      let errorMessage = errorText;
      try {
        const parsed = JSON.parse(errorText);
        errorMessage = parsed.error?.message || errorText;
      } catch {
      }
      throw new Error(`[krisspy-ai] Azure Sora job submission failed: ${response.status} - ${errorMessage}`);
    }
    const data = await response.json();
    return {
      jobId: data.id,
      status: this.mapStatus(data.status),
      // created_at is epoch seconds; convert to ISO-8601.
      createdAt: new Date(data.created_at * 1e3).toISOString()
    };
  }
  /**
   * Fetch the current status of a submitted job.
   * @returns {{ jobId, status, progress, error, createdAt, expiresAt }}
   * @throws {Error} on a non-OK HTTP response.
   */
  async getJobStatus(jobId) {
    const endpoint = `${this.baseUrl}/openai/v1/videos/${jobId}`;
    const response = await (0, import_node_fetch4.default)(endpoint, {
      method: "GET",
      headers: {
        "Authorization": `Bearer ${this.apiKey}`
      }
    });
    if (!response.ok) {
      const errorText = await response.text();
      throw new Error(`[krisspy-ai] Failed to get job status: ${response.status} - ${errorText}`);
    }
    const data = await response.json();
    if (data.status === "failed" && data.error) {
      console.log(`[krisspy-ai] Job failed: ${data.error.code} - ${data.error.message}`);
    }
    return {
      jobId: data.id,
      status: this.mapStatus(data.status),
      progress: data.progress,
      error: data.error?.message,
      createdAt: new Date(data.created_at * 1e3).toISOString(),
      expiresAt: data.expires_at ? new Date(data.expires_at * 1e3).toISOString() : void 0
    };
  }
  /**
   * Fetch the final result of a completed job.
   * NOTE(review): Azure's response does not list separate generations here;
   * the job id itself is used as the single generation id for downloadVideo().
   * @throws {Error} when the job is not yet "completed" or the call fails.
   */
  async getResult(jobId) {
    const endpoint = `${this.baseUrl}/openai/v1/videos/${jobId}`;
    const response = await (0, import_node_fetch4.default)(endpoint, {
      method: "GET",
      headers: {
        "Authorization": `Bearer ${this.apiKey}`
      }
    });
    if (!response.ok) {
      const errorText = await response.text();
      throw new Error(`[krisspy-ai] Failed to get job result: ${response.status} - ${errorText}`);
    }
    const data = await response.json();
    if (data.status !== "completed") {
      throw new Error(`[krisspy-ai] Job ${jobId} not complete. Status: ${data.status}`);
    }
    const generations = [{ id: data.id }];
    console.log(`[krisspy-ai] Video generation complete. ${generations.length} video(s) generated.`);
    return {
      jobId: data.id,
      status: "succeeded",
      generations,
      createdAt: new Date(data.created_at * 1e3).toISOString()
    };
  }
  /**
   * Download the rendered video bytes for a generation.
   * @returns {Buffer} raw video content.
   * @throws {Error} on a non-OK HTTP response.
   */
  async downloadVideo(generationId) {
    const endpoint = `${this.baseUrl}/openai/v1/videos/${generationId}/content`;
    console.log(`[krisspy-ai] Downloading video ${generationId}...`);
    const response = await (0, import_node_fetch4.default)(endpoint, {
      method: "GET",
      headers: {
        "Authorization": `Bearer ${this.apiKey}`
      }
    });
    if (!response.ok) {
      const errorText = await response.text();
      throw new Error(`[krisspy-ai] Failed to download video: ${response.status} - ${errorText}`);
    }
    const buffer = await response.buffer();
    console.log(`[krisspy-ai] Video downloaded: ${buffer.length} bytes`);
    return buffer;
  }
};
1799
+
1800
+ // src/services/video/openai-sora.ts
1801
+ var import_node_fetch5 = __toESM(require("node-fetch"));
1802
+ var import_form_data2 = __toESM(require("form-data"));
1803
+ var OpenAISoraService = class extends BaseVideoService {
1804
  constructor(config = {}) {
    super(config);
    // Direct OpenAI endpoint (not Azure); override via config.baseUrl.
    this.baseUrl = config.baseUrl || "https://api.openai.com/v1";
    this.apiKey = config.apiKey || "";
    // "deploymentName" doubles as the model id for config parity with Azure.
    this.model = config.deploymentName || "sora-2";
  }
1810
  /** @returns {"openai-sora"} provider identifier */
  getName() {
    return "openai-sora";
  }
1813
+ mapStatus(openaiStatus) {
1814
+ switch (openaiStatus) {
1815
+ case "pending":
1816
+ case "queued":
1817
+ return "pending";
1818
+ case "in_progress":
1819
+ return "running";
1820
+ case "completed":
1821
+ return "succeeded";
1822
+ case "failed":
1823
+ return "failed";
1824
+ case "canceled":
1825
+ return "canceled";
1826
+ default:
1827
+ console.log(`[krisspy-ai] Unknown status: ${openaiStatus}`);
1828
+ return "pending";
1829
+ }
1830
+ }
1831
  /**
   * Submit a video-generation job to OpenAI Sora.
   * Clamps size to the supported set and snaps duration to the nearest of
   * 5/10/15/20 s. Sends multipart form-data when a reference image is
   * supplied, plain JSON otherwise.
   * NOTE(review): field names differ from the Azure variant ("duration"/"n"/
   * "image" vs "seconds"/"n_variants"/"input_reference") — confirm against
   * the current OpenAI Videos API reference.
   * @returns {{ jobId, status, createdAt }}
   * @throws {Error} when the API key is missing or submission fails.
   */
  async submitJob(options) {
    if (!this.apiKey) {
      throw new Error("[krisspy-ai] OpenAI apiKey is required for video generation");
    }
    const endpoint = `${this.baseUrl}/videos/generations`;
    // Unsupported sizes silently fall back to 1280x720.
    const validSizes = ["720x1280", "1280x720", "1024x1792", "1792x1024"];
    const requestedSize = options.width && options.height ? `${options.width}x${options.height}` : "1280x720";
    const size = validSizes.includes(requestedSize) ? requestedSize : "1280x720";
    const requestedDuration = options.duration || 5;
    const validDurations = [5, 10, 15, 20];
    // Snap to the closest supported duration.
    const seconds = validDurations.includes(requestedDuration) ? requestedDuration : validDurations.reduce(
      (prev, curr) => Math.abs(curr - requestedDuration) < Math.abs(prev - requestedDuration) ? curr : prev
    );
    console.log(`[krisspy-ai] Submitting video job to OpenAI Sora (model: ${this.model})...`);
    let response;
    if (options.referenceImage) {
      // Image-conditioned generation requires multipart form-data.
      const formData = new import_form_data2.default();
      formData.append("model", this.model);
      formData.append("prompt", options.prompt);
      formData.append("size", size);
      formData.append("duration", String(seconds));
      if (options.nVariants && options.nVariants > 1) {
        formData.append("n", String(options.nVariants));
      }
      // referenceImage may be an http(s) URL, a data: URI, or raw base64.
      const isUrl = options.referenceImage.startsWith("http://") || options.referenceImage.startsWith("https://");
      if (isUrl) {
        const imageResponse = await (0, import_node_fetch5.default)(options.referenceImage);
        const imageBuffer = await imageResponse.buffer();
        formData.append("image", imageBuffer, { filename: "reference.png", contentType: "image/png" });
      } else {
        const base64Data = options.referenceImage.startsWith("data:") ? options.referenceImage.split(",")[1] : options.referenceImage;
        const imageBuffer = Buffer.from(base64Data, "base64");
        formData.append("image", imageBuffer, { filename: "reference.png", contentType: "image/png" });
      }
      console.log(`[krisspy-ai] Using reference image for video generation (multipart form-data)`);
      response = await (0, import_node_fetch5.default)(endpoint, {
        method: "POST",
        headers: {
          "Authorization": `Bearer ${this.apiKey}`,
          // form-data supplies the multipart boundary header.
          ...formData.getHeaders()
        },
        body: formData
      });
    } else {
      const body = {
        model: this.model,
        prompt: options.prompt,
        duration: seconds,
        size
      };
      if (options.nVariants && options.nVariants > 1) {
        body.n = options.nVariants;
      }
      response = await (0, import_node_fetch5.default)(endpoint, {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          "Authorization": `Bearer ${this.apiKey}`
        },
        body: JSON.stringify(body)
      });
    }
    if (!response.ok) {
      const errorText = await response.text();
      // Prefer the structured API error message when the body is JSON.
      let errorMessage = errorText;
      try {
        const parsed = JSON.parse(errorText);
        errorMessage = parsed.error?.message || errorText;
      } catch {
      }
      throw new Error(`[krisspy-ai] OpenAI Sora job submission failed: ${response.status} - ${errorMessage}`);
    }
    const data = await response.json();
    return {
      jobId: data.id,
      status: this.mapStatus(data.status),
      // created_at is epoch seconds; convert to ISO-8601.
      createdAt: new Date(data.created_at * 1e3).toISOString()
    };
  }
1910
+ async getJobStatus(jobId) {
1911
+ const endpoint = `${this.baseUrl}/videos/generations/${jobId}`;
1912
+ const response = await (0, import_node_fetch5.default)(endpoint, {
1913
+ method: "GET",
1914
+ headers: {
1915
+ "Authorization": `Bearer ${this.apiKey}`
1916
+ }
1917
+ });
1918
+ if (!response.ok) {
1919
+ const errorText = await response.text();
1920
+ throw new Error(`[krisspy-ai] Failed to get job status: ${response.status} - ${errorText}`);
1921
+ }
1922
+ const data = await response.json();
1923
+ if (data.status === "failed" && data.error) {
1924
+ console.log(`[krisspy-ai] Job failed: ${data.error.code} - ${data.error.message}`);
1925
+ }
1926
+ return {
1927
+ jobId: data.id,
1928
+ status: this.mapStatus(data.status),
1929
+ progress: data.progress,
1930
+ error: data.error?.message,
1931
+ createdAt: new Date(data.created_at * 1e3).toISOString(),
1932
+ expiresAt: data.expires_at ? new Date(data.expires_at * 1e3).toISOString() : void 0
1933
+ };
1934
+ }
1935
+ async getResult(jobId) {
1936
+ const endpoint = `${this.baseUrl}/videos/generations/${jobId}`;
1937
+ const response = await (0, import_node_fetch5.default)(endpoint, {
1938
+ method: "GET",
1939
+ headers: {
1940
+ "Authorization": `Bearer ${this.apiKey}`
1941
+ }
1942
+ });
1943
+ if (!response.ok) {
1944
+ const errorText = await response.text();
1945
+ throw new Error(`[krisspy-ai] Failed to get job result: ${response.status} - ${errorText}`);
1946
+ }
1947
+ const data = await response.json();
1948
+ if (data.status !== "completed") {
1949
+ throw new Error(`[krisspy-ai] Job ${jobId} not complete. Status: ${data.status}`);
1950
+ }
1951
+ const generations = [{ id: data.id }];
1952
+ console.log(`[krisspy-ai] Video generation complete. ${generations.length} video(s) generated.`);
1953
+ return {
1954
+ jobId: data.id,
1955
+ status: "succeeded",
1956
+ generations,
1957
+ createdAt: new Date(data.created_at * 1e3).toISOString()
1958
+ };
1959
+ }
1960
+ async downloadVideo(generationId) {
1961
+ const endpoint = `${this.baseUrl}/videos/generations/${generationId}/content`;
1962
+ console.log(`[krisspy-ai] Downloading video ${generationId}...`);
1963
+ const response = await (0, import_node_fetch5.default)(endpoint, {
1964
+ method: "GET",
1965
+ headers: {
1966
+ "Authorization": `Bearer ${this.apiKey}`
1967
+ }
1968
+ });
1969
+ if (!response.ok) {
1970
+ const errorText = await response.text();
1971
+ throw new Error(`[krisspy-ai] Failed to download video: ${response.status} - ${errorText}`);
1972
+ }
1973
+ const buffer = await response.buffer();
1974
+ console.log(`[krisspy-ai] Video downloaded: ${buffer.length} bytes`);
1975
+ return buffer;
1976
+ }
1977
+ };
1978
+
1979
+ // src/services/video/krisspy-sora.ts
1980
+ var import_node_fetch6 = __toESM(require("node-fetch"));
1981
var KrisspySoraService = class extends BaseVideoService {
  constructor() {
    super(...arguments);
    // Base64 video payload returned inline by submitJob; consumed (and
    // cleared) by downloadVideo.
    this.cachedVideo = null;
  }
  getName() {
    return "krisspy";
  }
  /**
   * Extract a human-readable message from a failed response without assuming
   * the body is JSON. Fix: the previous code called response.json() directly,
   * so a non-JSON error body (e.g. an HTML 502 page from a gateway) threw a
   * SyntaxError that masked the real failure.
   *
   * @param {object} response - non-OK node-fetch response.
   * @param {string} fallback - message used when no JSON `message` is present.
   */
  async readErrorMessage(response, fallback) {
    const raw = await response.text();
    try {
      const parsed = JSON.parse(raw);
      return parsed.message || fallback;
    } catch {
      return fallback;
    }
  }
  /**
   * Submit a text-to-video request to the Krisspy backend. The backend
   * responds synchronously with the finished video, so the returned job is
   * immediately reported as "succeeded".
   *
   * @param {object} options - { prompt, duration? (seconds, default 5) }.
   * @throws {Error} on missing API key or request failure.
   */
  async submitJob(options) {
    const { prompt, duration = 5 } = options;
    const baseUrl = this.config.baseUrl || "https://api.krisspy.ai";
    const apiKey = this.config.apiKey;
    if (!apiKey) {
      throw new Error("API key is required for Krisspy video service");
    }
    const response = await (0, import_node_fetch6.default)(`${baseUrl}/api/v1/ai/video`, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "Authorization": `Bearer ${apiKey}`
      },
      body: JSON.stringify({ prompt, duration })
    });
    if (!response.ok) {
      throw new Error(await this.readErrorMessage(response, `Video generation failed: ${response.statusText}`));
    }
    const data = await response.json();
    this.cachedVideo = data.video;
    return {
      jobId: data.jobId,
      status: "succeeded",
      createdAt: (/* @__PURE__ */ new Date()).toISOString()
    };
  }
  /**
   * Query job status on the backend.
   * NOTE(review): submitJob posts to `/api/v1/ai/video` but this path lacks
   * the `/api/v1` prefix - confirm against the backend's routing.
   */
  async getJobStatus(jobId) {
    const baseUrl = this.config.baseUrl || "https://api.krisspy.ai";
    const apiKey = this.config.apiKey;
    if (!apiKey) {
      throw new Error("API key is required for Krisspy video service");
    }
    const response = await (0, import_node_fetch6.default)(`${baseUrl}/ai/video/status/${jobId}`, {
      method: "GET",
      headers: {
        "Authorization": `Bearer ${apiKey}`
      }
    });
    if (!response.ok) {
      throw new Error(await this.readErrorMessage(response, `Failed to get video status: ${response.statusText}`));
    }
    const data = await response.json();
    return data;
  }
  /**
   * Fetch the job result record from the backend.
   * NOTE(review): same missing `/api/v1` prefix as getJobStatus - confirm.
   */
  async getResult(jobId) {
    const baseUrl = this.config.baseUrl || "https://api.krisspy.ai";
    const apiKey = this.config.apiKey;
    if (!apiKey) {
      throw new Error("API key is required for Krisspy video service");
    }
    const response = await (0, import_node_fetch6.default)(`${baseUrl}/ai/video/result/${jobId}`, {
      method: "GET",
      headers: {
        "Authorization": `Bearer ${apiKey}`
      }
    });
    if (!response.ok) {
      throw new Error(await this.readErrorMessage(response, `Failed to get video result: ${response.statusText}`));
    }
    const data = await response.json();
    return data;
  }
  /**
   * Return the video cached by the last submitJob call as a Buffer.
   * The cache is cleared after a successful read.
   *
   * @throws {Error} when no cached video is available.
   */
  async downloadVideo(jobId) {
    if (this.cachedVideo) {
      const buffer = Buffer.from(this.cachedVideo, "base64");
      this.cachedVideo = null;
      return buffer;
    }
    throw new Error("No video available. Call submitJob first.");
  }
};
2063
+
2064
+ // src/services/audio/azure-transcription.ts
2065
+ var import_node_fetch7 = __toESM(require("node-fetch"));
2066
+ var import_form_data3 = __toESM(require("form-data"));
2067
+
2068
+ // src/services/audio/base.ts
2069
// Abstract base for speech-to-text services; concrete subclasses provide
// getName() and transcribe().
var BaseTranscriptionService = class extends BaseGenerativeService {
  constructor(config = {}) {
    super(config);
  }
  // Discriminator identifying this service family.
  getServiceType() {
    return "transcription";
  }
};
2077
// Abstract base for text-to-speech services; concrete subclasses provide
// getName() and synthesize().
var BaseTTSService = class extends BaseGenerativeService {
  constructor(config = {}) {
    super(config);
  }
  // Discriminator identifying this service family.
  getServiceType() {
    return "tts";
  }
};
2085
+
2086
+ // src/services/audio/azure-transcription.ts
2087
var AzureTranscriptionService = class extends BaseTranscriptionService {
  /**
   * Speech-to-text backed by an Azure OpenAI Whisper deployment.
   *
   * @param {object} [config] - { baseUrl, apiKey, deploymentName, apiVersion }.
   */
  constructor(config = {}) {
    super(config);
    this.baseUrl = config.baseUrl || "";
    this.apiKey = config.apiKey || "";
    this.deploymentName = config.deploymentName || "whisper";
    this.apiVersion = config.apiVersion || "2024-10-21";
  }
  getName() {
    return "azure-transcription";
  }
  /**
   * Transcribe audio via the Azure OpenAI `audio/transcriptions` endpoint.
   *
   * @param {object} options - { audio: Buffer | file path string, language?,
   *   prompt?, responseFormat?, temperature?, timestampGranularities? }.
   * @returns {Promise<object>} { text, language?, duration?, words?, segments? };
   *   for "text"/"srt"/"vtt" formats only { text } is returned.
   * @throws {Error} on missing config, unsupported audio input, or API failure.
   */
  async transcribe(options) {
    if (!this.baseUrl) {
      throw new Error("[krisspy-ai] Azure baseUrl is required for transcription");
    }
    if (!this.apiKey) {
      throw new Error("[krisspy-ai] Azure apiKey is required for transcription");
    }
    const endpoint = `${this.baseUrl}/openai/deployments/${this.deploymentName}/audio/transcriptions?api-version=${this.apiVersion}`;
    const formData = new import_form_data3.default();
    if (Buffer.isBuffer(options.audio)) {
      formData.append("file", options.audio, { filename: "audio.mp3", contentType: "audio/mpeg" });
    } else if (typeof options.audio === "string") {
      const fs = await import("fs");
      const buffer = fs.readFileSync(options.audio);
      // Fix: take the basename for both "/" and "\" separators so Windows
      // paths do not leak a full path into the uploaded filename.
      const filename = options.audio.split(/[\\/]/).pop() || "audio.mp3";
      formData.append("file", buffer, { filename });
    } else {
      // Fix: previously fell through silently and sent a request with no
      // file part, yielding an opaque server-side error.
      throw new Error("[krisspy-ai] options.audio must be a Buffer or a file path string");
    }
    if (options.language) {
      formData.append("language", options.language);
    }
    if (options.prompt) {
      formData.append("prompt", options.prompt);
    }
    if (options.responseFormat) {
      formData.append("response_format", options.responseFormat);
    }
    if (options.temperature !== void 0) {
      formData.append("temperature", String(options.temperature));
    }
    if (options.timestampGranularities && options.timestampGranularities.length > 0) {
      for (const granularity of options.timestampGranularities) {
        formData.append("timestamp_granularities[]", granularity);
      }
    }
    console.log(`[krisspy-ai] Transcribing audio with Azure (deployment: ${this.deploymentName})...`);
    const response = await (0, import_node_fetch7.default)(endpoint, {
      method: "POST",
      headers: {
        "api-key": this.apiKey,
        ...formData.getHeaders()
      },
      body: formData
    });
    if (!response.ok) {
      const errorText = await response.text();
      let errorMessage = errorText;
      try {
        const parsed = JSON.parse(errorText);
        errorMessage = parsed.error?.message || errorText;
      } catch {
      }
      throw new Error(`[krisspy-ai] Azure transcription failed: ${response.status} - ${errorMessage}`);
    }
    // These response formats return a plain string body, not JSON.
    if (options.responseFormat === "text" || options.responseFormat === "srt" || options.responseFormat === "vtt") {
      const text = await response.text();
      return { text };
    }
    const data = await response.json();
    console.log(`[krisspy-ai] Transcription complete. Duration: ${data.duration || "unknown"}s`);
    return {
      text: data.text,
      language: data.language,
      duration: data.duration,
      words: data.words,
      segments: data.segments
    };
  }
};
2170
+
2171
+ // src/services/audio/openai-transcription.ts
2172
+ var import_node_fetch8 = __toESM(require("node-fetch"));
2173
+ var import_form_data4 = __toESM(require("form-data"));
2174
var OpenAITranscriptionService = class extends BaseTranscriptionService {
  /**
   * Speech-to-text via the OpenAI `audio/transcriptions` API.
   *
   * @param {object} [config] - { baseUrl, apiKey, deploymentName }. The
   *   Azure-style deployment name "whisper" is normalized to "whisper-1".
   */
  constructor(config = {}) {
    super(config);
    this.baseUrl = config.baseUrl || "https://api.openai.com/v1";
    this.apiKey = config.apiKey || "";
    let modelName = config.deploymentName || "whisper-1";
    if (modelName === "whisper") modelName = "whisper-1";
    this.model = modelName;
  }
  getName() {
    return "openai-transcription";
  }
  /**
   * Transcribe audio with the configured model.
   *
   * @param {object} options - { audio: Buffer | file path string, language?,
   *   prompt?, responseFormat?, temperature?, timestampGranularities? }.
   * @returns {Promise<object>} { text, language?, duration?, words?,
   *   segments?, utterances? }; for "text"/"srt"/"vtt" only { text }.
   * @throws {Error} on missing API key, unsupported audio input, or API failure.
   */
  async transcribe(options) {
    if (!this.apiKey) {
      throw new Error("[krisspy-ai] OpenAI apiKey is required for transcription");
    }
    const endpoint = `${this.baseUrl}/audio/transcriptions`;
    const formData = new import_form_data4.default();
    formData.append("model", this.model);
    if (Buffer.isBuffer(options.audio)) {
      formData.append("file", options.audio, { filename: "audio.mp3", contentType: "audio/mpeg" });
    } else if (typeof options.audio === "string") {
      const fs = await import("fs");
      const buffer = fs.readFileSync(options.audio);
      // Fix: take the basename for both "/" and "\" separators so Windows
      // paths do not leak a full path into the uploaded filename.
      const filename = options.audio.split(/[\\/]/).pop() || "audio.mp3";
      formData.append("file", buffer, { filename });
    } else {
      // Fix: previously fell through silently and sent a request with no
      // file part, yielding an opaque server-side error.
      throw new Error("[krisspy-ai] options.audio must be a Buffer or a file path string");
    }
    if (options.language) {
      formData.append("language", options.language);
    }
    if (options.prompt) {
      formData.append("prompt", options.prompt);
    }
    if (options.responseFormat) {
      formData.append("response_format", options.responseFormat);
    }
    if (options.temperature !== void 0) {
      formData.append("temperature", String(options.temperature));
    }
    if (options.timestampGranularities && options.timestampGranularities.length > 0) {
      for (const granularity of options.timestampGranularities) {
        formData.append("timestamp_granularities[]", granularity);
      }
    }
    console.log(`[krisspy-ai] Transcribing audio with OpenAI (model: ${this.model})...`);
    const response = await (0, import_node_fetch8.default)(endpoint, {
      method: "POST",
      headers: {
        "Authorization": `Bearer ${this.apiKey}`,
        ...formData.getHeaders()
      },
      body: formData
    });
    if (!response.ok) {
      const errorText = await response.text();
      let errorMessage = errorText;
      try {
        const parsed = JSON.parse(errorText);
        errorMessage = parsed.error?.message || errorText;
      } catch {
      }
      throw new Error(`[krisspy-ai] OpenAI transcription failed: ${response.status} - ${errorMessage}`);
    }
    // These response formats return a plain string body, not JSON.
    if (options.responseFormat === "text" || options.responseFormat === "srt" || options.responseFormat === "vtt") {
      const text = await response.text();
      return { text };
    }
    const data = await response.json();
    console.log(`[krisspy-ai] Transcription complete. Duration: ${data.duration || "unknown"}s`);
    const result = {
      text: data.text,
      language: data.language,
      duration: data.duration,
      words: data.words,
      segments: data.segments
    };
    // Models with diarization return speaker-tagged utterances; pass through.
    if (data.utterances) {
      result.utterances = data.utterances;
      console.log(`[krisspy-ai] Diarization: ${data.utterances.length} utterances from ${new Set(data.utterances.map((u) => u.speaker)).size} speakers`);
    }
    return result;
  }
};
2261
+
2262
+ // src/services/audio/krisspy-transcription.ts
2263
+ var import_node_fetch9 = __toESM(require("node-fetch"));
2264
+ var import_form_data5 = __toESM(require("form-data"));
2265
var KrisspyTranscriptionService = class extends BaseTranscriptionService {
  getName() {
    return "krisspy";
  }
  /**
   * Transcribe audio through the Krisspy backend proxy.
   *
   * @param {object} options - { audio, language?, responseFormat?
   *   (default "verbose_json"), timestampGranularities? }.
   * @returns {Promise<object>} the backend's transcription payload, as-is.
   * @throws {Error} on missing API key or request failure.
   */
  async transcribe(options) {
    const { audio, language, responseFormat = "verbose_json", timestampGranularities } = options;
    const baseUrl = this.config.baseUrl || "https://api.krisspy.ai";
    const apiKey = this.config.apiKey;
    if (!apiKey) {
      throw new Error("API key is required for Krisspy transcription service");
    }
    const form = new import_form_data5.default();
    // NOTE(review): if `audio` is a path string this appends the raw string,
    // not the file's contents - confirm callers always pass a Buffer here.
    form.append("audio", audio, { filename: "audio.mp3" });
    if (language) form.append("language", language);
    if (responseFormat) form.append("response_format", responseFormat);
    if (timestampGranularities) {
      form.append("timestamp_granularities", JSON.stringify(timestampGranularities));
    }
    const response = await (0, import_node_fetch9.default)(`${baseUrl}/api/v1/ai/transcribe`, {
      method: "POST",
      headers: {
        "Authorization": `Bearer ${apiKey}`,
        ...form.getHeaders()
      },
      body: form
    });
    if (!response.ok) {
      // Fix: read the body as text first. The previous response.json() threw
      // a SyntaxError on non-JSON error bodies (e.g. HTML gateway pages),
      // masking the real HTTP failure.
      const raw = await response.text();
      let message = `Transcription failed: ${response.statusText}`;
      try {
        const parsed = JSON.parse(raw);
        message = parsed.message || message;
      } catch {
      }
      throw new Error(message);
    }
    const data = await response.json();
    return data;
  }
};
2299
+
2300
+ // src/services/audio/azure-tts.ts
2301
+ var import_node_fetch10 = __toESM(require("node-fetch"));
2302
var AzureTTSService = class extends BaseTTSService {
  /**
   * Text-to-speech backed by an Azure OpenAI speech deployment.
   *
   * @param {object} [config] - { baseUrl, apiKey, deploymentName, apiVersion }.
   */
  constructor(config = {}) {
    super(config);
    this.baseUrl = config.baseUrl || "";
    this.apiKey = config.apiKey || "";
    this.deploymentName = config.deploymentName || "tts";
    this.apiVersion = config.apiVersion || "2025-03-01-preview";
  }
  getName() {
    return "azure-tts";
  }
  /**
   * Synthesize speech for the given text.
   *
   * @param {object} options - { input, voice? ("alloy" default),
   *   responseFormat? ("mp3" default), speed? }.
   * @returns {Promise<{audio: Buffer, format: string}>}
   * @throws {Error} on missing config or API failure.
   */
  async synthesize(options) {
    if (!this.baseUrl) {
      throw new Error("[krisspy-ai] Azure baseUrl is required for TTS");
    }
    if (!this.apiKey) {
      throw new Error("[krisspy-ai] Azure apiKey is required for TTS");
    }
    const endpoint = `${this.baseUrl}/openai/deployments/${this.deploymentName}/audio/speech?api-version=${this.apiVersion}`;
    // Optional fields are only included when the caller supplied them.
    const payload = {
      model: this.deploymentName,
      input: options.input,
      voice: options.voice || "alloy",
      ...(options.responseFormat ? { response_format: options.responseFormat } : {}),
      ...(options.speed !== void 0 ? { speed: options.speed } : {})
    };
    console.log(`[krisspy-ai] Synthesizing speech with Azure TTS (deployment: ${this.deploymentName})...`);
    console.log(`[krisspy-ai] Azure TTS endpoint: ${endpoint}`);
    const response = await (0, import_node_fetch10.default)(endpoint, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "api-key": this.apiKey
      },
      body: JSON.stringify(payload)
    });
    if (!response.ok) {
      const raw = await response.text();
      let message = raw;
      try {
        message = JSON.parse(raw).error?.message || raw;
      } catch {
      }
      throw new Error(`[krisspy-ai] Azure TTS failed: ${response.status} - ${message}`);
    }
    const audio = await response.buffer();
    const format = options.responseFormat || "mp3";
    console.log(`[krisspy-ai] Speech synthesis complete: ${audio.length} bytes (${format})`);
    return { audio, format };
  }
};
2361
+
2362
+ // src/services/audio/openai-tts.ts
2363
+ var import_node_fetch11 = __toESM(require("node-fetch"));
2364
var OpenAITTSService = class extends BaseTTSService {
  /**
   * Text-to-speech via the OpenAI `audio/speech` API.
   *
   * @param {object} [config] - { baseUrl, apiKey, deploymentName }. The
   *   Azure-style names "tts"/"tts-hd" are normalized to "tts-1"/"tts-1-hd".
   */
  constructor(config = {}) {
    super(config);
    this.baseUrl = config.baseUrl || "https://api.openai.com/v1";
    this.apiKey = config.apiKey || "";
    const requested = config.deploymentName || "tts-1";
    const aliases = { "tts": "tts-1", "tts-hd": "tts-1-hd" };
    this.model = aliases[requested] ?? requested;
  }
  getName() {
    return "openai-tts";
  }
  /**
   * Synthesize speech for the given text.
   *
   * @param {object} options - { input, voice? ("alloy" default),
   *   responseFormat? ("mp3" default), speed?, instructions? (gpt-4o only) }.
   * @returns {Promise<{audio: Buffer, format: string}>}
   * @throws {Error} on missing API key or API failure.
   */
  async synthesize(options) {
    if (!this.apiKey) {
      throw new Error("[krisspy-ai] OpenAI apiKey is required for TTS");
    }
    const endpoint = `${this.baseUrl}/audio/speech`;
    // Optional fields are only included when supplied; `instructions` is
    // only honored by gpt-4o speech models.
    const payload = {
      model: this.model,
      input: options.input,
      voice: options.voice || "alloy",
      ...(options.responseFormat ? { response_format: options.responseFormat } : {}),
      ...(options.speed !== void 0 ? { speed: options.speed } : {}),
      ...(options.instructions && this.model.includes("gpt-4o") ? { instructions: options.instructions } : {})
    };
    console.log(`[krisspy-ai] Synthesizing speech with OpenAI TTS (model: ${this.model})...`);
    const response = await (0, import_node_fetch11.default)(endpoint, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "Authorization": `Bearer ${this.apiKey}`
      },
      body: JSON.stringify(payload)
    });
    if (!response.ok) {
      const raw = await response.text();
      let message = raw;
      try {
        message = JSON.parse(raw).error?.message || raw;
      } catch {
      }
      throw new Error(`[krisspy-ai] OpenAI TTS failed: ${response.status} - ${message}`);
    }
    const audio = await response.buffer();
    const format = options.responseFormat || "mp3";
    console.log(`[krisspy-ai] Speech synthesis complete: ${audio.length} bytes (${format})`);
    return { audio, format };
  }
};
2424
+
2425
+ // src/services/audio/krisspy-tts.ts
2426
+ var import_node_fetch12 = __toESM(require("node-fetch"));
2427
var KrisspyTTSService = class extends BaseTTSService {
  getName() {
    return "krisspy";
  }
  /**
   * Synthesize speech through the Krisspy backend proxy. The backend returns
   * base64-encoded audio which is decoded into a Buffer here.
   *
   * @param {object} options - { input, voice? ("nova"), speed? (1),
   *   instructions?, responseFormat? ("mp3") }.
   * @returns {Promise<{audio: Buffer, format: string}>}
   * @throws {Error} on missing API key or request failure.
   */
  async synthesize(options) {
    const { input, voice = "nova", speed = 1, instructions, responseFormat = "mp3" } = options;
    const baseUrl = this.config.baseUrl || "https://api.krisspy.ai";
    const apiKey = this.config.apiKey;
    if (!apiKey) {
      throw new Error("API key is required for Krisspy TTS service");
    }
    const response = await (0, import_node_fetch12.default)(`${baseUrl}/api/v1/ai/tts`, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "Authorization": `Bearer ${apiKey}`
      },
      body: JSON.stringify({
        text: input,
        voice,
        speed,
        instructions,
        response_format: responseFormat
      })
    });
    if (!response.ok) {
      // Fix: read the body as text first. The previous response.json() threw
      // a SyntaxError on non-JSON error bodies (e.g. HTML gateway pages),
      // masking the real HTTP failure.
      const raw = await response.text();
      let message = `TTS failed: ${response.statusText}`;
      try {
        const parsed = JSON.parse(raw);
        message = parsed.message || message;
      } catch {
      }
      throw new Error(message);
    }
    const data = await response.json();
    const audioBuffer = Buffer.from(data.audio, "base64");
    return {
      audio: audioBuffer,
      format: data.format || responseFormat
    };
  }
};
2464
+
2465
+ // src/services/index.ts
2466
// Registries mapping case-insensitive service aliases to implementation
// classes. Several aliases may point at the same class; lookups go through
// the get*Service factories below, and registrations through register*.
var imageServices = {
  // Krisspy AI (proxy to backend)
  "krisspy": KrisspyImageService,
  // Azure OpenAI
  "azure-dalle": AzureDalleService,
  "azure": AzureDalleService,
  "azure-gpt-image": AzureDalleService,
  // OpenAI direct
  "openai": OpenAIImageService,
  "openai-image": OpenAIImageService,
  "gpt-image": OpenAIImageService
};
var videoServices = {
  // Krisspy AI (proxy to backend)
  "krisspy": KrisspySoraService,
  // Azure OpenAI
  "azure-sora": AzureSoraService,
  "azure": AzureSoraService,
  // OpenAI direct
  "openai": OpenAISoraService,
  "openai-sora": OpenAISoraService,
  "sora": OpenAISoraService
};
var transcriptionServices = {
  // Krisspy AI (proxy to backend)
  "krisspy": KrisspyTranscriptionService,
  // Azure OpenAI
  "azure": AzureTranscriptionService,
  "azure-whisper": AzureTranscriptionService,
  "azure-transcription": AzureTranscriptionService,
  // OpenAI direct
  "openai": OpenAITranscriptionService,
  "openai-whisper": OpenAITranscriptionService,
  "whisper": OpenAITranscriptionService,
  "gpt-4o-transcribe": OpenAITranscriptionService,
  "gpt-4o-transcribe-diarize": OpenAITranscriptionService
};
var ttsServices = {
  // Krisspy AI (proxy to backend)
  "krisspy": KrisspyTTSService,
  // Azure OpenAI
  "azure": AzureTTSService,
  "azure-tts": AzureTTSService,
  // OpenAI direct
  "openai": OpenAITTSService,
  "openai-tts": OpenAITTSService,
  "tts": OpenAITTSService,
  "tts-hd": OpenAITTSService,
  "gpt-4o-mini-tts": OpenAITTSService
};
2516
/**
 * Instantiate an image service by (case-insensitive) alias.
 * @throws {Error} when the alias is not registered.
 */
function getImageService(name, config = {}) {
  const Ctor = imageServices[name.toLowerCase()];
  if (Ctor) {
    return new Ctor(config);
  }
  throw new Error(
    `[krisspy-ai] Unknown image service: ${name}. Available: ${Object.keys(imageServices).join(", ")}`
  );
}
2525
/**
 * Instantiate a video service by (case-insensitive) alias.
 * @throws {Error} when the alias is not registered.
 */
function getVideoService(name, config = {}) {
  const Ctor = videoServices[name.toLowerCase()];
  if (Ctor) {
    return new Ctor(config);
  }
  throw new Error(
    `[krisspy-ai] Unknown video service: ${name}. Available: ${Object.keys(videoServices).join(", ")}`
  );
}
2534
/**
 * Instantiate a transcription service by (case-insensitive) alias.
 * @throws {Error} when the alias is not registered.
 */
function getTranscriptionService(name, config = {}) {
  const Ctor = transcriptionServices[name.toLowerCase()];
  if (Ctor) {
    return new Ctor(config);
  }
  throw new Error(
    `[krisspy-ai] Unknown transcription service: ${name}. Available: ${Object.keys(transcriptionServices).join(", ")}`
  );
}
2543
/**
 * Instantiate a TTS service by (case-insensitive) alias.
 * @throws {Error} when the alias is not registered.
 */
function getTTSService(name, config = {}) {
  const Ctor = ttsServices[name.toLowerCase()];
  if (Ctor) {
    return new Ctor(config);
  }
  throw new Error(
    `[krisspy-ai] Unknown TTS service: ${name}. Available: ${Object.keys(ttsServices).join(", ")}`
  );
}
2552
/** Register (or override) an image service class under a case-insensitive alias. */
function registerImageService(name, ServiceClass) {
  const key = name.toLowerCase();
  imageServices[key] = ServiceClass;
}
2555
/** Register (or override) a video service class under a case-insensitive alias. */
function registerVideoService(name, ServiceClass) {
  const key = name.toLowerCase();
  videoServices[key] = ServiceClass;
}
2558
/** Register (or override) a transcription service class under a case-insensitive alias. */
function registerTranscriptionService(name, ServiceClass) {
  const key = name.toLowerCase();
  transcriptionServices[key] = ServiceClass;
}
2561
/** Register (or override) a TTS service class under a case-insensitive alias. */
function registerTTSService(name, ServiceClass) {
  const key = name.toLowerCase();
  ttsServices[key] = ServiceClass;
}
2564
/** List every registered image-service alias. */
function getAvailableImageServices() {
  const aliases = Object.keys(imageServices);
  return aliases;
}
2567
/** List every registered video-service alias. */
function getAvailableVideoServices() {
  const aliases = Object.keys(videoServices);
  return aliases;
}
2570
/** List every registered transcription-service alias. */
function getAvailableTranscriptionServices() {
  const aliases = Object.keys(transcriptionServices);
  return aliases;
}
2573
/** List every registered TTS-service alias. */
function getAvailableTTSServices() {
  const aliases = Object.keys(ttsServices);
  return aliases;
}
2576
+
2577
+ // src/index.ts
2578
// Per-provider fallback settings used by query(): when the caller omits
// baseUrl or model, the entry for the selected provider supplies them
// (unknown providers fall back to the claude_cli entry).
var PROVIDER_DEFAULTS = {
  anthropic: {
    baseUrl: "https://api.anthropic.com",
    model: "sonnet"
  },
  openai: {
    baseUrl: "https://api.openai.com/v1",
    model: "gpt-5.2"
  },
  gemini: {
    baseUrl: "https://generativelanguage.googleapis.com/v1beta/openai",
    model: "gemini-2.5-flash"
  },
  zai: {
    baseUrl: "https://api.z.ai/api/coding/paas/v4",
    model: "glm-4.7"
  },
  zai_direct: {
    baseUrl: "https://api.z.ai/v1",
    // Anthropic-compatible endpoint
    model: "sonnet"
  },
  bedrock: {
    baseUrl: "",
    // Constructed from region
    model: "sonnet"
    // Will be mapped to anthropic.claude-sonnet-4-20250514-v1:0
  },
  azure: {
    baseUrl: "",
    // Must be provided (e.g., https://your-resource.openai.azure.com)
    model: "gpt-5.2"
  },
  claude_cli: {
    baseUrl: "",
    // Not needed - uses CLI subscription
    model: "sonnet"
  },
  krisspy: {
    baseUrl: "https://api.krisspy.ai",
    // Krisspy AI proxy with credit billing
    model: "sonnet"
  }
};
2622
// Module-level singleton state for the local Anthropic-compatible proxy.
// The HTTP server created by startProxyServer (null when stopped).
var proxyServer = null;
// OS-assigned port the proxy listens on (0 when stopped).
var proxyPort = 0;
// Provider adapter currently backing the proxy (null when stopped).
var currentProxyProvider = null;
// Config the current provider was built with; compared by startProxyServer
// to decide whether the running proxy can be reused.
var currentProviderConfig = null;
// Last upstream error message seen; used to de-duplicate error logging.
var lastProxyError = null;
2627
/**
 * Upload a base64-encoded document to the Anthropic Files API and return
 * the resulting file id.
 *
 * @param {object} fileData - { data: base64 string, media_type?, type? }.
 * @param {string} apiKey - Anthropic API key.
 * @param {string} [baseUrl] - API origin, defaults to the Anthropic endpoint.
 * @returns {Promise<string>} the uploaded file's id.
 * @throws {Error} when the upload request fails.
 */
async function uploadFileToFilesAPI(fileData, apiKey, baseUrl = "https://api.anthropic.com") {
  const form = new import_form_data6.default();
  const buffer = Buffer.from(fileData.data, "base64");
  // Pick an upload filename from the media type (or the explicit `type`
  // hint); first matching rule wins, generic "document" otherwise.
  const namingRules = [
    ["document.pdf", (mt, t) => mt?.includes("pdf")],
    ["document.xlsx", (mt, t) => mt?.includes("spreadsheet") || t === "xlsx"],
    ["document.docx", (mt, t) => mt?.includes("wordprocessing") || t === "docx"],
    ["document.pptx", (mt, t) => mt?.includes("presentation") || t === "pptx"],
    ["document.csv", (mt, t) => t === "csv"]
  ];
  const matched = namingRules.find(([, test]) => test(fileData.media_type, fileData.type));
  const filename = matched ? matched[0] : "document";
  form.append("file", buffer, { filename, contentType: fileData.media_type || "application/octet-stream" });
  const response = await (0, import_node_fetch13.default)(`${baseUrl}/v1/files`, {
    method: "POST",
    headers: {
      "x-api-key": apiKey,
      "anthropic-version": "2023-06-01",
      "anthropic-beta": "files-api-2025-04-14",
      ...form.getHeaders()
    },
    body: form
  });
  if (!response.ok) {
    const errorText = await response.text();
    throw new Error(`Files API upload failed: ${response.status} - ${errorText}`);
  }
  const result = await response.json();
  return result.id;
}
2654
/**
 * Start (or reuse) a local HTTP proxy that accepts Anthropic-style
 * POST /v1/messages requests and forwards them to the configured provider,
 * translating request and response formats via the provider adapter.
 *
 * @param {string} providerName - key understood by getProvider().
 * @param {object} config - provider config; apiKey/baseUrl are compared to
 *   decide whether the already-running proxy can be reused.
 * @returns {Promise<number>} the localhost port the proxy listens on.
 */
async function startProxyServer(providerName, config) {
  // Reuse the running proxy when the provider and its key/baseUrl match.
  if (proxyServer && currentProxyProvider?.getName() === providerName) {
    const sameConfig = currentProviderConfig?.apiKey === config.apiKey && currentProviderConfig?.baseUrl === config.baseUrl;
    if (sameConfig) {
      return proxyPort;
    }
  }
  // Provider or config changed: tear down the old proxy first.
  if (proxyServer) {
    await stopProxyServer();
  }
  currentProxyProvider = getProvider(providerName, config);
  currentProviderConfig = config;
  return new Promise((resolve, reject) => {
    proxyServer = (0, import_http.createServer)(async (req, res) => {
      // Only the Anthropic messages endpoint is proxied.
      if (req.method !== "POST" || !req.url?.includes("/v1/messages")) {
        res.writeHead(404);
        res.end("Not found");
        return;
      }
      let body = "";
      req.on("data", (chunk) => {
        body += chunk;
      });
      req.on("end", async () => {
        try {
          const anthropicRequest = JSON.parse(body);
          // Translate the Anthropic request into the provider's native shape.
          const providerRequest = currentProxyProvider.buildRequest(anthropicRequest);
          const requestBody = JSON.stringify(providerRequest);
          // Bedrock requires SigV4-signed headers computed over the exact body.
          const headers = currentProxyProvider.getName() === "bedrock" ? currentProxyProvider.getSignedHeaders(requestBody) : currentProxyProvider.getHeaders();
          const endpoint = currentProxyProvider.getEndpoint();
          console.log(`[krisspy-ai] Proxy forwarding to: ${endpoint}`);
          console.log(`[krisspy-ai] Headers:`, JSON.stringify(headers, null, 2));
          console.log(`[krisspy-ai] Body preview:`, requestBody.substring(0, 200));
          const response = await (0, import_node_fetch13.default)(endpoint, {
            method: "POST",
            headers,
            body: requestBody
          });
          console.log(`[krisspy-ai] Response status: ${response.status}`);
          if (!response.ok) {
            const errorText = await response.text();
            let errorMessage = errorText;
            // Prefer the provider's structured error message when present.
            try {
              const parsed = JSON.parse(errorText);
              errorMessage = parsed.error?.message || errorText;
            } catch {
            }
            // Log each distinct upstream error only once (lastProxyError dedup).
            if (lastProxyError !== errorMessage) {
              lastProxyError = errorMessage;
              console.error(`[krisspy-ai] Proxy error: ${errorMessage}`);
            }
            // Map HTTP status codes onto Anthropic error type names.
            const errorType = response.status === 401 ? "authentication_error" : response.status === 403 ? "permission_error" : response.status === 404 ? "not_found_error" : response.status === 429 ? "rate_limit_error" : "api_error";
            res.writeHead(response.status, { "Content-Type": "application/json" });
            res.end(JSON.stringify({
              type: "error",
              error: { type: errorType, message: errorMessage }
            }));
            return;
          }
          const messageId = `msg_${Date.now()}`;
          // Streaming is the default unless the client explicitly disabled it.
          const stream = anthropicRequest.stream !== false;
          if (stream && response.body) {
            // The provider adapter re-emits the upstream stream as
            // Anthropic-style SSE events directly onto `res`.
            await currentProxyProvider.handleStreamingResponse(
              response,
              res,
              messageId,
              anthropicRequest.model
            );
            res.end();
          } else {
            // Non-streaming: translate the full provider response in one shot.
            const data = await response.json();
            const anthropicResponse = currentProxyProvider.convertResponse(data, messageId, anthropicRequest.model);
            res.writeHead(200, { "Content-Type": "application/json" });
            res.end(JSON.stringify(anthropicResponse));
          }
        } catch (error) {
          const errorMessage = error.message;
          lastProxyError = errorMessage;
          console.error(`[krisspy-ai] Proxy error: ${errorMessage}`);
          res.writeHead(500, { "Content-Type": "application/json" });
          res.end(JSON.stringify({
            type: "error",
            error: { type: "api_error", message: errorMessage }
          }));
        }
      });
    });
    // Port 0: let the OS pick a free port, then record it in proxyPort.
    proxyServer.listen(0, "127.0.0.1", () => {
      const addr = proxyServer.address();
      proxyPort = typeof addr === "object" && addr ? addr.port : 0;
      console.log(`[krisspy-ai] Proxy started on port ${proxyPort} for ${providerName}`);
      resolve(proxyPort);
    });
    proxyServer.on("error", reject);
  });
}
2750
/**
 * Shuts down the local translation proxy (if one is running) and clears the
 * module-level proxy state. Resolves once the server socket has fully closed;
 * resolves immediately when no proxy is active.
 */
async function stopProxyServer() {
  if (!proxyServer) {
    return;
  }
  await new Promise((done) => {
    proxyServer.close(() => {
      // Reset the shared proxy state only after the server has finished closing.
      proxyServer = null;
      currentProxyProvider = null;
      currentProviderConfig = null;
      proxyPort = 0;
      done();
    });
  });
}
2763
/**
 * Main streaming entry point: runs a prompt through the Claude Agent SDK,
 * optionally routing non-Anthropic providers (openai/zai/gemini/azure) through
 * a local translation proxy and attaching images/files to the prompt.
 *
 * @param {Object} params
 * @param {string|AsyncIterable} params.prompt - User prompt (string, or an async
 *   iterable passed through untouched).
 * @param {Object} [params.options] - Provider selection, credentials, model,
 *   attachments, plus pass-through options for the underlying SDK `query`.
 * @yields SDK events; on failure yields a single `{ type: "result", subtype: "error" }`
 *   event instead of throwing.
 */
async function* query(params) {
  const { prompt, options = {} } = params;
  // Split krisspy-specific options from the ones forwarded to the SDK verbatim.
  const {
    attachments,
    provider: explicitProvider,
    apiKey,
    baseUrl,
    model: explicitModel,
    maxThinkingTokens,
    ...claudeOptions
  } = options;
  const provider = (explicitProvider || "claude_cli").toLowerCase();
  const images = attachments?.images || [];
  const files = attachments?.files || [];
  // Fall back to per-provider defaults for base URL and model.
  const defaults = PROVIDER_DEFAULTS[provider] || PROVIDER_DEFAULTS.claude_cli;
  const finalBaseUrl = baseUrl || defaults.baseUrl;
  const finalModel = explicitModel || defaults.model;
  // These providers speak a non-Anthropic API and need the local proxy.
  const needsProxy = ["openai", "zai", "gemini", "azure"].includes(provider);
  const providerConfig = {
    apiKey,
    baseUrl: finalBaseUrl,
    defaultModel: finalModel,
    // AWS Bedrock specific
    accessKeyId: options.accessKeyId,
    secretAccessKey: options.secretAccessKey,
    region: options.region || "us-west-2",
    // Azure specific
    deploymentName: options.deploymentName,
    apiVersion: options.apiVersion
  };
  if (provider === "bedrock") {
    if (!providerConfig.accessKeyId || !providerConfig.secretAccessKey) {
      throw new Error(`[krisspy-ai] AWS credentials required for Bedrock. Pass accessKeyId and secretAccessKey via options.`);
    }
  }
  let port = 0;
  if (needsProxy) {
    // Validate credentials before paying the cost of starting the proxy.
    if (provider === "azure") {
      if (!apiKey) {
        throw new Error(`[krisspy-ai] API key required for Azure. Pass it via options.apiKey`);
      }
      if (!finalBaseUrl) {
        throw new Error(`[krisspy-ai] Base URL required for Azure. Pass it via options.baseUrl (e.g., https://your-resource.openai.azure.com)`);
      }
    } else if (!apiKey) {
      throw new Error(`[krisspy-ai] API key required for provider '${provider}'. Pass it via options.apiKey`);
    }
    port = await startProxyServer(provider, providerConfig);
  }
  // Build a minimal environment for the SDK subprocess: only essential system
  // vars are copied so stray ANTHROPIC_*/AWS_* vars from the host can't leak in.
  const systemEnvKeys = ["PATH", "HOME", "USER", "SHELL", "TERM", "LANG", "LC_ALL", "TMPDIR", "TMP", "TEMP"];
  const cleanSystemEnv = {};
  for (const key of systemEnvKeys) {
    if (process.env[key]) {
      cleanSystemEnv[key] = process.env[key];
    }
  }
  // Provider-specific environment overrides layered on top of the clean env.
  const providerEnv = {};
  if (provider === "anthropic") {
    if (apiKey) providerEnv.ANTHROPIC_API_KEY = apiKey;
    providerEnv.ANTHROPIC_BASE_URL = finalBaseUrl;
  } else if (provider === "zai_direct") {
    if (apiKey) providerEnv.ANTHROPIC_API_KEY = apiKey;
    providerEnv.ANTHROPIC_BASE_URL = finalBaseUrl;
  } else if (provider === "krisspy") {
    if (!apiKey) {
      throw new Error(`[krisspy-ai] Krisspy API key required. Pass it via options.apiKey`);
    }
    providerEnv.ANTHROPIC_API_KEY = apiKey;
    const baseUrlWithoutTrailingSlash = finalBaseUrl.replace(/\/$/, "");
    providerEnv.ANTHROPIC_BASE_URL = `${baseUrlWithoutTrailingSlash}/api/v1/ai`;
    console.log(`[krisspy-ai] Using Krisspy provider with base URL ${providerEnv.ANTHROPIC_BASE_URL}`);
  } else if (provider === "bedrock") {
    providerEnv.AWS_ACCESS_KEY_ID = providerConfig.accessKeyId;
    providerEnv.AWS_SECRET_ACCESS_KEY = providerConfig.secretAccessKey;
    providerEnv.AWS_DEFAULT_REGION = providerConfig.region || "us-west-2";
    // Map short model aliases to full Bedrock model IDs.
    const bedrockModelMap = {
      // Haiku aliases -> Haiku 4.5 (global endpoint)
      "haiku": "global.anthropic.claude-haiku-4-5-20251001-v1:0",
      "haiku-4.5": "global.anthropic.claude-haiku-4-5-20251001-v1:0",
      // Sonnet aliases -> Sonnet 4 (global endpoint)
      "sonnet": "global.anthropic.claude-sonnet-4-20250514-v1:0",
      "sonnet-4": "global.anthropic.claude-sonnet-4-20250514-v1:0",
      "sonnet-4.5": "us.anthropic.claude-sonnet-4-5-20250929-v1:0",
      // Opus aliases -> Opus 4 / 4.5 (global endpoint)
      "opus": "global.anthropic.claude-opus-4-5-20251101-v1:0",
      "opus-4": "global.anthropic.claude-opus-4-20250514-v1:0",
      "opus-4.5": "global.anthropic.claude-opus-4-5-20251101-v1:0"
      // Also support full model IDs passed directly
    };
    const mappedModel = bedrockModelMap[finalModel] || finalModel;
    providerEnv.ANTHROPIC_MODEL = mappedModel;
    // NOTE(review): no CLAUDE_CODE_USE_BEDROCK-style flag is set here — confirm
    // the SDK selects Bedrock from these AWS_* vars alone.
    console.log(`[krisspy-ai] Bedrock native mode: model=${mappedModel}, region=${providerConfig.region}`);
  } else if (needsProxy) {
    // Point the SDK at the local proxy; key may be a placeholder since the
    // proxy injects the real provider credentials.
    providerEnv.ANTHROPIC_API_KEY = apiKey || "proxy-key";
    providerEnv.ANTHROPIC_BASE_URL = `http://127.0.0.1:${port}`;
  }
  // Office documents require the Skills betas, which only work without a proxy.
  const hasOfficeFiles = files.some((f) => ["xlsx", "docx", "pptx"].includes(f.type));
  const useSkills = hasOfficeFiles && !needsProxy;
  const queryOptions = {
    allowedTools: ["Read", "Edit", "Bash", "Glob", "Grep", "WebFetch", "WebSearch"],
    permissionMode: "acceptEdits",
    includePartialMessages: true,
    systemPrompt: { type: "preset", preset: "claude_code" },
    model: finalModel,
    // Use resolved model with default
    ...claudeOptions,
    env: {
      ...cleanSystemEnv,
      // Only system vars (PATH, HOME, etc.)
      ...providerEnv,
      // Our explicit provider config
      ...claudeOptions.env || {}
    }
  };
  if (useSkills) {
    console.log("[krisspy-ai] Enabling Skills betas for Office files...");
    // Merge caller-supplied betas with the Skills betas, de-duplicated.
    const existingBetas = queryOptions.betas || [];
    const skillsBetas = ["code-execution-2025-08-25", "skills-2025-10-02"];
    queryOptions.betas = [.../* @__PURE__ */ new Set([...existingBetas, ...skillsBetas])];
  }
  if (maxThinkingTokens !== void 0 && maxThinkingTokens > 0) {
    queryOptions.maxThinkingTokens = maxThinkingTokens;
  }
  // When attachments exist with a string prompt, build a structured content
  // array (images/documents/text) and wrap it in a one-shot async generator.
  let finalPrompt;
  if (typeof prompt === "string" && (images.length > 0 || files.length > 0)) {
    const content = [];
    for (const img of images) {
      if (img.type === "url") {
        content.push({ type: "image", source: { type: "url", url: img.url } });
      } else if (img.type === "base64") {
        content.push({
          type: "image",
          source: { type: "base64", media_type: img.media_type || "image/jpeg", data: img.data }
        });
      }
    }
    const officeFiles = [];
    for (const file of files) {
      if (file.type === "pdf") {
        content.push({
          type: "document",
          source: { type: "base64", media_type: "application/pdf", data: file.data }
        });
      } else if (["xlsx", "docx", "pptx", "csv"].includes(file.type)) {
        officeFiles.push(file);
      } else {
        // Unsupported file types are surfaced to the model as a text note.
        content.push({
          type: "text",
          text: `[Note: A ${file.type?.toUpperCase() || "unknown"} file was attached but this format is not supported.]`
        });
      }
    }
    if (officeFiles.length > 0 && !needsProxy) {
      // Direct-to-Anthropic path: upload office files via the Files API.
      const filesApiKey = apiKey || "";
      const filesBaseUrl = baseUrl || "https://api.anthropic.com";
      if (filesApiKey && !filesBaseUrl.includes("127.0.0.1") && !filesBaseUrl.includes("localhost")) {
        for (const file of officeFiles) {
          try {
            console.log(`[krisspy-ai] Uploading ${file.type.toUpperCase()} via Files API...`);
            const fileId = await uploadFileToFilesAPI(file, filesApiKey, filesBaseUrl);
            console.log(`[krisspy-ai] File uploaded: ${fileId}`);
            content.push({ type: "document", source: { type: "file", file_id: fileId } });
          } catch (err) {
            // Upload failure degrades to a text note rather than aborting the query.
            console.error(`[krisspy-ai] Failed to upload ${file.type}:`, err.message);
            content.push({
              type: "text",
              text: `[Note: Failed to upload ${file.type.toUpperCase()} file: ${err.message}]`
            });
          }
        }
      }
      // NOTE(review): when the Files API is unusable (no key, or a local base
      // URL) the office files are silently omitted from the prompt — there is
      // no else branch here. Confirm this is intentional.
    } else if (officeFiles.length > 0) {
      // Proxy path: inline office files as base64 documents instead.
      for (const file of officeFiles) {
        content.push({
          type: "document",
          source: { type: "base64", media_type: file.media_type, data: file.data }
        });
      }
    }
    content.push({ type: "text", text: prompt });
    finalPrompt = (async function* () {
      yield { type: "user", message: { role: "user", content } };
    })();
  } else {
    finalPrompt = prompt;
  }
  console.log(`[krisspy-ai] Using provider: ${provider}${needsProxy ? ` (proxy on port ${port})` : ""}`);
  // lastProxyError is module-level state written by the proxy's request
  // handler; clear it so a stale error from a previous run can't surface here.
  lastProxyError = null;
  try {
    for await (const event of (0, import_claude_agent_sdk.query)({ prompt: finalPrompt, options: queryOptions })) {
      // If the proxy reported an error while this event was in flight, emit a
      // synthetic error result and stop instead of forwarding the event.
      if (lastProxyError) {
        yield {
          type: "result",
          subtype: "error",
          is_error: true,
          errors: [lastProxyError]
        };
        lastProxyError = null;
        return;
      }
      yield event;
    }
  } catch (error) {
    // Prefer the proxy's (more specific) error message over the SDK's.
    const errorMessage = lastProxyError || error.message;
    lastProxyError = null;
    yield {
      type: "result",
      subtype: "error",
      is_error: true,
      errors: [errorMessage]
    };
  }
}
2976
/**
 * Releases resources held by this module. Currently this just stops the
 * local proxy server, if one is running.
 */
async function cleanup() {
  return stopProxyServer();
}
2979
/**
 * Generates image(s) from a text prompt via the configured image service.
 * Defaults to the "azure" service when none is specified.
 *
 * @param {Object} options - Service selection/credentials plus generation
 *   parameters (prompt, n, size, quality, style, responseFormat, user).
 * @returns The service's generation result.
 */
async function generateImage(options) {
  const serviceConfig = {
    apiKey: options.apiKey,
    baseUrl: options.baseUrl,
    deploymentName: options.deploymentName,
    apiVersion: options.apiVersion
  };
  const imageService = getImageService(options.service || "azure", serviceConfig);
  const { prompt, n, size, quality, style, responseFormat, user } = options;
  return imageService.generate({ prompt, n, size, quality, style, responseFormat, user });
}
2996
/**
 * Generates a video via the configured video service (default "azure"),
 * streaming intermediate status updates while polling.
 *
 * @param {Object} options - Service selection/credentials plus generation
 *   parameters (prompt, duration, width, height, nVariants, user,
 *   referenceImage, referenceImageFormat, pollInterval, timeout).
 * @yields `{ type: "status", status }` while the job is in progress, then a
 *   final `{ type: "result", data }` with the generator's return value.
 */
async function* generateVideo(options) {
  const videoService = getVideoService(options.service || "azure", {
    apiKey: options.apiKey,
    baseUrl: options.baseUrl,
    deploymentName: options.deploymentName,
    apiVersion: options.apiVersion
  });
  const request = {
    prompt: options.prompt,
    duration: options.duration,
    width: options.width,
    height: options.height,
    nVariants: options.nVariants,
    user: options.user,
    referenceImage: options.referenceImage,
    referenceImageFormat: options.referenceImageFormat
  };
  const job = videoService.generateAndWait(request, options.pollInterval, options.timeout);
  // Relay each intermediate status; the generator's `return` value (carried on
  // the final `done` step) is the finished result.
  let step = await job.next();
  while (!step.done) {
    yield { type: "status", status: step.value };
    step = await job.next();
  }
  yield { type: "result", data: step.value };
}
3023
/**
 * Transcribes audio via the configured speech-to-text service.
 * Defaults to the "openai" service when none is specified.
 *
 * @param {Object} options - Service selection/credentials plus transcription
 *   parameters (audio, language, prompt, responseFormat, temperature,
 *   timestampGranularities).
 * @returns The service's transcription result.
 */
async function transcribe(options) {
  const sttService = getTranscriptionService(options.service || "openai", {
    apiKey: options.apiKey,
    baseUrl: options.baseUrl,
    deploymentName: options.deploymentName,
    apiVersion: options.apiVersion
  });
  const { audio, language, prompt, responseFormat, temperature, timestampGranularities } = options;
  return sttService.transcribe({
    audio,
    language,
    prompt,
    responseFormat,
    temperature,
    timestampGranularities
  });
}
3039
/**
 * Synthesizes speech from text via the configured TTS service.
 * Defaults to the "openai" service when none is specified.
 *
 * @param {Object} options - Service selection/credentials plus synthesis
 *   parameters (input, voice, responseFormat, speed, instructions).
 * @returns The service's synthesis result.
 */
async function synthesize(options) {
  const ttsService = getTTSService(options.service || "openai", {
    apiKey: options.apiKey,
    baseUrl: options.baseUrl,
    deploymentName: options.deploymentName,
    apiVersion: options.apiVersion
  });
  const { input, voice, responseFormat, speed, instructions } = options;
  return ttsService.synthesize({ input, voice, responseFormat, speed, instructions });
}
3054
// Annotate the CommonJS export names for ESM import in node:
// The `0 &&` makes this assignment dead code at runtime; it exists only so
// static analyzers (Node's CJS named-export detection, bundlers) can see the
// export names. Do not remove or "simplify" it.
0 && (module.exports = {
  AzureDalleService,
  AzureProvider,
  AzureSoraService,
  AzureTTSService,
  AzureTranscriptionService,
  BaseGenerativeService,
  BaseImageService,
  BaseProvider,
  BaseTTSService,
  BaseTranscriptionService,
  BaseVideoService,
  BedrockProvider,
  GeminiProvider,
  KrisspyProvider,
  OpenAIImageService,
  OpenAIProvider,
  OpenAISoraService,
  OpenAITTSService,
  OpenAITranscriptionService,
  ZAIProvider,
  cleanup,
  generateImage,
  generateVideo,
  getAvailableImageServices,
  getAvailableProviders,
  getAvailableTTSServices,
  getAvailableTranscriptionServices,
  getAvailableVideoServices,
  getImageService,
  getProvider,
  getTTSService,
  getTranscriptionService,
  getVideoService,
  query,
  registerImageService,
  registerProvider,
  registerTTSService,
  registerTranscriptionService,
  registerVideoService,
  synthesize,
  transcribe,
  ...require("@anthropic-ai/claude-agent-sdk")
});
//# sourceMappingURL=index.js.map