ai-protocol-adapters 1.0.0-alpha.2 → 1.0.0-alpha.21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -397,523 +397,463 @@ function getGlobalLogger() {
397
397
  return globalLogger;
398
398
  }
399
399
 
400
- // src/core/streaming/streaming-protocol-adapter.ts
401
- var StreamingProtocolAdapter = class {
402
- constructor(options = {}) {
403
- this.config = {
404
- debugMode: options.debugMode ?? false,
405
- validateInput: options.validateInput ?? false,
406
- validateOutput: options.validateOutput ?? false,
407
- autoHeal: options.autoHeal ?? false,
408
- timeout: options.timeout ?? 3e4,
409
- retries: options.retries ?? 3,
410
- bufferSize: options.bufferSize ?? 1024,
411
- logger: options.logger ?? getGlobalLogger()
412
- };
400
+ // src/core/a2o-request-adapter/config.ts
401
+ var DEFAULT_CONFIG = {
402
+ // 原有配置
403
+ debugMode: false,
404
+ maxDescriptionLength: 100,
405
+ enableToolNameValidation: true,
406
+ enableFormatValidation: true,
407
+ // 新增默认配置
408
+ validation: {
409
+ enabled: true,
410
+ strict: false,
411
+ // 默认开启自动修复
412
+ customSchemas: {}
413
+ },
414
+ healing: {
415
+ enabled: true,
416
+ maxAttempts: 3,
417
+ enableCustomRules: true
418
+ },
419
+ recovery: {
420
+ enabled: true,
421
+ maxRetries: 2,
422
+ backoffMs: 1e3
423
+ },
424
+ monitoring: {
425
+ enabled: false,
426
+ logLevel: "warn",
427
+ enableMetrics: false
428
+ },
429
+ imageProxy: {
430
+ enabled: true,
431
+ // 默认启用图片代理(解决GitHub Copilot等不支持外部URL的问题)
432
+ timeout: 1e4,
433
+ // 10秒超时
434
+ maxSize: 10 * 1024 * 1024
435
+ // 10MB最大文件大小
413
436
  }
437
+ };
438
+ var SUPPORTED_IMAGE_TYPES = [
439
+ "image/jpeg",
440
+ "image/png",
441
+ "image/gif",
442
+ "image/webp"
443
+ ];
444
+ var TOOL_CONVERSION = {
414
445
  /**
415
- * 转换Anthropic请求为OpenAI格式
446
+ * 终极泛化:完全移除工具名称映射
447
+ * 基于GitHub Copilot API测试结果,100%保持原始格式
416
448
  */
417
- convertAnthropicToOpenAI(anthropicRequest) {
418
- const logger = this.config.logger;
419
- if (this.config.debugMode) {
420
- logger.debug("Converting Anthropic request to OpenAI format", { model: anthropicRequest.model });
449
+ PRESERVE_ORIGINAL_NAMES: true,
450
+ /**
451
+ * 默认工具描述
452
+ */
453
+ DEFAULT_DESCRIPTION: "Tool description",
454
+ /**
455
+ * 未知工具回退名称
456
+ */
457
+ UNKNOWN_TOOL_FALLBACK: "unknown_tool"
458
+ };
459
+
460
+ // src/core/a2o-request-adapter/image-proxy.ts
461
+ var SUPPORTED_IMAGE_MIME_TYPES = [
462
+ "image/jpeg",
463
+ "image/png",
464
+ "image/gif",
465
+ "image/webp"
466
+ ];
467
+ async function downloadImageAsBase64(url, options = {}) {
468
+ const {
469
+ timeout = 1e4,
470
+ maxSize = 10 * 1024 * 1024,
471
+ // 10MB
472
+ userAgent = "ai-protocol-adapters/1.0"
473
+ } = options;
474
+ try {
475
+ const controller = new AbortController();
476
+ const timeoutId = setTimeout(() => controller.abort(), timeout);
477
+ const response = await fetch(url, {
478
+ signal: controller.signal,
479
+ headers: {
480
+ "User-Agent": userAgent
481
+ }
482
+ });
483
+ clearTimeout(timeoutId);
484
+ if (!response.ok) {
485
+ throw new Error(`HTTP ${response.status}: ${response.statusText}`);
421
486
  }
422
- const openaiRequest = {
423
- model: this.mapAnthropicModelToOpenAI(anthropicRequest.model),
424
- messages: this.convertMessages(anthropicRequest.messages),
425
- stream: anthropicRequest.stream ?? true,
426
- temperature: anthropicRequest.temperature,
427
- max_tokens: anthropicRequest.max_tokens
428
- };
429
- if (anthropicRequest.tools) {
430
- openaiRequest.tools = anthropicRequest.tools.map((tool) => ({
431
- type: "function",
432
- function: {
433
- name: tool.name,
434
- description: tool.description,
435
- parameters: tool.input_schema
436
- }
437
- }));
487
+ const contentType = response.headers.get("content-type");
488
+ if (!contentType || !SUPPORTED_IMAGE_MIME_TYPES.some((type) => contentType.includes(type))) {
489
+ throw new Error(`Unsupported content type: ${contentType}`);
438
490
  }
439
- const hasImages = this.hasImageContent(anthropicRequest);
440
- return {
441
- openaiRequest,
442
- metadata: {
443
- hasImages,
444
- requiresVisionHeaders: hasImages
445
- }
446
- };
491
+ const contentLength = response.headers.get("content-length");
492
+ if (contentLength && parseInt(contentLength) > maxSize) {
493
+ throw new Error(`Image too large: ${contentLength} bytes (max: ${maxSize} bytes)`);
494
+ }
495
+ const arrayBuffer = await response.arrayBuffer();
496
+ if (arrayBuffer.byteLength > maxSize) {
497
+ throw new Error(`Image too large: ${arrayBuffer.byteLength} bytes (max: ${maxSize} bytes)`);
498
+ }
499
+ const base64 = Buffer.from(arrayBuffer).toString("base64");
500
+ return `data:${contentType};base64,${base64}`;
501
+ } catch (error) {
502
+ if (error.name === "AbortError") {
503
+ throw new Error(`Image download timeout after ${timeout}ms`);
504
+ }
505
+ throw new Error(`Failed to download image from ${url}: ${error.message}`);
447
506
  }
507
+ }
508
+ function isExternalUrl(url) {
509
+ return url.startsWith("http://") || url.startsWith("https://");
510
+ }
511
+ function isBase64DataUri(url) {
512
+ return url.startsWith("data:");
513
+ }
514
+
515
+ // src/core/a2o-request-adapter/message-converter.ts
516
+ var MessageConverter = class {
448
517
  /**
449
- * 转换OpenAI流式响应为Anthropic SSE格式
518
+ * 转换消息格式,正确处理工具调用和工具结果
519
+ * 修复关键问题:将tool_use转换为tool_calls,tool_result转换为role:"tool"消息
520
+ * 使用tool_use_id溯回工具名称解决unknown_tool问题
450
521
  */
451
- convertOpenAIStreamToAnthropic(openaiStream, originalRequest) {
452
- const logger = this.config.logger;
453
- try {
454
- if (this.config.debugMode) {
455
- logger.debug("Converting OpenAI stream to Anthropic SSE", {
456
- streamLength: openaiStream.length,
457
- model: originalRequest.model
458
- });
522
+ static convertMessages(messages, system) {
523
+ const debugEnabled = process.env.AI_PROTOCOL_DEBUG === "true";
524
+ if (debugEnabled) {
525
+ if (system !== void 0) {
526
+ console.debug("[MessageConverter] convertMessages called with system:", JSON.stringify(system, null, 2));
527
+ } else {
528
+ console.debug("[MessageConverter] convertMessages called WITHOUT system parameter");
459
529
  }
460
- if (!openaiStream || openaiStream.trim() === "") {
461
- return {
462
- success: false,
463
- error: "Empty stream response",
464
- anthropicSSE: "",
465
- anthropicStandardResponse: null
466
- };
530
+ }
531
+ const context = this.createConversionContext(messages);
532
+ const convertedMessages = [];
533
+ for (const msg of messages) {
534
+ if (Array.isArray(msg.content)) {
535
+ const processedMessages = this.processComplexMessage(msg, context);
536
+ convertedMessages.push(...processedMessages);
537
+ } else {
538
+ const safeMsg = { ...msg };
539
+ if (safeMsg.content === null || safeMsg.content === void 0) {
540
+ safeMsg.content = "";
541
+ }
542
+ convertedMessages.push(safeMsg);
543
+ }
544
+ }
545
+ if (system) {
546
+ const systemMessage = this.processSystemMessage(system);
547
+ if (systemMessage) {
548
+ convertedMessages.unshift(systemMessage);
549
+ if (debugEnabled) {
550
+ console.debug("[MessageConverter] System message added to messages array at index 0");
551
+ }
467
552
  }
468
- const anthropicSSE = this.convertToAnthropicSSE(openaiStream, originalRequest.model);
469
- const anthropicStandardResponse = this.buildStandardResponse(openaiStream);
470
- return {
471
- success: true,
472
- anthropicSSE,
473
- anthropicStandardResponse
474
- };
475
- } catch (error) {
476
- const errorMessage = error instanceof Error ? error.message : "Unknown conversion error";
477
- logger.error("Stream conversion failed", { error: errorMessage });
478
- return {
479
- success: false,
480
- error: errorMessage,
481
- anthropicSSE: "",
482
- anthropicStandardResponse: null
483
- };
484
553
  }
554
+ if (debugEnabled) {
555
+ console.debug("[MessageConverter] Final converted messages count:", convertedMessages.length);
556
+ console.debug("[MessageConverter] First message:", JSON.stringify(convertedMessages[0], null, 2));
557
+ }
558
+ return convertedMessages.map((msg) => {
559
+ if (Array.isArray(msg.tools)) {
560
+ msg.tools = msg.tools.map((tool) => {
561
+ if (tool?.type === "function" && tool.function) {
562
+ const description = tool.function.description?.trim() || "Converted tool with no description provided.";
563
+ return {
564
+ ...tool,
565
+ function: {
566
+ ...tool.function,
567
+ description
568
+ }
569
+ };
570
+ }
571
+ return tool;
572
+ });
573
+ }
574
+ return msg;
575
+ });
485
576
  }
486
577
  /**
487
- * 将OpenAI流转换为Anthropic SSE格式
578
+ * 创建消息转换上下文
488
579
  */
489
- convertToAnthropicSSE(openaiStream, modelName) {
490
- const lines = openaiStream.split("\n");
491
- const sseLines = [];
492
- const state = this.createConversionState();
493
- sseLines.push(
494
- "event: message_start",
495
- `data: {"type":"message_start","message":{"id":"msg_${Date.now()}","type":"message","role":"assistant","model":"${modelName}","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":0,"output_tokens":0}}}`,
496
- ""
497
- );
498
- for (const line of lines) {
499
- if (line.startsWith("data:")) {
500
- const dataLine = line.substring(5);
501
- if (dataLine.trim() === "[DONE]") {
502
- this.addFinalEvents(state, sseLines);
503
- break;
504
- }
505
- try {
506
- const chunk = JSON.parse(dataLine);
507
- this.processStreamChunk(chunk, state, sseLines);
508
- } catch (error) {
509
- if (this.config.debugMode) {
510
- this.config.logger.warn("Failed to parse stream chunk", { line: dataLine.substring(0, 200) });
580
+ static createConversionContext(messages) {
581
+ const toolIdToNameMap = /* @__PURE__ */ new Map();
582
+ for (const msg of messages) {
583
+ if (Array.isArray(msg.content)) {
584
+ for (const item of msg.content) {
585
+ if (typeof item === "object" && item !== null && item.type === "tool_use") {
586
+ toolIdToNameMap.set(item.id, item.name);
511
587
  }
512
588
  }
513
589
  }
514
590
  }
515
- return sseLines.join("\n");
591
+ return {
592
+ toolIdToNameMap,
593
+ hasSystemMessage: false
594
+ };
516
595
  }
517
596
  /**
518
- * 处理单个流式数据块 - 支持thinking和content双模式
597
+ * 处理复杂消息(包含多种内容类型)
519
598
  */
520
- processStreamChunk(chunk, state, sseLines) {
521
- const choice = chunk.choices?.[0];
522
- if (!choice) return;
523
- const delta = choice.delta;
524
- if (delta.reasoning_content) {
525
- state.reasoningContent += delta.reasoning_content;
526
- if (!state.thinkingBlockStarted) {
527
- sseLines.push(
528
- "event: content_block_start",
529
- 'data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":"<thinking>"}}',
530
- ""
531
- );
532
- state.thinkingBlockStarted = true;
599
+ static processComplexMessage(msg, context) {
600
+ const { textContent, toolUses, toolResults } = this.categorizeContent(msg.content);
601
+ const resultMessages = [];
602
+ if (msg.role === "assistant" && toolUses.length > 0) {
603
+ const assistantMessage = this.createAssistantMessageWithToolCalls(textContent, toolUses);
604
+ resultMessages.push(assistantMessage);
605
+ } else if (toolResults.length > 0) {
606
+ const toolMessages = this.createToolResultMessages(toolResults, context.toolIdToNameMap);
607
+ resultMessages.push(...toolMessages);
608
+ const textMessage = this.createTextMessage(msg.role, textContent);
609
+ if (textMessage) {
610
+ resultMessages.push(textMessage);
533
611
  }
534
- sseLines.push(
535
- "event: content_block_delta",
536
- `data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"${this.escapeJsonString(delta.reasoning_content)}"}}`,
537
- ""
538
- );
539
- }
540
- if (delta.content && delta.content !== "") {
541
- if (state.thinkingBlockStarted && !state.contentBlockStarted) {
542
- sseLines.push(
543
- "event: content_block_delta",
544
- 'data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"</thinking>\\n\\n"}}',
545
- "",
546
- "event: content_block_stop",
547
- 'data: {"type":"content_block_stop","index":0}',
548
- "",
549
- "event: content_block_start",
550
- 'data: {"type":"content_block_start","index":1,"content_block":{"type":"text","text":""}}',
551
- ""
552
- );
553
- state.contentBlockStarted = true;
554
- } else if (!state.contentBlockStarted && !state.thinkingBlockStarted) {
555
- sseLines.push(
556
- "event: content_block_start",
557
- 'data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}}',
558
- ""
559
- );
560
- state.contentBlockStarted = true;
612
+ } else if (textContent.length > 0) {
613
+ const textMessage = this.createTextMessage(msg.role, textContent);
614
+ if (textMessage) {
615
+ resultMessages.push(textMessage);
561
616
  }
562
- state.textContent += delta.content;
563
- const blockIndex = state.thinkingBlockStarted ? 1 : 0;
564
- sseLines.push(
565
- "event: content_block_delta",
566
- `data: {"type":"content_block_delta","index":${blockIndex},"delta":{"type":"text_delta","text":"${this.escapeJsonString(delta.content)}"}}`,
567
- ""
568
- );
569
- }
570
- if (delta.tool_calls) {
571
- this.processToolCalls(delta.tool_calls, state, sseLines);
572
- }
573
- if (chunk.usage) {
574
- state.usage.input_tokens = chunk.usage.prompt_tokens;
575
- state.usage.output_tokens = chunk.usage.completion_tokens;
576
617
  }
618
+ return resultMessages;
577
619
  }
578
620
  /**
579
- * 处理工具调用
621
+ * 分类内容块
580
622
  */
581
- processToolCalls(toolCalls, state, sseLines) {
582
- for (const toolCall of toolCalls) {
583
- if (toolCall.id && toolCall.function?.name) {
584
- const toolData = {
585
- id: toolCall.id,
586
- name: toolCall.function.name,
587
- input: toolCall.function.arguments || ""
588
- };
589
- state.toolCallsMap.set(toolCall.id, toolData);
590
- sseLines.push(
591
- "event: content_block_start",
592
- `data: {"type":"content_block_start","index":${state.completedToolCalls.length + 1},"content_block":{"type":"tool_use","id":"${toolCall.id}","name":"${toolCall.function.name}","input":{}}}`,
593
- ""
594
- );
623
+ static categorizeContent(content) {
624
+ const textContent = [];
625
+ const toolUses = [];
626
+ const toolResults = [];
627
+ for (const item of content) {
628
+ if (typeof item === "string") {
629
+ textContent.push({ type: "text", text: item });
630
+ } else if (typeof item === "object" && item !== null) {
631
+ switch (item.type) {
632
+ case "text":
633
+ textContent.push(item);
634
+ break;
635
+ case "tool_use":
636
+ toolUses.push(item);
637
+ break;
638
+ case "tool_result":
639
+ toolResults.push(item);
640
+ break;
641
+ case "image":
642
+ const imageContent = this.convertImageContent(item);
643
+ if (imageContent) {
644
+ textContent.push(imageContent);
645
+ }
646
+ break;
647
+ }
595
648
  }
596
649
  }
650
+ return { textContent, toolUses, toolResults };
597
651
  }
598
652
  /**
599
- * 添加最终事件 - 支持thinking+content双模式
653
+ * 转换图片内容格式
654
+ * 支持两种格式:URL 和 base64
600
655
  */
601
- addFinalEvents(state, sseLines) {
602
- if (state.contentBlockStarted) {
603
- const blockIndex = state.thinkingBlockStarted ? 1 : 0;
604
- sseLines.push(
605
- "event: content_block_stop",
606
- `data: {"type":"content_block_stop","index":${blockIndex}}`,
607
- ""
608
- );
609
- } else if (state.thinkingBlockStarted) {
610
- sseLines.push(
611
- "event: content_block_delta",
612
- 'data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"</thinking>"}}',
613
- "",
614
- "event: content_block_stop",
615
- 'data: {"type":"content_block_stop","index":0}',
616
- ""
617
- );
656
+ static convertImageContent(item) {
657
+ if (!item.source) {
658
+ return null;
618
659
  }
619
- sseLines.push(
620
- "event: message_delta",
621
- `data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"output_tokens":${state.usage.output_tokens}}}`,
622
- "",
623
- "event: message_stop",
624
- 'data: {"type":"message_stop"}',
625
- ""
626
- );
627
- }
628
- /**
629
- * 构建标准响应格式
630
- */
631
- buildStandardResponse(openaiStream) {
632
- const state = this.createConversionState();
633
- const lines = openaiStream.split("\n");
634
- for (const line of lines) {
635
- if (line.startsWith("data: ")) {
636
- const dataLine = line.substring(6);
637
- if (dataLine.trim() === "[DONE]") break;
638
- try {
639
- const chunk = JSON.parse(dataLine);
640
- const choice = chunk.choices?.[0];
641
- if (!choice) continue;
642
- const delta = choice.delta;
643
- if (delta.content) {
644
- state.textContent += delta.content;
645
- }
646
- if (chunk.usage) {
647
- state.usage.input_tokens = chunk.usage.prompt_tokens;
648
- state.usage.output_tokens = chunk.usage.completion_tokens;
649
- }
650
- } catch (error) {
660
+ if (item.source.type === "url" && item.source.url) {
661
+ return {
662
+ type: "image_url",
663
+ image_url: {
664
+ url: item.source.url,
665
+ detail: "auto"
666
+ // OpenAI 支持的可选参数
651
667
  }
652
- }
668
+ };
653
669
  }
654
- return {
655
- id: `msg_${Date.now()}`,
656
- type: "message",
657
- role: "assistant",
658
- content: [
659
- {
660
- type: "text",
661
- text: state.textContent
670
+ if (item.source.type === "base64" && item.source.data && item.source.media_type) {
671
+ if (!SUPPORTED_IMAGE_TYPES.includes(item.source.media_type)) {
672
+ console.warn(`\u4E0D\u652F\u6301\u7684\u56FE\u7247\u683C\u5F0F: ${item.source.media_type}`);
673
+ return null;
674
+ }
675
+ const dataUri = `data:${item.source.media_type};base64,${item.source.data}`;
676
+ return {
677
+ type: "image_url",
678
+ image_url: {
679
+ url: dataUri,
680
+ detail: "auto"
662
681
  }
663
- ],
664
- model: "claude-3-sonnet-20240229",
665
- stop_reason: "end_turn",
666
- stop_sequence: null,
667
- usage: state.usage
668
- };
682
+ };
683
+ }
684
+ return null;
669
685
  }
670
686
  /**
671
- * 创建转换状态对象
687
+ * 创建包含工具调用的助手消息
672
688
  */
673
- createConversionState() {
674
- return {
675
- processedLines: 0,
676
- textContent: "",
677
- reasoningContent: "",
678
- toolCallsMap: /* @__PURE__ */ new Map(),
679
- completedToolCalls: [],
680
- allSSELines: [],
681
- errors: [],
682
- usage: {
683
- input_tokens: 0,
684
- output_tokens: 0
685
- },
686
- thinkingBlockStarted: false,
687
- contentBlockStarted: false
689
+ static createAssistantMessageWithToolCalls(textContent, toolUses) {
690
+ const assistantMessage = {
691
+ role: "assistant",
692
+ content: ""
693
+ // 默认为空字符串,避免null值
688
694
  };
689
- }
690
- /**
691
- * 转换消息格式
692
- */
693
- convertMessages(messages) {
694
- return messages.map((msg) => ({
695
- role: msg.role,
696
- content: msg.content
697
- }));
698
- }
699
- /**
700
- * 映射Anthropic模型到OpenAI模型
701
- */
702
- mapAnthropicModelToOpenAI(model) {
703
- const supportedModels = [
704
- "glm-4.5",
705
- "kimi-k2",
706
- "deepseek-v3.1",
707
- "deepseek-r1",
708
- "deepseek-v3",
709
- "qwen3-32b",
710
- "qwen3-coder",
711
- "qwen3-235b",
712
- "tstars2.0"
713
- ];
714
- if (supportedModels.includes(model)) {
715
- return model;
695
+ if (textContent.length > 0) {
696
+ const textOnly = textContent.map((item) => item.text || "").join("");
697
+ if (textOnly.trim()) {
698
+ assistantMessage.content = textOnly.trim();
699
+ }
716
700
  }
717
- const mapping = {
718
- "claude-3-sonnet-20240229": "glm-4.5",
719
- "claude-3-haiku-20240307": "kimi-k2",
720
- "claude-3-opus-20240229": "deepseek-v3.1"
721
- };
722
- return mapping[model] || "glm-4.5";
701
+ assistantMessage.tool_calls = toolUses.map((toolUse) => ({
702
+ id: toolUse.id,
703
+ type: "function",
704
+ function: {
705
+ name: toolUse.name,
706
+ arguments: JSON.stringify(toolUse.input || {})
707
+ }
708
+ }));
709
+ return assistantMessage;
723
710
  }
724
711
  /**
725
- * 检查请求是否包含图片内容
712
+ * 创建工具结果消息
726
713
  */
727
- hasImageContent(request) {
728
- return request.messages.some(
729
- (msg) => Array.isArray(msg.content) && msg.content.some((content) => content?.type === "image")
730
- );
714
+ static createToolResultMessages(toolResults, toolIdToNameMap) {
715
+ return toolResults.map((toolResult) => {
716
+ let resultContent = "No content";
717
+ if (toolResult.content) {
718
+ if (typeof toolResult.content === "string") {
719
+ resultContent = toolResult.content;
720
+ } else {
721
+ resultContent = JSON.stringify(toolResult.content, null, 2);
722
+ }
723
+ }
724
+ const toolName = toolIdToNameMap.get(toolResult.tool_use_id) || TOOL_CONVERSION.UNKNOWN_TOOL_FALLBACK;
725
+ return {
726
+ role: "tool",
727
+ tool_call_id: toolResult.tool_use_id,
728
+ name: toolName,
729
+ content: resultContent
730
+ };
731
+ });
731
732
  }
732
733
  /**
733
- * 转义JSON字符串
734
+ * 创建文本消息
734
735
  */
735
- escapeJsonString(str) {
736
- return str.replace(/\\/g, "\\\\").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t");
736
+ static createTextMessage(role, textContent) {
737
+ if (textContent.length === 0) return null;
738
+ const hasNonTextContent = textContent.some((item) => item.type !== "text");
739
+ if (hasNonTextContent) {
740
+ return {
741
+ role,
742
+ content: textContent
743
+ };
744
+ } else {
745
+ const textOnly = textContent.map((item) => item.text || "").join("");
746
+ return {
747
+ role,
748
+ content: textOnly.trim() || ""
749
+ // 确保content为字符串,避免null
750
+ };
751
+ }
737
752
  }
738
753
  /**
739
- * 获取初始SSE事件(message_start + ping)
754
+ * 处理系统消息
740
755
  */
741
- getInitialSSEEvents(modelName = "claude-sonnet-4", messageId = `msg_${Date.now()}_${Math.random().toString(36).substring(2, 8)}`) {
742
- return [
743
- "event: message_start",
744
- `data: {"type":"message_start","message":{"id":"${messageId}","type":"message","role":"assistant","model":"${modelName}","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":0,"output_tokens":0}}}`,
745
- "",
746
- "event: ping",
747
- 'data: {"type":"ping"}',
748
- ""
749
- ];
756
+ static processSystemMessage(system) {
757
+ let systemContent;
758
+ if (Array.isArray(system)) {
759
+ systemContent = system.map((s) => {
760
+ if (typeof s === "string") {
761
+ return s;
762
+ }
763
+ return s.text || "";
764
+ }).filter((text) => text.length > 0).join("\n").trim();
765
+ } else {
766
+ systemContent = system;
767
+ }
768
+ if (systemContent) {
769
+ return {
770
+ role: "system",
771
+ content: systemContent
772
+ };
773
+ }
774
+ return null;
750
775
  }
751
776
  /**
752
- * 增量转换单个OpenAI数据块为Anthropic SSE事件
753
- * 用于逐个处理流式数据片段
777
+ * 异步转换图片内容格式(支持URL自动下载转base64)
778
+ * @param item 图片内容项
779
+ * @param downloadUrls 是否下载URL并转换为base64(默认true)
754
780
  */
755
- convertIncrementalChunk(openaiDataLine, state) {
756
- const logger = this.config.logger;
757
- const sseEvents = [];
758
- if (openaiDataLine.trim() === "[DONE]") {
759
- this.addFinalEvents(state, sseEvents);
760
- return sseEvents;
781
+ static async convertImageContentAsync(item, downloadUrls = true) {
782
+ if (!item.source) {
783
+ return null;
761
784
  }
762
- try {
763
- const chunk = JSON.parse(openaiDataLine);
764
- this.processStreamChunk(chunk, state, sseEvents);
765
- return sseEvents;
766
- } catch (error) {
767
- if (this.config.debugMode) {
768
- logger.warn("Failed to parse OpenAI stream chunk in convertIncrementalChunk", {
769
- line: openaiDataLine.substring(0, 200),
770
- error: error instanceof Error ? error.message : String(error)
771
- });
785
+ if (item.source.type === "url" && item.source.url) {
786
+ const url = item.source.url;
787
+ if (isBase64DataUri(url)) {
788
+ return {
789
+ type: "image_url",
790
+ image_url: {
791
+ url,
792
+ detail: "auto"
793
+ }
794
+ };
772
795
  }
773
- return [];
774
- }
775
- }
776
- };
777
-
778
- // src/core/a2o-request-adapter/config.ts
779
- var DEFAULT_CONFIG = {
780
- // 原有配置
781
- debugMode: false,
782
- maxDescriptionLength: 100,
783
- enableToolNameValidation: true,
784
- enableFormatValidation: true,
785
- // 新增默认配置
786
- validation: {
787
- enabled: true,
788
- strict: false,
789
- // 默认开启自动修复
790
- customSchemas: {}
791
- },
792
- healing: {
793
- enabled: true,
794
- maxAttempts: 3,
795
- enableCustomRules: true
796
- },
797
- recovery: {
798
- enabled: true,
799
- maxRetries: 2,
800
- backoffMs: 1e3
801
- },
802
- monitoring: {
803
- enabled: false,
804
- logLevel: "warn",
805
- enableMetrics: false
806
- }
807
- };
808
- var SUPPORTED_IMAGE_TYPES = [
809
- "image/jpeg",
810
- "image/png",
811
- "image/gif",
812
- "image/webp"
813
- ];
814
- var TOOL_CONVERSION = {
815
- /**
816
- * 终极泛化:完全移除工具名称映射
817
- * 基于GitHub Copilot API测试结果,100%保持原始格式
818
- */
819
- PRESERVE_ORIGINAL_NAMES: true,
820
- /**
821
- * 默认工具描述
822
- */
823
- DEFAULT_DESCRIPTION: "Tool description",
824
- /**
825
- * 未知工具回退名称
826
- */
827
- UNKNOWN_TOOL_FALLBACK: "unknown_tool"
828
- };
829
-
830
- // src/core/a2o-request-adapter/message-converter.ts
831
- var MessageConverter = class {
832
- /**
833
- * 转换消息格式,正确处理工具调用和工具结果
834
- * 修复关键问题:将tool_use转换为tool_calls,tool_result转换为role:"tool"消息
835
- * 使用tool_use_id溯回工具名称解决unknown_tool问题
836
- */
837
- static convertMessages(messages, system) {
838
- const context = this.createConversionContext(messages);
839
- const convertedMessages = [];
840
- for (const msg of messages) {
841
- if (Array.isArray(msg.content)) {
842
- const processedMessages = this.processComplexMessage(msg, context);
843
- convertedMessages.push(...processedMessages);
844
- } else {
845
- const safeMsg = { ...msg };
846
- if (safeMsg.content === null || safeMsg.content === void 0) {
847
- safeMsg.content = "";
796
+ if (downloadUrls && isExternalUrl(url)) {
797
+ try {
798
+ console.log(`[MessageConverter] Downloading image from URL: ${url}`);
799
+ const base64DataUri = await downloadImageAsBase64(url);
800
+ console.log(`[MessageConverter] Successfully converted image to base64`);
801
+ return {
802
+ type: "image_url",
803
+ image_url: {
804
+ url: base64DataUri,
805
+ detail: "auto"
806
+ }
807
+ };
808
+ } catch (error) {
809
+ console.error(`[MessageConverter] Failed to download image: ${error.message}`);
810
+ return {
811
+ type: "image_url",
812
+ image_url: {
813
+ url,
814
+ detail: "auto"
815
+ }
816
+ };
848
817
  }
849
- convertedMessages.push(safeMsg);
850
- }
851
- }
852
- if (system) {
853
- const systemMessage = this.processSystemMessage(system);
854
- if (systemMessage) {
855
- convertedMessages.unshift(systemMessage);
856
818
  }
857
- }
858
- return convertedMessages;
859
- }
860
- /**
861
- * 创建消息转换上下文
862
- */
863
- static createConversionContext(messages) {
864
- const toolIdToNameMap = /* @__PURE__ */ new Map();
865
- for (const msg of messages) {
866
- if (Array.isArray(msg.content)) {
867
- for (const item of msg.content) {
868
- if (typeof item === "object" && item !== null && item.type === "tool_use") {
869
- toolIdToNameMap.set(item.id, item.name);
870
- }
819
+ return {
820
+ type: "image_url",
821
+ image_url: {
822
+ url,
823
+ detail: "auto"
871
824
  }
872
- }
825
+ };
873
826
  }
874
- return {
875
- toolIdToNameMap,
876
- hasSystemMessage: false
877
- };
878
- }
879
- /**
880
- * 处理复杂消息(包含多种内容类型)
881
- */
882
- static processComplexMessage(msg, context) {
883
- const { textContent, toolUses, toolResults } = this.categorizeContent(msg.content);
884
- const resultMessages = [];
885
- if (msg.role === "assistant" && toolUses.length > 0) {
886
- const assistantMessage = this.createAssistantMessageWithToolCalls(textContent, toolUses);
887
- resultMessages.push(assistantMessage);
888
- } else if (toolResults.length > 0) {
889
- const toolMessages = this.createToolResultMessages(toolResults, context.toolIdToNameMap);
890
- resultMessages.push(...toolMessages);
891
- const textMessage = this.createTextMessage(msg.role, textContent);
892
- if (textMessage) {
893
- resultMessages.push(textMessage);
894
- }
895
- } else if (textContent.length > 0) {
896
- const textMessage = this.createTextMessage(msg.role, textContent);
897
- if (textMessage) {
898
- resultMessages.push(textMessage);
827
+ if (item.source.type === "base64" && item.source.data && item.source.media_type) {
828
+ if (!SUPPORTED_IMAGE_TYPES.includes(item.source.media_type)) {
829
+ console.warn(`\u4E0D\u652F\u6301\u7684\u56FE\u7247\u683C\u5F0F: ${item.source.media_type}`);
830
+ return null;
899
831
  }
832
+ const dataUri = `data:${item.source.media_type};base64,${item.source.data}`;
833
+ return {
834
+ type: "image_url",
835
+ image_url: {
836
+ url: dataUri,
837
+ detail: "auto"
838
+ }
839
+ };
900
840
  }
901
- return resultMessages;
841
+ return null;
902
842
  }
903
843
  /**
904
- * 分类内容块
844
+ * 异步处理消息内容(支持图片URL下载)
905
845
  */
906
- static categorizeContent(content) {
846
+ static async processMessageContentAsync(content, downloadUrls = true) {
907
847
  const textContent = [];
908
848
  const toolUses = [];
909
849
  const toolResults = [];
910
850
  for (const item of content) {
911
- if (typeof item === "string") {
912
- textContent.push({ type: "text", text: item });
913
- } else if (typeof item === "object" && item !== null) {
851
+ if (item.type) {
914
852
  switch (item.type) {
915
853
  case "text":
916
- textContent.push(item);
854
+ if (item.text) {
855
+ textContent.push({ type: "text", text: item.text });
856
+ }
917
857
  break;
918
858
  case "tool_use":
919
859
  toolUses.push(item);
@@ -922,7 +862,7 @@ var MessageConverter = class {
922
862
  toolResults.push(item);
923
863
  break;
924
864
  case "image":
925
- const imageContent = this.convertImageContent(item);
865
+ const imageContent = await this.convertImageContentAsync(item, downloadUrls);
926
866
  if (imageContent) {
927
867
  textContent.push(imageContent);
928
868
  }
@@ -933,146 +873,102 @@ var MessageConverter = class {
933
873
  return { textContent, toolUses, toolResults };
934
874
  }
935
875
  /**
936
- * 转换图片内容格式
876
+ * 异步转换消息格式(支持图片URL自动下载)
877
+ * @param messages Claude格式的消息数组
878
+ * @param system 系统消息
879
+ * @param downloadImageUrls 是否下载图片URL并转换为base64(默认true,解决GitHub Copilot等API不支持外部URL的问题)
937
880
  */
938
- static convertImageContent(item) {
939
- if (item.source && item.source.type === "base64" && item.source.data && item.source.media_type) {
940
- if (!SUPPORTED_IMAGE_TYPES.includes(item.source.media_type)) {
941
- console.warn(`\u4E0D\u652F\u6301\u7684\u56FE\u7247\u683C\u5F0F: ${item.source.media_type}`);
942
- return null;
943
- }
944
- const dataUri = `data:${item.source.media_type};base64,${item.source.data}`;
945
- return {
946
- type: "image_url",
947
- image_url: {
948
- url: dataUri
881
+ static async convertMessagesAsync(messages, system, downloadImageUrls = true) {
882
+ const debugEnabled = process.env.AI_PROTOCOL_DEBUG === "true";
883
+ if (debugEnabled) {
884
+ console.debug(
885
+ `[MessageConverter] convertMessagesAsync called (downloadImageUrls: ${downloadImageUrls})`
886
+ );
887
+ }
888
+ const context = this.createConversionContext(messages);
889
+ const convertedMessages = [];
890
+ for (const msg of messages) {
891
+ if (Array.isArray(msg.content)) {
892
+ const processedMessages = await this.processComplexMessageAsync(msg, context, downloadImageUrls);
893
+ convertedMessages.push(...processedMessages);
894
+ } else {
895
+ const safeMsg = { ...msg };
896
+ if (safeMsg.content === null || safeMsg.content === void 0) {
897
+ safeMsg.content = "";
949
898
  }
950
- };
899
+ convertedMessages.push(safeMsg);
900
+ }
951
901
  }
952
- return null;
902
+ const systemMessage = this.processSystemMessage(system);
903
+ if (systemMessage) {
904
+ return [systemMessage, ...convertedMessages];
905
+ }
906
+ return convertedMessages;
953
907
  }
954
908
  /**
955
- * 创建包含工具调用的助手消息
909
+ * 异步处理复杂消息(支持图片URL下载)
956
910
  */
957
- static createAssistantMessageWithToolCalls(textContent, toolUses) {
958
- const assistantMessage = {
959
- role: "assistant",
960
- content: ""
961
- // 默认为空字符串,避免null值
962
- };
963
- if (textContent.length > 0) {
964
- const textOnly = textContent.map((item) => item.text || "").join("");
965
- if (textOnly.trim()) {
966
- assistantMessage.content = textOnly.trim();
911
+ static async processComplexMessageAsync(msg, context, downloadUrls) {
912
+ const { textContent, toolUses, toolResults } = await this.processMessageContentAsync(
913
+ msg.content,
914
+ downloadUrls
915
+ );
916
+ const result = [];
917
+ if (msg.role === "user") {
918
+ const toolMessages = this.createToolResultMessages(toolResults, context.toolIdToNameMap);
919
+ result.push(...toolMessages);
920
+ const textMessage = this.createTextMessage("user", textContent);
921
+ if (textMessage) {
922
+ result.push(textMessage);
923
+ }
924
+ } else if (msg.role === "assistant") {
925
+ if (toolUses.length > 0) {
926
+ const assistantMessage = this.createAssistantMessageWithToolCalls(textContent, toolUses);
927
+ result.push(assistantMessage);
928
+ toolUses.forEach((toolUse) => {
929
+ context.toolIdToNameMap.set(toolUse.id, toolUse.name);
930
+ });
931
+ } else {
932
+ const textMessage = this.createTextMessage("assistant", textContent);
933
+ if (textMessage) {
934
+ result.push(textMessage);
935
+ }
967
936
  }
968
937
  }
969
- assistantMessage.tool_calls = toolUses.map((toolUse) => ({
970
- id: toolUse.id,
938
+ return result;
939
+ }
940
+ };
941
+
942
+ // src/core/a2o-request-adapter/tool-converter.ts
943
+ var ToolConverter = class {
944
+ /**
945
+ * 将Anthropic工具定义转换为OpenAI格式
946
+ */
947
+ static convertAnthropicToolToOpenAI(anthropicTool) {
948
+ if (!anthropicTool || !anthropicTool.name) {
949
+ throw new Error("Invalid tool definition: missing name");
950
+ }
951
+ const openaiName = anthropicTool.name;
952
+ const description = this.simplifyDescription(anthropicTool.description || TOOL_CONVERSION.DEFAULT_DESCRIPTION);
953
+ if (!anthropicTool.input_schema) {
954
+ throw new Error(`Invalid tool definition for ${anthropicTool.name}: missing input_schema`);
955
+ }
956
+ const parameters = {
957
+ type: anthropicTool.input_schema.type || "object",
958
+ properties: anthropicTool.input_schema.properties || {},
959
+ ...anthropicTool.input_schema.required && { required: anthropicTool.input_schema.required }
960
+ };
961
+ return {
971
962
  type: "function",
972
963
  function: {
973
- name: toolUse.name,
974
- arguments: JSON.stringify(toolUse.input || {})
964
+ name: openaiName,
965
+ description,
966
+ parameters
975
967
  }
976
- }));
977
- return assistantMessage;
968
+ };
978
969
  }
979
970
  /**
980
- * 创建工具结果消息
981
- */
982
- static createToolResultMessages(toolResults, toolIdToNameMap) {
983
- return toolResults.map((toolResult) => {
984
- let resultContent = "No content";
985
- if (toolResult.content) {
986
- if (typeof toolResult.content === "string") {
987
- resultContent = toolResult.content;
988
- } else {
989
- resultContent = JSON.stringify(toolResult.content, null, 2);
990
- }
991
- }
992
- const toolName = toolIdToNameMap.get(toolResult.tool_use_id) || TOOL_CONVERSION.UNKNOWN_TOOL_FALLBACK;
993
- return {
994
- role: "tool",
995
- tool_call_id: toolResult.tool_use_id,
996
- name: toolName,
997
- content: resultContent
998
- };
999
- });
1000
- }
1001
- /**
1002
- * 创建文本消息
1003
- */
1004
- static createTextMessage(role, textContent) {
1005
- if (textContent.length === 0) return null;
1006
- const hasNonTextContent = textContent.some((item) => item.type !== "text");
1007
- if (hasNonTextContent) {
1008
- return {
1009
- role,
1010
- content: textContent
1011
- };
1012
- } else {
1013
- const textOnly = textContent.map((item) => item.text || "").join("");
1014
- return {
1015
- role,
1016
- content: textOnly.trim() || ""
1017
- // 确保content为字符串,避免null
1018
- };
1019
- }
1020
- }
1021
- /**
1022
- * 处理系统消息
1023
- */
1024
- static processSystemMessage(system) {
1025
- let systemContent;
1026
- if (Array.isArray(system)) {
1027
- systemContent = system.map((s) => {
1028
- if (typeof s === "string") {
1029
- return s;
1030
- }
1031
- return s.text || "";
1032
- }).filter((text) => text.length > 0).join("\n").trim();
1033
- } else {
1034
- systemContent = system;
1035
- }
1036
- if (systemContent) {
1037
- return {
1038
- role: "system",
1039
- content: systemContent
1040
- };
1041
- }
1042
- return null;
1043
- }
1044
- };
1045
-
1046
- // src/core/a2o-request-adapter/tool-converter.ts
1047
- var ToolConverter = class {
1048
- /**
1049
- * 将Anthropic工具定义转换为OpenAI格式
1050
- */
1051
- static convertAnthropicToolToOpenAI(anthropicTool) {
1052
- if (!anthropicTool || !anthropicTool.name) {
1053
- throw new Error("Invalid tool definition: missing name");
1054
- }
1055
- const openaiName = anthropicTool.name;
1056
- const description = this.simplifyDescription(anthropicTool.description || TOOL_CONVERSION.DEFAULT_DESCRIPTION);
1057
- if (!anthropicTool.input_schema) {
1058
- throw new Error(`Invalid tool definition for ${anthropicTool.name}: missing input_schema`);
1059
- }
1060
- const parameters = {
1061
- type: anthropicTool.input_schema.type || "object",
1062
- properties: anthropicTool.input_schema.properties || {},
1063
- ...anthropicTool.input_schema.required && { required: anthropicTool.input_schema.required }
1064
- };
1065
- return {
1066
- type: "function",
1067
- function: {
1068
- name: openaiName,
1069
- description,
1070
- parameters
1071
- }
1072
- };
1073
- }
1074
- /**
1075
- * 将OpenAI工具调用转换为Claude格式
971
+ * 将OpenAI工具调用转换为Claude格式
1076
972
  */
1077
973
  static convertOpenAIToolCallsToClaude(toolCalls) {
1078
974
  return toolCalls.map((toolCall) => {
@@ -1097,6 +993,24 @@ var ToolConverter = class {
1097
993
  static isOpenAIToolFormat(tool) {
1098
994
  return tool && tool.type === "function" && tool.function && tool.function.name;
1099
995
  }
996
+ /**
997
+ * 确保OpenAI格式工具有有效描述
998
+ * 处理空字符串、undefined、null等情况
999
+ */
1000
+ static ensureOpenAIToolDescription(tool) {
1001
+ if (!tool?.function) return tool;
1002
+ const description = tool.function.description?.trim();
1003
+ if (!description) {
1004
+ return {
1005
+ ...tool,
1006
+ function: {
1007
+ ...tool.function,
1008
+ description: TOOL_CONVERSION.DEFAULT_DESCRIPTION
1009
+ }
1010
+ };
1011
+ }
1012
+ return tool;
1013
+ }
1100
1014
  /**
1101
1015
  * 简化Claude的详细描述为OpenAI兼容的简短描述
1102
1016
  */
@@ -3673,344 +3587,1204 @@ var A2ORequestAdapter = class {
3673
3587
  } else {
3674
3588
  result.data = openaiRequest;
3675
3589
  }
3676
- if (this.config.monitoring.enabled) {
3677
- const processingTime = Date.now() - startTime;
3678
- if (this.config.monitoring.logLevel !== "none") {
3679
- console.log(`[A2O Adapter] Conversion completed in ${processingTime}ms`, {
3680
- healingApplied: result.healingApplied,
3681
- fixesCount: result.appliedFixes?.length || 0
3682
- });
3683
- }
3590
+ if (this.config.monitoring.enabled) {
3591
+ const processingTime = Date.now() - startTime;
3592
+ if (this.config.monitoring.logLevel !== "none") {
3593
+ console.log(`[A2O Adapter] Conversion completed in ${processingTime}ms`, {
3594
+ healingApplied: result.healingApplied,
3595
+ fixesCount: result.appliedFixes?.length || 0
3596
+ });
3597
+ }
3598
+ }
3599
+ result.success = true;
3600
+ return result;
3601
+ } catch (error) {
3602
+ result.errors.push(`Conversion failed: ${error.message}`);
3603
+ if (this.config.recovery.enabled) {
3604
+ result.warnings.push("Error recovery attempted but not implemented yet");
3605
+ }
3606
+ return result;
3607
+ }
3608
+ }
3609
+ /**
3610
+ * 执行核心转换逻辑(支持图片代理)
3611
+ */
3612
+ async performCoreConversion(anthropicRequest) {
3613
+ if (this.config.enableFormatValidation) {
3614
+ FormatValidator.validateClaudeRequest(anthropicRequest);
3615
+ }
3616
+ const messages = this.config.imageProxy.enabled ? await MessageConverter.convertMessagesAsync(
3617
+ anthropicRequest.messages,
3618
+ anthropicRequest.system,
3619
+ true
3620
+ // 启用图片下载
3621
+ ) : MessageConverter.convertMessages(anthropicRequest.messages, anthropicRequest.system);
3622
+ const openaiRequest = {
3623
+ model: anthropicRequest.model,
3624
+ messages,
3625
+ max_tokens: anthropicRequest.max_tokens,
3626
+ temperature: anthropicRequest.temperature,
3627
+ stream: anthropicRequest.stream
3628
+ };
3629
+ if (anthropicRequest.tools && anthropicRequest.tools.length > 0) {
3630
+ openaiRequest.tools = this.convertToolDefinitions(anthropicRequest.tools);
3631
+ }
3632
+ const specialFields = ["_anthropic_protocol", "_rovo_tool_injected", "_routeResult"];
3633
+ for (const field of specialFields) {
3634
+ if (anthropicRequest[field] !== void 0) {
3635
+ openaiRequest[field] = anthropicRequest[field];
3636
+ }
3637
+ }
3638
+ return openaiRequest;
3639
+ }
3640
+ /**
3641
+ * 转换Anthropic请求格式为OpenAI兼容格式 - 原有方法保持兼容
3642
+ */
3643
+ convertAnthropicRequestToOpenAI(anthropicRequest) {
3644
+ if (this.config.enableFormatValidation) {
3645
+ FormatValidator.validateClaudeRequest(anthropicRequest);
3646
+ }
3647
+ const openaiRequest = {
3648
+ model: anthropicRequest.model,
3649
+ messages: MessageConverter.convertMessages(anthropicRequest.messages, anthropicRequest.system),
3650
+ max_tokens: anthropicRequest.max_tokens,
3651
+ temperature: anthropicRequest.temperature,
3652
+ stream: anthropicRequest.stream,
3653
+ n: 1
3654
+ };
3655
+ if (anthropicRequest.tools && anthropicRequest.tools.length > 0) {
3656
+ openaiRequest.tools = this.convertToolDefinitions(anthropicRequest.tools);
3657
+ }
3658
+ const specialFields = ["_anthropic_protocol", "_rovo_tool_injected", "_routeResult"];
3659
+ for (const field of specialFields) {
3660
+ if (anthropicRequest[field] !== void 0) {
3661
+ openaiRequest[field] = anthropicRequest[field];
3662
+ }
3663
+ }
3664
+ if (this.config.enableFormatValidation && !FormatValidator.validateOpenAIRequest(openaiRequest)) {
3665
+ throw new Error("Generated OpenAI request format is invalid");
3666
+ }
3667
+ return openaiRequest;
3668
+ }
3669
+ /**
3670
+ * 转换OpenAI响应格式为Claude兼容格式
3671
+ */
3672
+ convertOpenAIResponseToClaude(openaiResponse) {
3673
+ const claudeContent = [];
3674
+ const message = openaiResponse.choices?.[0]?.message;
3675
+ if (message?.reasoning_content) {
3676
+ claudeContent.push({
3677
+ type: "thinking",
3678
+ thinking: message.reasoning_content
3679
+ });
3680
+ }
3681
+ if (message?.content) {
3682
+ claudeContent.push({
3683
+ type: "text",
3684
+ text: message.content
3685
+ });
3686
+ }
3687
+ if (message?.tool_calls) {
3688
+ const toolUseContents = ToolConverter.convertOpenAIToolCallsToClaude(message.tool_calls);
3689
+ claudeContent.push(...toolUseContents);
3690
+ }
3691
+ const claudeResponse = {
3692
+ role: "assistant",
3693
+ content: claudeContent
3694
+ };
3695
+ return claudeResponse;
3696
+ }
3697
+ /**
3698
+ * 转换工具定义列表
3699
+ * 确保所有工具都有有效描述,无论是Anthropic还是OpenAI格式
3700
+ */
3701
+ convertToolDefinitions(tools) {
3702
+ return tools.map((tool) => {
3703
+ let openaiTool;
3704
+ if (ToolConverter.isOpenAIToolFormat(tool)) {
3705
+ openaiTool = tool;
3706
+ } else {
3707
+ openaiTool = ToolConverter.convertAnthropicToolToOpenAI(tool);
3708
+ }
3709
+ return ToolConverter.ensureOpenAIToolDescription(openaiTool);
3710
+ });
3711
+ }
3712
+ /**
3713
+ * 验证Claude请求格式
3714
+ */
3715
+ validateClaudeRequest(request) {
3716
+ return FormatValidator.validateClaudeRequest(request);
3717
+ }
3718
+ /**
3719
+ * 验证OpenAI请求格式
3720
+ */
3721
+ validateOpenAIRequest(request) {
3722
+ return FormatValidator.validateOpenAIRequest(request);
3723
+ }
3724
+ /**
3725
+ * 获取支持的工具列表
3726
+ */
3727
+ getSupportedTools() {
3728
+ return [];
3729
+ }
3730
+ /**
3731
+ * 检查工具是否支持
3732
+ */
3733
+ isToolSupported(_toolName) {
3734
+ return true;
3735
+ }
3736
+ /**
3737
+ * 获取工具映射(已弃用,保持兼容性)
3738
+ */
3739
+ getToolMapping(claudeToolName) {
3740
+ return claudeToolName;
3741
+ }
3742
+ /**
3743
+ * 更新配置
3744
+ */
3745
+ updateConfig(newConfig) {
3746
+ this.config = { ...this.config, ...newConfig };
3747
+ }
3748
+ /**
3749
+ * 获取当前配置
3750
+ */
3751
+ getConfig() {
3752
+ return { ...this.config };
3753
+ }
3754
+ /**
3755
+ * 执行带验证的核心转换(同步版本)
3756
+ * 为静态方法提供增强功能,但保持同步特性
3757
+ */
3758
+ performCoreConversionWithValidation(anthropicRequest) {
3759
+ if (this.config.validation.enabled) {
3760
+ try {
3761
+ validateAnthropicRequest(anthropicRequest);
3762
+ } catch (error) {
3763
+ if (this.config.validation.strict) {
3764
+ throw error;
3765
+ } else {
3766
+ const errorSummary = this.getValidationErrorSummary(error);
3767
+ console.warn(`[A2ORequestAdapter] Input validation warning: ${errorSummary}. Details saved to logs.`);
3768
+ }
3769
+ }
3770
+ }
3771
+ let processedRequest = anthropicRequest;
3772
+ if (this.config.healing.enabled) {
3773
+ try {
3774
+ processedRequest = this.applySyncHealing(anthropicRequest);
3775
+ } catch (healingError) {
3776
+ console.warn("[A2ORequestAdapter] Healing failed:", healingError);
3777
+ }
3778
+ }
3779
+ const result = this.performBasicConversion(processedRequest, true);
3780
+ if (this.config.validation.enabled) {
3781
+ try {
3782
+ validateOpenAIRequest(result);
3783
+ } catch (error) {
3784
+ if (this.config.validation.strict) {
3785
+ throw error;
3786
+ } else {
3787
+ console.warn("[A2ORequestAdapter] Output validation warning:", error);
3788
+ }
3789
+ }
3790
+ }
3791
+ return result;
3792
+ }
3793
+ /**
3794
+ * 执行基础转换逻辑(原有逻辑的提取)
3795
+ */
3796
+ performBasicConversion(anthropicRequest, skipValidation = false) {
3797
+ if (!skipValidation && this.config.enableFormatValidation) {
3798
+ FormatValidator.validateClaudeRequest(anthropicRequest);
3799
+ }
3800
+ const openaiRequest = {
3801
+ model: anthropicRequest.model,
3802
+ messages: MessageConverter.convertMessages(anthropicRequest.messages, anthropicRequest.system),
3803
+ max_tokens: anthropicRequest.max_tokens,
3804
+ temperature: anthropicRequest.temperature,
3805
+ stream: anthropicRequest.stream,
3806
+ n: 1
3807
+ };
3808
+ if (anthropicRequest.tools && anthropicRequest.tools.length > 0) {
3809
+ openaiRequest.tools = this.convertToolDefinitions(anthropicRequest.tools);
3810
+ }
3811
+ const specialFields = ["_anthropic_protocol", "_rovo_tool_injected", "_routeResult"];
3812
+ for (const field of specialFields) {
3813
+ if (anthropicRequest[field] !== void 0) {
3814
+ openaiRequest[field] = anthropicRequest[field];
3815
+ }
3816
+ }
3817
+ if (this.config.enableFormatValidation && !FormatValidator.validateOpenAIRequest(openaiRequest)) {
3818
+ throw new Error("Generated OpenAI request format is invalid");
3819
+ }
3820
+ return openaiRequest;
3821
+ }
3822
+ /**
3823
+ * 应用同步修复逻辑
3824
+ * 简化版的修复,不依赖异步操作
3825
+ */
3826
+ applySyncHealing(request) {
3827
+ const healedRequest = { ...request };
3828
+ if (!healedRequest.max_tokens || healedRequest.max_tokens <= 0) {
3829
+ healedRequest.max_tokens = 4096;
3830
+ }
3831
+ if (!healedRequest.messages || !Array.isArray(healedRequest.messages)) {
3832
+ throw new Error("Invalid messages array");
3833
+ }
3834
+ if (!healedRequest.model) {
3835
+ healedRequest.model = "claude-sonnet-4";
3836
+ }
3837
+ for (const message of healedRequest.messages) {
3838
+ if (!message.role) {
3839
+ message.role = "user";
3840
+ }
3841
+ if (!message.content) {
3842
+ message.content = "";
3843
+ }
3844
+ }
3845
+ return healedRequest;
3846
+ }
3847
+ /**
3848
+ * 获取验证错误详情
3849
+ */
3850
+ getValidationErrors(request, type) {
3851
+ return FormatValidator.getValidationErrors(request, type);
3852
+ }
3853
+ /**
3854
+ * 生成简洁的验证错误摘要
3855
+ */
3856
+ getValidationErrorSummary(error) {
3857
+ if (error?.issues?.length > 0) {
3858
+ const invalidEnums = error.issues.filter((i) => i.code === "invalid_enum_value");
3859
+ const missingFields = error.issues.filter((i) => i.code === "invalid_type");
3860
+ const summary = [];
3861
+ if (invalidEnums.length > 0) {
3862
+ const first = invalidEnums[0];
3863
+ summary.push(`invalid_${first.path?.join(".")}: '${first.received}'`);
3864
+ }
3865
+ if (missingFields.length > 0) {
3866
+ summary.push(`${missingFields.length} missing fields`);
3867
+ }
3868
+ return summary.slice(0, 2).join(", ") + (error.issues.length > 5 ? ` (+${error.issues.length - 5} more)` : "");
3869
+ }
3870
+ return error.message || "Validation failed";
3871
+ }
3872
+ };
3873
+ var A2ORequestAdapterStatic = {
3874
+ /**
3875
+ * 转换Anthropic请求格式为OpenAI兼容格式(静态方法)
3876
+ * 内部使用增强转换器,所有调用点自动获得增强功能
3877
+ */
3878
+ convertAnthropicRequestToOpenAI: (anthropicRequest) => {
3879
+ const adapter = new A2ORequestAdapter({
3880
+ debugMode: false,
3881
+ maxDescriptionLength: 100,
3882
+ enableToolNameValidation: true,
3883
+ enableFormatValidation: true,
3884
+ validation: { enabled: true, strict: false },
3885
+ healing: { enabled: true, maxAttempts: 2, enableCustomRules: true },
3886
+ recovery: { enabled: false, maxRetries: 0, backoffMs: 1e3 },
3887
+ monitoring: { enabled: false, logLevel: "none", enableMetrics: false }
3888
+ });
3889
+ try {
3890
+ const result = adapter.performCoreConversionWithValidation(anthropicRequest);
3891
+ return result;
3892
+ } catch (error) {
3893
+ console.warn(`[A2ORequestAdapterStatic] Enhanced conversion failed, using basic conversion: ${error?.message || error}`);
3894
+ return adapter.performBasicConversion(anthropicRequest, true);
3895
+ }
3896
+ },
3897
+ /**
3898
+ * 转换OpenAI响应格式为Claude兼容格式(静态方法)
3899
+ * 内部使用增强转换器
3900
+ */
3901
+ convertOpenAIResponseToClaude: (openaiResponse) => {
3902
+ const adapter = new A2ORequestAdapter({
3903
+ debugMode: false,
3904
+ maxDescriptionLength: 100,
3905
+ enableToolNameValidation: true,
3906
+ enableFormatValidation: true,
3907
+ validation: { enabled: true, strict: false },
3908
+ healing: { enabled: true, maxAttempts: 2, enableCustomRules: true },
3909
+ recovery: { enabled: false, maxRetries: 0, backoffMs: 1e3 },
3910
+ monitoring: { enabled: false, logLevel: "none", enableMetrics: false }
3911
+ });
3912
+ return adapter.convertOpenAIResponseToClaude(openaiResponse);
3913
+ },
3914
+ /**
3915
+ * 验证Claude请求格式(静态方法)
3916
+ */
3917
+ validateClaudeRequest: (request) => {
3918
+ return FormatValidator.validateClaudeRequest(request);
3919
+ },
3920
+ /**
3921
+ * 验证OpenAI请求格式(静态方法)
3922
+ */
3923
+ validateOpenAIRequest: (request) => {
3924
+ return FormatValidator.validateOpenAIRequest(request);
3925
+ },
3926
+ /**
3927
+ * 获取支持的工具列表(静态方法)
3928
+ */
3929
+ getSupportedTools: () => {
3930
+ return [];
3931
+ },
3932
+ /**
3933
+ * 检查工具是否支持(静态方法)
3934
+ */
3935
+ isToolSupported: (_toolName) => {
3936
+ return true;
3937
+ },
3938
+ /**
3939
+ * 获取工具映射(静态方法,已弃用)
3940
+ */
3941
+ getToolMapping: (claudeToolName) => {
3942
+ return claudeToolName;
3943
+ },
3944
+ /**
3945
+ * 转换Anthropic请求格式为OpenAI兼容格式(异步版本,支持图片URL自动下载)
3946
+ * 解决GitHub Copilot等API不支持外部图片URL的问题
3947
+ * @param anthropicRequest Claude格式的请求
3948
+ * @param downloadImageUrls 是否下载图片URL并转换为base64(默认true)
3949
+ */
3950
+ convertAnthropicRequestToOpenAIAsync: async (anthropicRequest, downloadImageUrls = true) => {
3951
+ const adapter = new A2ORequestAdapter({
3952
+ debugMode: false,
3953
+ maxDescriptionLength: 100,
3954
+ enableToolNameValidation: true,
3955
+ enableFormatValidation: true,
3956
+ validation: { enabled: true, strict: false },
3957
+ healing: { enabled: true, maxAttempts: 2, enableCustomRules: true },
3958
+ recovery: { enabled: false, maxRetries: 0, backoffMs: 1e3 },
3959
+ monitoring: { enabled: false, logLevel: "none", enableMetrics: false },
3960
+ imageProxy: {
3961
+ enabled: downloadImageUrls,
3962
+ timeout: 1e4,
3963
+ maxSize: 10 * 1024 * 1024
3964
+ }
3965
+ });
3966
+ try {
3967
+ const result = await adapter.performCoreConversion(anthropicRequest);
3968
+ return result;
3969
+ } catch (error) {
3970
+ console.warn(`[A2ORequestAdapterStatic] Async conversion failed: ${error?.message || error}`);
3971
+ return adapter.performBasicConversion(anthropicRequest, true);
3972
+ }
3973
+ }
3974
+ };
3975
+
3976
+ // src/core/streaming/streaming-protocol-adapter.ts
3977
+ var StreamingProtocolAdapter = class {
3978
+ constructor(options = {}) {
3979
+ this.config = {
3980
+ debugMode: options.debugMode ?? false,
3981
+ validateInput: options.validateInput ?? false,
3982
+ validateOutput: options.validateOutput ?? false,
3983
+ autoHeal: options.autoHeal ?? false,
3984
+ timeout: options.timeout ?? 3e4,
3985
+ retries: options.retries ?? 3,
3986
+ bufferSize: options.bufferSize ?? 1024,
3987
+ logger: options.logger ?? getGlobalLogger()
3988
+ };
3989
+ }
3990
+ logDebug(message, meta) {
3991
+ if (this.config.debugMode) {
3992
+ this.config.logger.debug(message, meta);
3993
+ }
3994
+ }
3995
+ /**
3996
+ * 转换Anthropic请求为OpenAI格式
3997
+ */
3998
+ convertAnthropicToOpenAI(anthropicRequest) {
3999
+ const logger = this.config.logger;
4000
+ if (this.config.debugMode) {
4001
+ logger.debug("Converting Anthropic request to OpenAI format", { model: anthropicRequest.model });
4002
+ }
4003
+ const openaiRequest = A2ORequestAdapterStatic.convertAnthropicRequestToOpenAI(anthropicRequest);
4004
+ openaiRequest.stream = true;
4005
+ const hasImages = this.hasImageContent(anthropicRequest);
4006
+ return {
4007
+ openaiRequest,
4008
+ metadata: {
4009
+ hasImages,
4010
+ requiresVisionHeaders: hasImages
4011
+ }
4012
+ };
4013
+ }
4014
+ /**
4015
+ * 与StandardProtocolAdapter保持一致的API,用于集成测试和向后兼容。
4016
+ */
4017
+ convertRequest(anthropicRequest) {
4018
+ return this.convertAnthropicToOpenAI(anthropicRequest);
4019
+ }
4020
+ /**
4021
+ * 转换OpenAI流式响应为Anthropic SSE格式
4022
+ */
4023
+ convertOpenAIStreamToAnthropic(openaiStream, originalRequest) {
4024
+ const logger = this.config.logger;
4025
+ try {
4026
+ if (this.config.debugMode) {
4027
+ logger.debug("Converting OpenAI stream to Anthropic SSE", {
4028
+ streamLength: openaiStream.length,
4029
+ model: originalRequest.model
4030
+ });
4031
+ }
4032
+ if (!openaiStream || openaiStream.trim() === "") {
4033
+ return {
4034
+ success: false,
4035
+ error: "Empty stream response",
4036
+ anthropicSSE: "",
4037
+ anthropicStandardResponse: null
4038
+ };
3684
4039
  }
3685
- result.success = true;
3686
- return result;
4040
+ const anthropicSSE = this.convertToAnthropicSSE(openaiStream, originalRequest.model);
4041
+ const anthropicStandardResponse = this.buildStandardResponse(openaiStream);
4042
+ return {
4043
+ success: true,
4044
+ anthropicSSE,
4045
+ anthropicStandardResponse
4046
+ };
3687
4047
  } catch (error) {
3688
- result.errors.push(`Conversion failed: ${error.message}`);
3689
- if (this.config.recovery.enabled) {
3690
- result.warnings.push("Error recovery attempted but not implemented yet");
3691
- }
3692
- return result;
4048
+ const errorMessage = error instanceof Error ? error.message : "Unknown conversion error";
4049
+ logger.error("Stream conversion failed", { error: errorMessage });
4050
+ return {
4051
+ success: false,
4052
+ error: errorMessage,
4053
+ anthropicSSE: "",
4054
+ anthropicStandardResponse: null
4055
+ };
3693
4056
  }
3694
4057
  }
3695
4058
  /**
3696
- * 执行核心转换逻辑(原有逻辑保持不变)
4059
+ * 增量解析Anthropic SSE,转换为OpenAI流式chunk
4060
+ * 供 OpenAI Chat Completions 端点直接复用
3697
4061
  */
3698
- async performCoreConversion(anthropicRequest) {
3699
- if (this.config.enableFormatValidation) {
3700
- FormatValidator.validateClaudeRequest(anthropicRequest);
3701
- }
3702
- const openaiRequest = {
3703
- model: anthropicRequest.model,
3704
- messages: MessageConverter.convertMessages(anthropicRequest.messages, anthropicRequest.system),
3705
- max_tokens: anthropicRequest.max_tokens,
3706
- temperature: anthropicRequest.temperature,
3707
- stream: anthropicRequest.stream
4062
+ convertAnthropicSSEChunkToOpenAI(params) {
4063
+ const { buffer, chunk, model, flush = false } = params;
4064
+ let localBuffer = buffer + (chunk || "");
4065
+ const emittedChunks = [];
4066
+ let finishReason;
4067
+ let streamStopped = false;
4068
+ const processEvent = (eventText) => {
4069
+ const { eventType, data } = this.parseAnthropicSSEEvent(eventText);
4070
+ if (!eventType || !data) {
4071
+ return;
4072
+ }
4073
+ if (eventType === "content_block_delta") {
4074
+ const text = this.extractTextFromAnthropicDelta(data);
4075
+ if (text) {
4076
+ emittedChunks.push(this.buildOpenAIStreamChunk(model, text));
4077
+ }
4078
+ } else if (eventType === "message_stop") {
4079
+ finishReason = this.mapAnthropicStopReasonToOpenAI(data?.stop_reason);
4080
+ streamStopped = true;
4081
+ }
3708
4082
  };
3709
- if (anthropicRequest.tools && anthropicRequest.tools.length > 0) {
3710
- openaiRequest.tools = this.convertToolDefinitions(anthropicRequest.tools);
3711
- }
3712
- const specialFields = ["_anthropic_protocol", "_rovo_tool_injected", "_routeResult"];
3713
- for (const field of specialFields) {
3714
- if (anthropicRequest[field] !== void 0) {
3715
- openaiRequest[field] = anthropicRequest[field];
4083
+ while (true) {
4084
+ const separatorIndex = localBuffer.indexOf("\n\n");
4085
+ if (separatorIndex === -1) {
4086
+ break;
4087
+ }
4088
+ const rawEvent = localBuffer.slice(0, separatorIndex);
4089
+ localBuffer = localBuffer.slice(separatorIndex + 2);
4090
+ if (!rawEvent.trim()) {
4091
+ continue;
4092
+ }
4093
+ processEvent(rawEvent);
4094
+ if (streamStopped) {
4095
+ break;
3716
4096
  }
3717
4097
  }
3718
- return openaiRequest;
4098
+ if (flush && localBuffer.trim()) {
4099
+ processEvent(localBuffer);
4100
+ localBuffer = "";
4101
+ }
4102
+ return {
4103
+ buffer: localBuffer,
4104
+ chunks: emittedChunks,
4105
+ finishReason,
4106
+ streamStopped
4107
+ };
3719
4108
  }
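A sketch of driving the incremental Anthropic-to-OpenAI path above; everything other than the method itself (the loop, `emit`, `finish`, the body iterable) is an assumption:

// Hypothetical incremental loop: carry the unparsed tail between network reads.
let buffer = "";
for await (const piece of anthropicSSEBody) {
  const out = adapter.convertAnthropicSSEChunkToOpenAI({ buffer, chunk: piece, model: "gpt-4o" });
  buffer = out.buffer;                  // incomplete event text, kept for the next read
  for (const c of out.chunks) emit(c);  // chat.completion.chunk objects
  if (out.streamStopped) { finish(out.finishReason ?? "stop"); break; }
}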
3720
4109
  /**
3721
- * Convert an Anthropic request to the OpenAI-compatible format - kept for backward compatibility
4110
+ * Convert an OpenAI stream to Anthropic SSE format
3722
4111
  */
3723
- convertAnthropicRequestToOpenAI(anthropicRequest) {
3724
- if (this.config.enableFormatValidation) {
3725
- FormatValidator.validateClaudeRequest(anthropicRequest);
3726
- }
3727
- const openaiRequest = {
3728
- model: anthropicRequest.model,
3729
- messages: MessageConverter.convertMessages(anthropicRequest.messages, anthropicRequest.system),
3730
- max_tokens: anthropicRequest.max_tokens,
3731
- temperature: anthropicRequest.temperature,
3732
- stream: anthropicRequest.stream,
3733
- n: 1
3734
- };
3735
- if (anthropicRequest.tools && anthropicRequest.tools.length > 0) {
3736
- openaiRequest.tools = this.convertToolDefinitions(anthropicRequest.tools);
3737
- }
3738
- const specialFields = ["_anthropic_protocol", "_rovo_tool_injected", "_routeResult"];
3739
- for (const field of specialFields) {
3740
- if (anthropicRequest[field] !== void 0) {
3741
- openaiRequest[field] = anthropicRequest[field];
4112
+ convertToAnthropicSSE(openaiStream, modelName) {
4113
+ const lines = openaiStream.split("\n");
4114
+ const sseLines = [];
4115
+ const state = this.createConversionState();
4116
+ sseLines.push(
4117
+ "event: message_start",
4118
+ `data: {"type":"message_start","message":{"id":"msg_${Date.now()}","type":"message","role":"assistant","model":"${modelName}","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":0,"output_tokens":0}}}`,
4119
+ ""
4120
+ );
4121
+ for (const line of lines) {
4122
+ if (line.startsWith("data:")) {
4123
+ const dataLine = line.substring(5);
4124
+ if (dataLine.trim() === "[DONE]") {
4125
+ this.addFinalEvents(state, sseLines);
4126
+ break;
4127
+ }
4128
+ try {
4129
+ const chunk = JSON.parse(dataLine);
4130
+ this.processStreamChunk(chunk, state, sseLines);
4131
+ } catch (error) {
4132
+ if (this.config.debugMode) {
4133
+ this.config.logger.warn("Failed to parse stream chunk", { line: dataLine.substring(0, 200) });
4134
+ }
4135
+ }
3742
4136
  }
3743
4137
  }
3744
- if (this.config.enableFormatValidation && !FormatValidator.validateOpenAIRequest(openaiRequest)) {
3745
- throw new Error("Generated OpenAI request format is invalid");
3746
- }
3747
- return openaiRequest;
4138
+ return sseLines.join("\n");
3748
4139
  }
3749
4140
  /**
3750
- * Convert an OpenAI response to the Claude-compatible format
4141
+ * Process a single streaming chunk - supports the dual thinking/content mode
3751
4142
  */
3752
- convertOpenAIResponseToClaude(openaiResponse) {
3753
- const claudeContent = [];
3754
- const message = openaiResponse.choices?.[0]?.message;
3755
- if (message?.content) {
3756
- claudeContent.push({
3757
- type: "text",
3758
- text: message.content
3759
- });
4143
+ processStreamChunk(chunk, state, sseLines) {
4144
+ if (this.isResponsesEvent(chunk)) {
4145
+ this.processResponsesEvent(chunk, state, sseLines);
4146
+ return;
3760
4147
  }
3761
- if (message?.tool_calls) {
3762
- const toolUseContents = ToolConverter.convertOpenAIToolCallsToClaude(message.tool_calls);
3763
- claudeContent.push(...toolUseContents);
4148
+ const choice = chunk.choices?.[0];
4149
+ if (choice) {
4150
+ const hasToolCalls = choice.delta?.tool_calls;
4151
+ const hasFinishReason = choice.finish_reason;
4152
+ const isNonText = !choice.delta?.content;
4153
+ if (this.config.debugMode && (hasToolCalls || hasFinishReason || isNonText && choice.delta)) {
4154
+ this.logDebug("Streaming chunk processed", { chunk });
4155
+ }
3764
4156
  }
3765
- const claudeResponse = {
3766
- role: "assistant",
3767
- content: claudeContent
3768
- };
3769
- return claudeResponse;
4157
+ if (!choice) {
4158
+ this.updateUsageFromChunk(chunk, state);
4159
+ return;
4160
+ }
4161
+ const delta = choice.delta ?? {};
4162
+ this.appendThinkingContent(this.coalesceContent(delta.reasoning_content), state, sseLines);
4163
+ this.appendTextContent(this.coalesceContent(delta.content), state, sseLines);
4164
+ if (delta.tool_calls) {
4165
+ this.processToolCalls(delta.tool_calls, state, sseLines);
4166
+ }
4167
+ this.updateUsageFromChunk(chunk, state);
3770
4168
  }
3771
4169
  /**
3772
- * Convert the list of tool definitions
4170
+ * Process tool calls - supports accumulation of OpenAI streaming chunks
4171
+ * The OpenAI streaming API sends tool_calls split across multiple chunks:
4172
+ * - Chunk 1: {index:0, id:"call_xxx", type:"function", function:{name:"web_search"}}
4173
+ * - Chunk 2: {index:0, function:{arguments:"{\"query\":\"xxx\"}"}}
4174
+ * - Chunk N: keeps accumulating arguments (see the sketch after this method)
3773
4175
  */
3774
- convertToolDefinitions(tools) {
3775
- return tools.map((tool) => {
3776
- if (ToolConverter.isOpenAIToolFormat(tool)) {
3777
- return tool;
3778
- } else {
3779
- return ToolConverter.convertAnthropicToolToOpenAI(tool);
4176
+ processToolCalls(toolCalls, state, sseLines) {
4177
+ this.logDebug("processToolCalls called", { toolCalls });
4178
+ for (const toolCall of toolCalls) {
4179
+ const index = toolCall.index ?? 0;
4180
+ const toolId = toolCall.id;
4181
+ const toolName = toolCall.function?.name;
4182
+ const toolArgs = toolCall.function?.arguments;
4183
+ this.logDebug(`Processing tool chunk for index ${index}`, {
4184
+ hasId: !!toolId,
4185
+ hasName: !!toolName,
4186
+ hasArgs: !!toolArgs,
4187
+ argsLength: toolArgs?.length
4188
+ });
4189
+ const stateKey = `openai_tool_${index}`;
4190
+ const toolData = this.getOrCreateToolCallState(state, stateKey);
4191
+ if (toolId && !toolData.id) {
4192
+ toolData.id = toolId;
3780
4193
  }
3781
- });
3782
- }
3783
- /**
3784
- * Validate the Claude request format
3785
- */
3786
- validateClaudeRequest(request) {
3787
- return FormatValidator.validateClaudeRequest(request);
4194
+ if (toolName) {
4195
+ toolData.name = toolName;
4196
+ }
4197
+ this.registerToolCallAlias(state, toolId ? `openai_tool_id_${toolId}` : void 0, toolData);
4198
+ this.registerToolCallAlias(state, `openai_tool_index_${index}`, toolData);
4199
+ if (toolArgs) {
4200
+ toolData.pendingChunks.push(toolArgs);
4201
+ this.logDebug(`Accumulated tool arguments for index ${index}`, {
4202
+ currentLength: toolData.pendingChunks.reduce((acc, chunk) => acc + chunk.length, 0)
4203
+ });
4204
+ }
4205
+ const started = this.maybeStartToolBlock(toolData, state, sseLines);
4206
+ if (started || toolData.blockStartSent) {
4207
+ this.flushPendingToolChunks(toolData, sseLines);
4208
+ }
4209
+ }
3788
4210
  }
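A sketch of the chunk accumulation referenced in the comment above, using the incremental entry point defined later in this class; the two delta payloads are hypothetical:

// Hypothetical: one tool call split across two OpenAI deltas.
const state = adapter.createIncrementalState();
const first = '{"choices":[{"delta":{"tool_calls":[{"index":0,"id":"call_1","type":"function","function":{"name":"web_search"}}]}}]}';
const second = '{"choices":[{"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\\"query\\":\\"ai\\"}"}}]}}]}';
const events = [
  ...adapter.convertIncrementalChunk(first, state),   // emits content_block_start (tool_use)
  ...adapter.convertIncrementalChunk(second, state)   // emits input_json_delta with the arguments
];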
3789
- /**
3790
- * Validate the OpenAI request format
3791
- */
3792
- validateOpenAIRequest(request) {
3793
- return FormatValidator.validateOpenAIRequest(request);
4211
+ getOrCreateToolCallState(state, key) {
4212
+ let existing = state.toolCallsMap.get(key);
4213
+ if (!existing) {
4214
+ existing = {
4215
+ id: "",
4216
+ name: "",
4217
+ input: "",
4218
+ blockStartSent: false,
4219
+ blockStopSent: false,
4220
+ pendingChunks: []
4221
+ };
4222
+ state.toolCallsMap.set(key, existing);
4223
+ }
4224
+ return existing;
3794
4225
  }
3795
- /**
3796
- * Get the list of supported tools
3797
- */
3798
- getSupportedTools() {
3799
- return [];
4226
+ registerToolCallAlias(state, alias, toolData) {
4227
+ if (!alias) return;
4228
+ const current = state.toolCallsMap.get(alias);
4229
+ if (!current || current !== toolData) {
4230
+ state.toolCallsMap.set(alias, toolData);
4231
+ }
3800
4232
  }
3801
- /**
3802
- * 检查工具是否支持
3803
- */
3804
- isToolSupported(_toolName) {
4233
+ maybeStartToolBlock(toolData, state, sseLines) {
4234
+ if (toolData.blockStartSent) return false;
4235
+ if (!toolData.name) {
4236
+ return false;
4237
+ }
4238
+ if (!toolData.id) {
4239
+ toolData.id = `call_${++state.toolCallCounter}`;
4240
+ }
4241
+ const blockIndex = toolData.blockIndex ?? state.nextToolBlockIndex++;
4242
+ toolData.blockIndex = blockIndex;
4243
+ sseLines.push(
4244
+ "event: content_block_start",
4245
+ `data: {"type":"content_block_start","index":${blockIndex},"content_block":{"type":"tool_use","id":"${this.escapeJsonString(toolData.id)}","name":"${this.escapeJsonString(toolData.name)}","input":{}}}`,
4246
+ ""
4247
+ );
4248
+ toolData.blockStartSent = true;
4249
+ this.logDebug("Sent content_block_start", { toolName: toolData.name, blockIndex });
3805
4250
  return true;
3806
4251
  }
3807
- /**
3808
- * Get the tool mapping (deprecated, kept for compatibility)
3809
- */
3810
- getToolMapping(claudeToolName) {
3811
- return claudeToolName;
4252
+ flushPendingToolChunks(toolData, sseLines) {
4253
+ if (!toolData.blockStartSent || toolData.blockIndex === void 0) {
4254
+ return;
4255
+ }
4256
+ while (toolData.pendingChunks.length > 0) {
4257
+ const chunk = toolData.pendingChunks.shift();
4258
+ if (chunk === void 0) continue;
4259
+ toolData.input += chunk;
4260
+ sseLines.push(
4261
+ "event: content_block_delta",
4262
+ `data: {"type":"content_block_delta","index":${toolData.blockIndex},"delta":{"type":"input_json_delta","partial_json":${JSON.stringify(chunk)}}}`,
4263
+ ""
4264
+ );
4265
+ this.logDebug("Sent input_json_delta", { blockIndex: toolData.blockIndex });
4266
+ }
4267
+ }
4268
+ coalesceContent(content) {
4269
+ if (!content) return void 0;
4270
+ if (typeof content === "string") return content;
4271
+ if (Array.isArray(content)) {
4272
+ return content.map((item) => {
4273
+ if (typeof item === "string") return item;
4274
+ if (typeof item?.text === "string") return item.text;
4275
+ if (typeof item?.content === "string") return item.content;
4276
+ return "";
4277
+ }).join("");
4278
+ }
4279
+ if (typeof content === "object" && typeof content.text === "string") {
4280
+ return content.text;
4281
+ }
4282
+ return void 0;
4283
+ }
4284
+ appendThinkingContent(content, state, sseLines) {
4285
+ if (!content) return;
4286
+ state.reasoningContent += content;
4287
+ if (!state.thinkingBlockStarted) {
4288
+ if (state.contentBlockStarted) {
4289
+ sseLines.push(
4290
+ "event: content_block_stop",
4291
+ 'data: {"type":"content_block_stop","index":0}',
4292
+ ""
4293
+ );
4294
+ state.contentBlockStarted = false;
4295
+ }
4296
+ sseLines.push(
4297
+ "event: content_block_start",
4298
+ 'data: {"type":"content_block_start","index":0,"content_block":{"type":"thinking","thinking":""}}',
4299
+ ""
4300
+ );
4301
+ state.thinkingBlockStarted = true;
4302
+ }
4303
+ sseLines.push(
4304
+ "event: content_block_delta",
4305
+ `data: {"type":"content_block_delta","index":0,"delta":{"type":"thinking_delta","thinking":"${this.escapeJsonString(content)}"}}`,
4306
+ ""
4307
+ );
3812
4308
  }
3813
- /**
3814
- * Update the configuration
3815
- */
3816
- updateConfig(newConfig) {
3817
- this.config = { ...this.config, ...newConfig };
4309
+ appendTextContent(content, state, sseLines) {
4310
+ if (!content || content === "") return;
4311
+ if (!state.thinkingBlockStarted && !state.contentBlockStarted && content.trim() === "") {
4312
+ state.textContent += content;
4313
+ return;
4314
+ }
4315
+ if (state.thinkingBlockStarted && !state.contentBlockStarted) {
4316
+ sseLines.push(
4317
+ "event: content_block_stop",
4318
+ 'data: {"type":"content_block_stop","index":0}',
4319
+ "",
4320
+ "event: content_block_start",
4321
+ 'data: {"type":"content_block_start","index":1,"content_block":{"type":"text","text":""}}',
4322
+ ""
4323
+ );
4324
+ state.contentBlockStarted = true;
4325
+ } else if (!state.contentBlockStarted && !state.thinkingBlockStarted) {
4326
+ sseLines.push(
4327
+ "event: content_block_start",
4328
+ 'data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}}',
4329
+ ""
4330
+ );
4331
+ state.contentBlockStarted = true;
4332
+ }
4333
+ state.textContent += content;
4334
+ const blockIndex = state.thinkingBlockStarted ? 1 : 0;
4335
+ sseLines.push(
4336
+ "event: content_block_delta",
4337
+ `data: {"type":"content_block_delta","index":${blockIndex},"delta":{"type":"text_delta","text":"${this.escapeJsonString(content)}"}}`,
4338
+ ""
4339
+ );
3818
4340
  }
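The two append helpers above pin thinking to block index 0 and, once thinking has started, text to index 1. A sketch of the resulting event order for a thinking-then-text stream (payloads abbreviated; the exact JSON is built by the code above):

// delta.reasoning_content "Let me think..." -> content_block_start  {type:"thinking", index:0}
//                                           -> content_block_delta {thinking_delta, index:0}
// delta.content "Hello"                     -> content_block_stop  {index:0}
//                                           -> content_block_start {type:"text", index:1}
//                                           -> content_block_delta {text_delta, index:1}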
3819
- /**
3820
- * Get the current configuration
3821
- */
3822
- getConfig() {
3823
- return { ...this.config };
4341
+ updateUsageFromChunk(chunk, state) {
4342
+ const usage = chunk?.usage || chunk?.response?.usage;
4343
+ if (!usage) return;
4344
+ if (typeof usage.prompt_tokens === "number") {
4345
+ state.usage.input_tokens = usage.prompt_tokens;
4346
+ }
4347
+ if (typeof usage.completion_tokens === "number") {
4348
+ state.usage.output_tokens = usage.completion_tokens;
4349
+ }
4350
+ if (typeof usage.input_tokens === "number") {
4351
+ state.usage.input_tokens = usage.input_tokens;
4352
+ }
4353
+ if (typeof usage.output_tokens === "number") {
4354
+ state.usage.output_tokens = usage.output_tokens;
4355
+ }
4356
+ }
4357
+ isResponsesEvent(chunk) {
4358
+ return typeof chunk?.type === "string" && chunk.type.startsWith("response.");
4359
+ }
4360
+ processResponsesEvent(event, state, sseLines) {
4361
+ this.updateUsageFromChunk(event, state);
4362
+ switch (event.type) {
4363
+ case "response.output_item.added":
4364
+ this.handleResponsesOutputItemAdded(event, state, sseLines);
4365
+ break;
4366
+ case "response.function_call_arguments.delta":
4367
+ this.handleResponsesFunctionArgumentsDelta(event, state, sseLines);
4368
+ break;
4369
+ case "response.function_call_arguments.done":
4370
+ case "response.output_item.done":
4371
+ this.handleResponsesFunctionArgumentsDone(event, state, sseLines);
4372
+ break;
4373
+ case "response.output_text.delta":
4374
+ case "response.text.delta":
4375
+ this.appendTextContent(this.extractResponsesTextDelta(event), state, sseLines);
4376
+ break;
4377
+ case "response.output_text.done":
4378
+ case "response.text.done":
4379
+ break;
4380
+ case "response.thinking.delta":
4381
+ this.appendThinkingContent(this.extractResponsesThinkingDelta(event), state, sseLines);
4382
+ break;
4383
+ default:
4384
+ break;
4385
+ }
3824
4386
  }
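A sketch of the Responses-API event shapes the switch above routes; the concrete field values are hypothetical, while the field names follow the handlers below:

// Hypothetical "response.*" events and where processResponsesEvent sends them:
const responsesEvents = [
  { type: "response.output_item.added", output_index: 0,
    item: { type: "function_call", id: "item_1", call_id: "call_1", name: "web_search" } }, // -> tool_use block start
  { type: "response.function_call_arguments.delta", item_id: "item_1", delta: '{"query":' }, // -> input_json_delta
  { type: "response.function_call_arguments.done", item_id: "item_1", arguments: '{"query":"ai"}' }, // -> flush + content_block_stop
  { type: "response.output_text.delta", delta: "Done." } // -> text_delta
];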
3825
- /**
3826
- * Run the core conversion with validation (synchronous version)
3827
- * Provides enhanced features for the static methods while staying synchronous
3828
- */
3829
- performCoreConversionWithValidation(anthropicRequest) {
3830
- if (this.config.validation.enabled) {
3831
- try {
3832
- validateAnthropicRequest(anthropicRequest);
3833
- } catch (error) {
3834
- if (this.config.validation.strict) {
3835
- throw error;
3836
- } else {
3837
- const errorSummary = this.getValidationErrorSummary(error);
3838
- console.warn(`[A2ORequestAdapter] Input validation warning: ${errorSummary}. Details saved to logs.`);
3839
- }
4387
+ resolveResponsesToolData(identifiers, state) {
4388
+ const aliases = [];
4389
+ if (identifiers.call_id) aliases.push(`responses_call_${identifiers.call_id}`);
4390
+ if (identifiers.item_id) aliases.push(`responses_item_${identifiers.item_id}`);
4391
+ if (typeof identifiers.output_index === "number") aliases.push(`responses_index_${identifiers.output_index}`);
4392
+ let toolData;
4393
+ for (const alias of aliases) {
4394
+ const existing = state.toolCallsMap.get(alias);
4395
+ if (existing) {
4396
+ toolData = existing;
4397
+ break;
3840
4398
  }
3841
4399
  }
3842
- let processedRequest = anthropicRequest;
3843
- if (this.config.healing.enabled) {
3844
- try {
3845
- processedRequest = this.applySyncHealing(anthropicRequest);
3846
- } catch (healingError) {
3847
- console.warn("[A2ORequestAdapter] Healing failed:", healingError);
4400
+ if (!toolData) {
4401
+ const baseAlias = aliases[0] ?? `responses_auto_${++state.toolCallCounter}`;
4402
+ toolData = this.getOrCreateToolCallState(state, baseAlias);
4403
+ if (!aliases.length) {
4404
+ aliases.push(baseAlias);
3848
4405
  }
3849
4406
  }
3850
- const result = this.performBasicConversion(processedRequest, true);
3851
- if (this.config.validation.enabled) {
3852
- try {
3853
- validateOpenAIRequest(result);
3854
- } catch (error) {
3855
- if (this.config.validation.strict) {
3856
- throw error;
3857
- } else {
3858
- console.warn("[A2ORequestAdapter] Output validation warning:", error);
3859
- }
3860
- }
4407
+ for (const alias of aliases) {
4408
+ this.registerToolCallAlias(state, alias, toolData);
3861
4409
  }
3862
- return result;
4410
+ return toolData;
3863
4411
  }
3864
- /**
3865
- * Run the basic conversion logic (extracted from the original logic)
3866
- */
3867
- performBasicConversion(anthropicRequest, skipValidation = false) {
3868
- if (!skipValidation && this.config.enableFormatValidation) {
3869
- FormatValidator.validateClaudeRequest(anthropicRequest);
4412
+ handleResponsesOutputItemAdded(event, state, sseLines) {
4413
+ const item = event?.item;
4414
+ if (!item) return;
4415
+ const itemType = item.type;
4416
+ if (itemType !== "function_call" && itemType !== "tool_call") {
4417
+ return;
3870
4418
  }
3871
- const openaiRequest = {
3872
- model: anthropicRequest.model,
3873
- messages: MessageConverter.convertMessages(anthropicRequest.messages, anthropicRequest.system),
3874
- max_tokens: anthropicRequest.max_tokens,
3875
- temperature: anthropicRequest.temperature,
3876
- stream: anthropicRequest.stream,
3877
- n: 1
3878
- };
3879
- if (anthropicRequest.tools && anthropicRequest.tools.length > 0) {
3880
- openaiRequest.tools = this.convertToolDefinitions(anthropicRequest.tools);
4419
+ const toolData = this.resolveResponsesToolData(
4420
+ { call_id: item.call_id ?? item.id, item_id: item.id, output_index: event.output_index },
4421
+ state
4422
+ );
4423
+ if (!toolData.id) {
4424
+ toolData.id = item.call_id || item.id || `call_${++state.toolCallCounter}`;
3881
4425
  }
3882
- const specialFields = ["_anthropic_protocol", "_rovo_tool_injected", "_routeResult"];
3883
- for (const field of specialFields) {
3884
- if (anthropicRequest[field] !== void 0) {
3885
- openaiRequest[field] = anthropicRequest[field];
3886
- }
4426
+ const name = item.name ?? item.function?.name ?? item.function_call?.name;
4427
+ if (name) {
4428
+ toolData.name = name;
3887
4429
  }
3888
- if (this.config.enableFormatValidation && !FormatValidator.validateOpenAIRequest(openaiRequest)) {
3889
- throw new Error("Generated OpenAI request format is invalid");
4430
+ if (typeof item.arguments === "string" && item.arguments.length > 0) {
4431
+ toolData.pendingChunks.push(item.arguments);
4432
+ }
4433
+ const started = this.maybeStartToolBlock(toolData, state, sseLines);
4434
+ if (started || toolData.blockStartSent) {
4435
+ this.flushPendingToolChunks(toolData, sseLines);
3890
4436
  }
3891
- return openaiRequest;
3892
4437
  }
3893
- /**
3894
- * Apply synchronous healing logic
3895
- * A simplified healing pass that does not rely on async operations
3896
- */
3897
- applySyncHealing(request) {
3898
- const healedRequest = { ...request };
3899
- if (!healedRequest.max_tokens || healedRequest.max_tokens <= 0) {
3900
- healedRequest.max_tokens = 4096;
4438
+ handleResponsesFunctionArgumentsDelta(event, state, sseLines) {
4439
+ const toolData = this.resolveResponsesToolData(
4440
+ { call_id: event.call_id, item_id: event.item_id, output_index: event.output_index },
4441
+ state
4442
+ );
4443
+ if (!toolData.id && event.call_id) {
4444
+ toolData.id = event.call_id;
3901
4445
  }
3902
- if (!healedRequest.messages || !Array.isArray(healedRequest.messages)) {
3903
- throw new Error("Invalid messages array");
4446
+ const name = event.name ?? event.function_name ?? event.function?.name;
4447
+ if (name) {
4448
+ toolData.name = name;
3904
4449
  }
3905
- if (!healedRequest.model) {
3906
- healedRequest.model = "claude-sonnet-4";
4450
+ const argsChunk = this.extractArgumentsDelta(event);
4451
+ if (argsChunk) {
4452
+ toolData.pendingChunks.push(argsChunk);
3907
4453
  }
3908
- for (const message of healedRequest.messages) {
3909
- if (!message.role) {
3910
- message.role = "user";
4454
+ const started = this.maybeStartToolBlock(toolData, state, sseLines);
4455
+ if (started || toolData.blockStartSent) {
4456
+ this.flushPendingToolChunks(toolData, sseLines);
4457
+ }
4458
+ }
4459
+ handleResponsesFunctionArgumentsDone(event, state, sseLines) {
4460
+ const toolData = this.resolveResponsesToolData(
4461
+ { call_id: event.call_id, item_id: event.item_id, output_index: event.output_index },
4462
+ state
4463
+ );
4464
+ if (typeof event.arguments === "string" && event.arguments.length > 0) {
4465
+ toolData.pendingChunks.push(event.arguments);
4466
+ }
4467
+ const started = this.maybeStartToolBlock(toolData, state, sseLines);
4468
+ if (started || toolData.blockStartSent) {
4469
+ this.flushPendingToolChunks(toolData, sseLines);
4470
+ }
4471
+ if (toolData.blockStartSent && !toolData.blockStopSent && toolData.blockIndex !== void 0) {
4472
+ sseLines.push(
4473
+ "event: content_block_stop",
4474
+ `data: {"type":"content_block_stop","index":${toolData.blockIndex}}`,
4475
+ ""
4476
+ );
4477
+ toolData.blockStopSent = true;
4478
+ if (toolData.id && !state.completedToolCalls.includes(toolData.id)) {
4479
+ state.completedToolCalls.push(toolData.id);
4480
+ }
4481
+ this.logDebug("Sent content_block_stop", { toolName: toolData.name, blockIndex: toolData.blockIndex });
4482
+ }
4483
+ }
4484
+ extractResponsesTextDelta(event) {
4485
+ if (!event) return void 0;
4486
+ if (typeof event.delta === "string") return event.delta;
4487
+ if (event.delta && typeof event.delta.text === "string") return event.delta.text;
4488
+ if (typeof event.text === "string") return event.text;
4489
+ if (Array.isArray(event.output_text)) {
4490
+ return event.output_text.map((item) => item?.text ?? "").join("");
4491
+ }
4492
+ return void 0;
4493
+ }
4494
+ extractResponsesThinkingDelta(event) {
4495
+ if (!event) return void 0;
4496
+ if (typeof event.delta === "string") return event.delta;
4497
+ if (event.delta && typeof event.delta.thinking === "string") return event.delta.thinking;
4498
+ if (typeof event.text === "string") return event.text;
4499
+ return void 0;
4500
+ }
4501
+ extractArgumentsDelta(event) {
4502
+ if (!event) return void 0;
4503
+ if (typeof event.delta === "string") return event.delta;
4504
+ if (event.delta && typeof event.delta.arguments === "string") return event.delta.arguments;
4505
+ if (typeof event.arguments_delta === "string") return event.arguments_delta;
4506
+ if (typeof event.arguments === "string") return event.arguments;
4507
+ if (typeof event.partial_json === "string") return event.partial_json;
4508
+ return void 0;
4509
+ }
4510
+ /**
4511
+ * Close any still-open tool call blocks when the stream ends
4512
+ */
4513
+ closeAllToolCallBlocks(state, sseLines) {
4514
+ const processed = /* @__PURE__ */ new Set();
4515
+ for (const toolData of state.toolCallsMap.values()) {
4516
+ if (processed.has(toolData)) continue;
4517
+ processed.add(toolData);
4518
+ if (!toolData.blockStartSent && toolData.pendingChunks.length > 0) {
4519
+ if (!toolData.name) {
4520
+ toolData.name = "unknown_tool";
4521
+ }
4522
+ const started = this.maybeStartToolBlock(toolData, state, sseLines);
4523
+ if (started) {
4524
+ this.flushPendingToolChunks(toolData, sseLines);
4525
+ }
3911
4526
  }
3912
- if (!message.content) {
3913
- message.content = "";
4527
+ if (toolData.blockStartSent && !toolData.blockStopSent && toolData.blockIndex !== void 0) {
4528
+ this.flushPendingToolChunks(toolData, sseLines);
4529
+ sseLines.push(
4530
+ "event: content_block_stop",
4531
+ `data: {"type":"content_block_stop","index":${toolData.blockIndex}}`,
4532
+ ""
4533
+ );
4534
+ toolData.blockStopSent = true;
4535
+ if (toolData.id && !state.completedToolCalls.includes(toolData.id)) {
4536
+ state.completedToolCalls.push(toolData.id);
4537
+ }
4538
+ this.logDebug("Sent content_block_stop", { toolName: toolData.name, blockIndex: toolData.blockIndex });
3914
4539
  }
3915
4540
  }
3916
- return healedRequest;
3917
4541
  }
3918
4542
  /**
3919
- * Get validation error details
4543
+ * Emit the final events - supports the dual thinking+content mode
3920
4544
  */
3921
- getValidationErrors(request, type) {
3922
- return FormatValidator.getValidationErrors(request, type);
4545
+ addFinalEvents(state, sseLines) {
4546
+ this.closeAllToolCallBlocks(state, sseLines);
4547
+ if (state.contentBlockStarted) {
4548
+ const blockIndex = state.thinkingBlockStarted ? 1 : 0;
4549
+ sseLines.push(
4550
+ "event: content_block_stop",
4551
+ `data: {"type":"content_block_stop","index":${blockIndex}}`,
4552
+ ""
4553
+ );
4554
+ } else if (state.thinkingBlockStarted) {
4555
+ sseLines.push(
4556
+ "event: content_block_stop",
4557
+ 'data: {"type":"content_block_stop","index":0}',
4558
+ ""
4559
+ );
4560
+ }
4561
+ const stopReason = state.completedToolCalls.length > 0 ? "tool_use" : "end_turn";
4562
+ const usagePayload = state.usage.input_tokens > 0 ? `{"input_tokens":${state.usage.input_tokens},"output_tokens":${state.usage.output_tokens}}` : `{"output_tokens":${state.usage.output_tokens}}`;
4563
+ sseLines.push(
4564
+ "event: message_delta",
4565
+ `data: {"type":"message_delta","delta":{"stop_reason":"${stopReason}","stop_sequence":null},"usage":${usagePayload}}`,
4566
+ "",
4567
+ "event: message_stop",
4568
+ 'data: {"type":"message_stop"}',
4569
+ ""
4570
+ );
3923
4571
  }
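For reference, the tail that addFinalEvents appends looks like this (taken from the literals above; the token count is a made-up example, and stop_reason becomes "tool_use" when tool calls completed):

// event: message_delta
// data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"output_tokens":42}}
//
// event: message_stop
// data: {"type":"message_stop"}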
3924
4572
  /**
3925
- * Generate a concise validation error summary
4573
+ * Build the standard (non-streaming) response format
3926
4574
  */
3927
- getValidationErrorSummary(error) {
3928
- if (error?.issues?.length > 0) {
3929
- const invalidEnums = error.issues.filter((i) => i.code === "invalid_enum_value");
3930
- const missingFields = error.issues.filter((i) => i.code === "invalid_type");
3931
- const summary = [];
3932
- if (invalidEnums.length > 0) {
3933
- const first = invalidEnums[0];
3934
- summary.push(`invalid_${first.path?.join(".")}: '${first.received}'`);
3935
- }
3936
- if (missingFields.length > 0) {
3937
- summary.push(`${missingFields.length} missing fields`);
4575
+ buildStandardResponse(openaiStream) {
4576
+ const state = this.createConversionState();
4577
+ const lines = openaiStream.split("\n");
4578
+ const noopSseLines = [];
4579
+ for (const line of lines) {
4580
+ if (line.startsWith("data:")) {
4581
+ const dataLine = line.startsWith("data: ") ? line.substring(6) : line.substring(5);
4582
+ if (dataLine.trim() === "[DONE]") break;
4583
+ try {
4584
+ const chunk = JSON.parse(dataLine);
4585
+ noopSseLines.length = 0;
4586
+ this.processStreamChunk(chunk, state, noopSseLines);
4587
+ } catch (error) {
4588
+ }
3938
4589
  }
3939
- return summary.slice(0, 2).join(", ") + (error.issues.length > 5 ? ` (+${error.issues.length - 5} more)` : "");
3940
4590
  }
3941
- return error.message || "Validation failed";
4591
+ const stopReason = state.completedToolCalls.length > 0 ? "tool_use" : "end_turn";
4592
+ return {
4593
+ id: `msg_${Date.now()}`,
4594
+ type: "message",
4595
+ role: "assistant",
4596
+ content: state.textContent ? [
4597
+ {
4598
+ type: "text",
4599
+ text: state.textContent
4600
+ }
4601
+ ] : [],
4602
+ model: "claude-3-sonnet-20240229",
4603
+ stop_reason: stopReason,
4604
+ stop_sequence: null,
4605
+ usage: state.usage
4606
+ };
3942
4607
  }
3943
- };
3944
- var A2ORequestAdapterStatic = {
3945
4608
  /**
3946
- * Convert an Anthropic request to the OpenAI-compatible format (static method)
3947
- * Uses the enhanced converter internally, so every call site gets the enhanced features automatically
4609
+ * Create the conversion state object
3948
4610
  */
3949
- convertAnthropicRequestToOpenAI: (anthropicRequest) => {
3950
- const adapter = new A2ORequestAdapter({
3951
- debugMode: false,
3952
- maxDescriptionLength: 100,
3953
- enableToolNameValidation: true,
3954
- enableFormatValidation: true,
3955
- validation: { enabled: true, strict: false },
3956
- healing: { enabled: true, maxAttempts: 2, enableCustomRules: true },
3957
- recovery: { enabled: false, maxRetries: 0, backoffMs: 1e3 },
3958
- monitoring: { enabled: false, logLevel: "none", enableMetrics: false }
3959
- });
3960
- try {
3961
- const result = adapter.performCoreConversionWithValidation(anthropicRequest);
3962
- return result;
3963
- } catch (error) {
3964
- console.warn(`[A2ORequestAdapterStatic] Enhanced conversion failed, using basic conversion: ${error?.message || error}`);
3965
- return adapter.performBasicConversion(anthropicRequest, true);
4611
+ createConversionState() {
4612
+ return {
4613
+ processedLines: 0,
4614
+ textContent: "",
4615
+ reasoningContent: "",
4616
+ toolCallsMap: /* @__PURE__ */ new Map(),
4617
+ completedToolCalls: [],
4618
+ allSSELines: [],
4619
+ errors: [],
4620
+ usage: {
4621
+ input_tokens: 0,
4622
+ output_tokens: 0
4623
+ },
4624
+ thinkingBlockStarted: false,
4625
+ contentBlockStarted: false,
4626
+ toolCallCounter: 0,
4627
+ nextToolBlockIndex: 1
4628
+ };
4629
+ }
4630
+ parseAnthropicSSEEvent(rawEvent) {
4631
+ const lines = rawEvent.split("\n");
4632
+ let eventType = null;
4633
+ const dataLines = [];
4634
+ for (const line of lines) {
4635
+ if (line.startsWith("event:")) {
4636
+ eventType = line.slice(6).trim();
4637
+ } else if (line.startsWith("data:")) {
4638
+ dataLines.push(line.slice(5).trim());
4639
+ }
3966
4640
  }
3967
- },
4641
+ const dataString = dataLines.join("\n");
4642
+ let data = null;
4643
+ if (dataString) {
4644
+ try {
4645
+ data = JSON.parse(dataString);
4646
+ } catch (error) {
4647
+ this.logDebug("Failed to parse Anthropic SSE JSON", { error });
4648
+ }
4649
+ }
4650
+ return { eventType, data };
4651
+ }
4652
+ extractTextFromAnthropicDelta(data) {
4653
+ const delta = data?.delta;
4654
+ if (!delta) return null;
4655
+ if (typeof delta.text === "string") {
4656
+ return delta.text;
4657
+ }
4658
+ if (delta.type === "text_delta" && typeof delta.text === "string") {
4659
+ return delta.text;
4660
+ }
4661
+ return null;
4662
+ }
4663
+ mapAnthropicStopReasonToOpenAI(reason) {
4664
+ switch (reason) {
4665
+ case "max_tokens":
4666
+ return "length";
4667
+ case "tool_use":
4668
+ return "tool_calls";
4669
+ case "stop_sequence":
4670
+ case "end_turn":
4671
+ default:
4672
+ return "stop";
4673
+ }
4674
+ }
4675
+ buildOpenAIStreamChunk(model, content, finishReason = null) {
4676
+ return {
4677
+ id: `chatcmpl-${Date.now()}`,
4678
+ object: "chat.completion.chunk",
4679
+ created: Math.floor(Date.now() / 1e3),
4680
+ model,
4681
+ choices: [{
4682
+ index: 0,
4683
+ delta: content ? { content } : {},
4684
+ finish_reason: finishReason
4685
+ }]
4686
+ };
4687
+ }
3968
4688
  /**
3969
- * Convert an OpenAI response to the Claude-compatible format (static method)
3970
- * Uses the enhanced converter internally
4689
+ * Convert the message format
3971
4690
  */
3972
- convertOpenAIResponseToClaude: (openaiResponse) => {
3973
- const adapter = new A2ORequestAdapter({
3974
- debugMode: false,
3975
- maxDescriptionLength: 100,
3976
- enableToolNameValidation: true,
3977
- enableFormatValidation: true,
3978
- validation: { enabled: true, strict: false },
3979
- healing: { enabled: true, maxAttempts: 2, enableCustomRules: true },
3980
- recovery: { enabled: false, maxRetries: 0, backoffMs: 1e3 },
3981
- monitoring: { enabled: false, logLevel: "none", enableMetrics: false }
3982
- });
3983
- return adapter.convertOpenAIResponseToClaude(openaiResponse);
3984
- },
4691
+ convertMessages(messages) {
4692
+ return messages.map((msg) => ({
4693
+ role: msg.role,
4694
+ content: msg.content
4695
+ }));
4696
+ }
4697
+ /**
4698
+ * Map an Anthropic model to an OpenAI model
4699
+ */
4700
+ mapAnthropicModelToOpenAI(model) {
4701
+ const supportedModels = [
4702
+ "glm-4.5",
4703
+ "kimi-k2",
4704
+ "deepseek-v3.1",
4705
+ "deepseek-r1",
4706
+ "deepseek-v3",
4707
+ "qwen3-32b",
4708
+ "qwen3-coder",
4709
+ "qwen3-235b",
4710
+ "tstars2.0"
4711
+ ];
4712
+ if (supportedModels.includes(model)) {
4713
+ return model;
4714
+ }
4715
+ const mapping = {
4716
+ "claude-3-sonnet-20240229": "glm-4.5",
4717
+ "claude-3-haiku-20240307": "kimi-k2",
4718
+ "claude-3-opus-20240229": "deepseek-v3.1"
4719
+ };
4720
+ return mapping[model] || "glm-4.5";
4721
+ }
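A quick illustration of the mapping above; the inputs and outputs follow directly from the two tables in the code (the `adapter` instance is assumed):

adapter.mapAnthropicModelToOpenAI("glm-4.5");                 // "glm-4.5"  - already supported, passed through
adapter.mapAnthropicModelToOpenAI("claude-3-haiku-20240307"); // "kimi-k2"  - explicit mapping
adapter.mapAnthropicModelToOpenAI("claude-unknown");          // "glm-4.5"  - fallback default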
3985
4722
  /**
3986
- * Validate the Claude request format (static method)
4723
+ * Check whether the request contains image content
3987
4724
  */
3988
- validateClaudeRequest: (request) => {
3989
- return FormatValidator.validateClaudeRequest(request);
3990
- },
4725
+ hasImageContent(request) {
4726
+ return request.messages.some(
4727
+ (msg) => Array.isArray(msg.content) && msg.content.some((content) => content?.type === "image")
4728
+ );
4729
+ }
3991
4730
  /**
3992
- * Validate the OpenAI request format (static method)
4731
+ * Escape a string for embedding in JSON
3993
4732
  */
3994
- validateOpenAIRequest: (request) => {
3995
- return FormatValidator.validateOpenAIRequest(request);
3996
- },
4733
+ escapeJsonString(str) {
4734
+ return str.replace(/\\/g, "\\\\").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t");
4735
+ }
3997
4736
  /**
3998
- * Get the list of supported tools (static method)
4737
+ * Get the initial SSE events (message_start + ping)
3999
4738
  */
4000
- getSupportedTools: () => {
4001
- return [];
4002
- },
4739
+ getInitialSSEEvents(modelName = "claude-sonnet-4", messageId = `msg_${Date.now()}_${Math.random().toString(36).substring(2, 8)}`) {
4740
+ return [
4741
+ "event: message_start",
4742
+ `data: {"type":"message_start","message":{"id":"${messageId}","type":"message","role":"assistant","model":"${modelName}","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":0,"output_tokens":0}}}`,
4743
+ "",
4744
+ "event: ping",
4745
+ 'data: {"type":"ping"}',
4746
+ ""
4747
+ ];
4748
+ }
4003
4749
  /**
4004
- * Check whether a tool is supported (static method)
4750
+ * Incrementally convert a single OpenAI data chunk into Anthropic SSE events
4751
+ * Used to process streaming fragments one at a time
4005
4752
  */
4006
- isToolSupported: (_toolName) => {
4007
- return true;
4008
- },
4753
+ convertIncrementalChunk(openaiDataLine, state) {
4754
+ const logger = this.config.logger;
4755
+ const sseEvents = [];
4756
+ state.processedLines += 1;
4757
+ if (openaiDataLine.trim() === "[DONE]") {
4758
+ this.addFinalEvents(state, sseEvents);
4759
+ state.allSSELines.push(...sseEvents);
4760
+ return sseEvents;
4761
+ }
4762
+ try {
4763
+ const chunk = JSON.parse(openaiDataLine);
4764
+ this.processStreamChunk(chunk, state, sseEvents);
4765
+ if (sseEvents.length > 0) {
4766
+ state.allSSELines.push(...sseEvents);
4767
+ }
4768
+ return sseEvents;
4769
+ } catch (error) {
4770
+ if (this.config.debugMode) {
4771
+ logger.warn("Failed to parse OpenAI stream chunk in convertIncrementalChunk", {
4772
+ line: openaiDataLine.substring(0, 200),
4773
+ error: error instanceof Error ? error.message : String(error)
4774
+ });
4775
+ }
4776
+ state.errors.push({
4777
+ error: error instanceof Error ? error.message : String(error),
4778
+ raw: openaiDataLine
4779
+ });
4780
+ return [];
4781
+ }
4782
+ }
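A sketch of feeding convertIncrementalChunk one `data:` payload at a time; the surrounding loop and the `writeSSE` sink are assumptions:

// Hypothetical: convert each OpenAI data line as it arrives, then flush with the terminator.
const state = adapter.createIncrementalState();
for (const dataLine of ['{"choices":[{"delta":{"content":"Hi"}}]}', "[DONE]"]) {
  for (const sseLine of adapter.convertIncrementalChunk(dataLine, state)) {
    writeSSE(sseLine); // e.g. res.write(sseLine + "\n")
  }
}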
4009
4783
  /**
4010
- * Get the tool mapping (static method, deprecated)
4784
+ * Expose the internal state factory for external incremental processing flows.
4011
4785
  */
4012
- getToolMapping: (claudeToolName) => {
4013
- return claudeToolName;
4786
+ createIncrementalState() {
4787
+ return this.createConversionState();
4014
4788
  }
4015
4789
  };
4016
4790
 
@@ -4151,15 +4925,36 @@ var ToolCallProcessor = class _ToolCallProcessor {
4151
4925
  * Process incremental tool calls
4152
4926
  */
4153
4927
  static processIncrementalToolCalls(toolCalls, state, sseLines) {
4928
+ const debugEnabled = process.env.AI_PROTOCOL_DEBUG === "true";
4929
+ if (debugEnabled) {
4930
+ console.debug("[ToolProcessor] processIncrementalToolCalls called with:", JSON.stringify(toolCalls, null, 2));
4931
+ }
4154
4932
  for (const toolCall of toolCalls) {
4155
4933
  const toolId = toolCall.id;
4156
4934
  const toolName = toolCall.function?.name;
4157
4935
  const toolArgs = toolCall.function?.arguments;
4936
+ if (debugEnabled) {
4937
+ console.debug("[ToolProcessor] Processing tool call:", {
4938
+ toolId,
4939
+ toolName,
4940
+ hasArgs: !!toolArgs
4941
+ });
4942
+ }
4158
4943
  if (toolName && toolId && !state.toolCallsMap.has(toolId)) {
4944
+ if (debugEnabled) {
4945
+ console.debug("[ToolProcessor] Starting new tool call:", toolName);
4946
+ }
4159
4947
  _ToolCallProcessor.processToolCallStart(toolId, toolName, state, sseLines);
4160
4948
  }
4161
4949
  if (toolArgs) {
4950
+ if (debugEnabled) {
4951
+ console.debug("[ToolProcessor] Processing tool args, calling processToolArgs");
4952
+ }
4162
4953
  _ToolCallProcessor.processToolArgs(toolId, toolArgs, state, sseLines);
4954
+ } else if (toolName && toolId) {
4955
+ _ToolCallProcessor.processToolArgs(toolId, "", state, sseLines);
4956
+ } else {
4957
+ console.warn("\u26A0\uFE0F\u26A0\uFE0F\u26A0\uFE0F [ToolProcessor] No tool args to process! This will result in empty input!");
4163
4958
  }
4164
4959
  }
4165
4960
  }
@@ -4167,15 +4962,36 @@ var ToolCallProcessor = class _ToolCallProcessor {
4167
4962
  * Process tool calls
4168
4963
  */
4169
4964
  static processBatchToolCalls(toolCalls, state, sseLines) {
4965
+ const debugEnabled = process.env.AI_PROTOCOL_DEBUG === "true";
4966
+ if (debugEnabled) {
4967
+ console.debug("[ToolProcessor] processBatchToolCalls called with:", JSON.stringify(toolCalls, null, 2));
4968
+ }
4170
4969
  for (const toolCall of toolCalls) {
4171
4970
  const toolId = toolCall.id;
4172
4971
  const toolName = toolCall.function?.name;
4173
4972
  const toolArgs = toolCall.function?.arguments;
4973
+ if (debugEnabled) {
4974
+ console.debug("[ToolProcessor] Processing batch tool call:", {
4975
+ toolId,
4976
+ toolName,
4977
+ hasArgs: !!toolArgs
4978
+ });
4979
+ }
4174
4980
  if (toolName && toolId && !state.toolCallsMap.has(toolId)) {
4981
+ if (debugEnabled) {
4982
+ console.debug("[ToolProcessor] Starting new batch tool call:", toolName);
4983
+ }
4175
4984
  _ToolCallProcessor.processToolCallStart(toolId, toolName, state, sseLines);
4176
4985
  }
4177
4986
  if (toolArgs) {
4987
+ if (debugEnabled) {
4988
+ console.debug("[ToolProcessor] Processing batch tool args, calling processToolArgs");
4989
+ }
4178
4990
  _ToolCallProcessor.processToolArgs(toolId, toolArgs, state, sseLines);
4991
+ } else if (toolName && toolId) {
4992
+ _ToolCallProcessor.processToolArgs(toolId, "", state, sseLines);
4993
+ } else {
4994
+ console.warn("\u26A0\uFE0F\u26A0\uFE0F\u26A0\uFE0F [ToolProcessor] No batch tool args to process! This will result in empty input!");
4179
4995
  }
4180
4996
  }
4181
4997
  }
@@ -4262,9 +5078,10 @@ var StreamingStateManager = class {
4262
5078
  return {
4263
5079
  hasContent: false,
4264
5080
  hasThinking: false,
4265
- contentBlockIndex: 0,
4266
- thinkingBlockIndex: 1,
4267
- // the thinking block comes after the content block
5081
+ contentBlockIndex: 1,
5082
+ // 🔧 Fix: the text block now comes after thinking (index 1)
5083
+ thinkingBlockIndex: 0,
5084
+ // 🔧 Fix: the thinking block is sent first (index 0)
4268
5085
  toolCallsMap: /* @__PURE__ */ new Map(),
4269
5086
  completedToolCalls: /* @__PURE__ */ new Set(),
4270
5087
  accumulatedUsage: {
@@ -4280,6 +5097,9 @@ var StreamingStateManager = class {
4280
5097
  * 处理文本内容
4281
5098
  */
4282
5099
  static processTextContent(content, state, sseLines) {
5100
+ if (!state.hasThinking && !state.hasContent && content.trim() === "") {
5101
+ return;
5102
+ }
4283
5103
  if (!state.hasContent) {
4284
5104
  sseLines.push(...SSEEventGenerator.generateTextBlockStart(state.contentBlockIndex));
4285
5105
  state.hasContent = true;
@@ -4373,6 +5193,247 @@ function generateMessageId() {
4373
5193
  return `msg_${Date.now()}_${Math.random().toString(36).substring(2, 8)}`;
4374
5194
  }
4375
5195
 
5196
+ // src/core/o2a-sse-adapter/stream-converter.ts
5197
+ var StreamConverter = class {
5198
+ constructor(adapter, options = {}) {
5199
+ this.buffer = "";
5200
+ this.adapter = adapter;
5201
+ this.options = {
5202
+ bufferTimeout: 5e3,
5203
+ errorRecovery: true,
5204
+ maxRetries: 3,
5205
+ debug: false,
5206
+ ...options
5207
+ };
5208
+ this.state = this.adapter.createIncrementalState();
5209
+ this.stats = {
5210
+ chunksProcessed: 0,
5211
+ eventsGenerated: 0,
5212
+ errors: 0,
5213
+ retries: 0,
5214
+ startTime: Date.now(),
5215
+ lastUpdateTime: Date.now(),
5216
+ bufferSize: 0
5217
+ };
5218
+ if (this.options.debug) {
5219
+ console.log("[StreamConverter] \u5DF2\u521D\u59CB\u5316\uFF0C\u914D\u7F6E:", this.options);
5220
+ }
5221
+ }
5222
+ /**
5223
+ * Get the initial events
5224
+ */
5225
+ getInitialEvents() {
5226
+ const events = this.adapter.getInitialSSEEvents(
5227
+ this.options.modelName,
5228
+ this.options.messageId
5229
+ );
5230
+ this.stats.eventsGenerated += events.length;
5231
+ this.stats.lastUpdateTime = Date.now();
5232
+ if (this.options.debug) {
5233
+ console.log("[StreamConverter] \u751F\u6210\u521D\u59CB\u4E8B\u4EF6:", events.length, "\u4E2A");
5234
+ }
5235
+ return events;
5236
+ }
5237
+ /**
5238
+ * Process a single data chunk
5239
+ */
5240
+ processChunk(chunk) {
5241
+ this.stats.chunksProcessed++;
5242
+ this.stats.lastUpdateTime = Date.now();
5243
+ if (this.options.debug) {
5244
+ console.log("[StreamConverter] \u5904\u7406\u6570\u636E\u5757:", chunk.substring(0, 100) + "...");
5245
+ }
5246
+ try {
5247
+ const events = this.processBufferedData(chunk);
5248
+ this.stats.eventsGenerated += events.length;
5249
+ if (this.options.onChunkProcessed) {
5250
+ this.options.onChunkProcessed(chunk, events);
5251
+ }
5252
+ return events;
5253
+ } catch (error) {
5254
+ return this.handleChunkError(error, chunk);
5255
+ }
5256
+ }
5257
+ /**
5258
+ * Finalize stream processing
5259
+ */
5260
+ finalize() {
5261
+ if (this.options.debug) {
5262
+ console.log("[StreamConverter] \u7ED3\u675F\u6D41\u5904\u7406\uFF0C\u7F13\u51B2\u533A\u5927\u5C0F:", this.buffer.length);
5263
+ }
5264
+ let events = [];
5265
+ if (this.buffer.trim()) {
5266
+ console.warn("[StreamConverter] \u7F13\u51B2\u533A\u4E2D\u6709\u672A\u5904\u7406\u6570\u636E\uFF0C\u5F3A\u5236\u5904\u7406:", this.buffer);
5267
+ events = this.processIncompleteBuffer();
5268
+ }
5269
+ try {
5270
+ const finalEvents = this.adapter.convertIncrementalChunk("[DONE]", this.state);
5271
+ events.push(...finalEvents);
5272
+ this.stats.eventsGenerated += finalEvents.length;
5273
+ } catch (error) {
5274
+ console.error("[StreamConverter] \u5904\u7406\u7ED3\u675F\u4E8B\u4EF6\u5931\u8D25:", error);
5275
+ }
5276
+ this.clearBufferTimeout();
5277
+ this.stats.lastUpdateTime = Date.now();
5278
+ if (this.options.debug) {
5279
+ console.log("[StreamConverter] \u6D41\u5904\u7406\u5B8C\u6210\uFF0C\u7EDF\u8BA1\u4FE1\u606F:", this.stats);
5280
+ }
5281
+ return events;
5282
+ }
5283
+ /**
5284
+ * Get the current state
5285
+ */
5286
+ getState() {
5287
+ return { ...this.state };
5288
+ }
5289
+ /**
5290
+ * Reset the state
5291
+ */
5292
+ reset() {
5293
+ this.state = this.adapter.createIncrementalState();
5294
+ this.buffer = "";
5295
+ this.clearBufferTimeout();
5296
+ this.stats = {
5297
+ chunksProcessed: 0,
5298
+ eventsGenerated: 0,
5299
+ errors: 0,
5300
+ retries: 0,
5301
+ startTime: Date.now(),
5302
+ lastUpdateTime: Date.now(),
5303
+ bufferSize: 0
5304
+ };
5305
+ if (this.options.debug) {
5306
+ console.log("[StreamConverter] \u72B6\u6001\u5DF2\u91CD\u7F6E");
5307
+ }
5308
+ }
5309
+ /**
5310
+ * Get statistics
5311
+ */
5312
+ getStats() {
5313
+ return {
5314
+ ...this.stats,
5315
+ bufferSize: this.buffer.length
5316
+ };
5317
+ }
5318
+ /**
5319
+ * Process buffered data
5320
+ */
5321
+ processBufferedData(newChunk) {
5322
+ this.buffer += newChunk;
5323
+ this.stats.bufferSize = this.buffer.length;
5324
+ const lines = this.buffer.split("\n");
5325
+ this.buffer = lines.pop() || "";
5326
+ const events = [];
5327
+ for (const line of lines) {
5328
+ if (line.startsWith("data:")) {
5329
+ const jsonStr = line.slice(5).trim();
5330
+ if (jsonStr && jsonStr !== "[DONE]") {
5331
+ const lineEvents = this.processDataLine(jsonStr);
5332
+ events.push(...lineEvents);
5333
+ } else if (jsonStr === "[DONE]") {
5334
+ const finalEvents = this.adapter.convertIncrementalChunk("[DONE]", this.state);
5335
+ events.push(...finalEvents);
5336
+ }
5337
+ }
5338
+ }
5339
+ this.resetBufferTimeout();
5340
+ return events;
5341
+ }
5342
+ /**
5343
+ * Process a single data line
5344
+ */
5345
+ processDataLine(jsonStr, attempt = 0) {
5346
+ try {
5347
+ const chunkEvents = this.adapter.convertIncrementalChunk(jsonStr, this.state);
5348
+ if (this.options.debug && chunkEvents.length > 0) {
5349
+ console.log("[StreamConverter] \u751F\u6210\u4E8B\u4EF6:", chunkEvents.length, "\u4E2A");
5350
+ }
5351
+ return chunkEvents;
5352
+ } catch (error) {
5353
+ if (this.options.errorRecovery && attempt < (this.options.maxRetries || 3)) {
5354
+ console.warn(`[StreamConverter] Failed to process data line, retry ${attempt + 1}/${this.options.maxRetries}:`, error);
5355
+ this.stats.retries++;
5356
+ return this.processDataLine(jsonStr, attempt + 1);
5357
+ }
5358
+ this.stats.errors++;
5359
+ console.error("[StreamConverter] \u5904\u7406\u6570\u636E\u884C\u6700\u7EC8\u5931\u8D25:", error, "Data:", jsonStr);
5360
+ if (this.options.onError) {
5361
+ this.options.onError(error, {
5362
+ chunk: jsonStr,
5363
+ state: this.state,
5364
+ attempt,
5365
+ totalRetries: this.stats.retries
5366
+ });
5367
+ }
5368
+ return [];
5369
+ }
5370
+ }
5371
+ /**
5372
+ * Handle a chunk processing error
5373
+ */
5374
+ handleChunkError(error, chunk) {
5375
+ this.stats.errors++;
5376
+ if (this.options.debug) {
5377
+ console.error("[StreamConverter] \u5757\u5904\u7406\u9519\u8BEF:", error.message);
5378
+ }
5379
+ if (!this.options.errorRecovery) {
5380
+ throw error;
5381
+ }
5382
+ this.state.errors.push(`Chunk processing error: ${error.message}`);
5383
+ if (this.options.onError) {
5384
+ this.options.onError(error, {
5385
+ chunk,
5386
+ state: this.state,
5387
+ totalRetries: this.stats.retries
5388
+ });
5389
+ }
5390
+ return [];
5391
+ }
5392
+ /**
5393
+ * Process incomplete buffer data
5394
+ */
5395
+ processIncompleteBuffer() {
5396
+ if (!this.buffer.trim()) {
5397
+ return [];
5398
+ }
5399
+ console.warn("[StreamConverter] \u5904\u7406\u4E0D\u5B8C\u6574\u7F13\u51B2\u533A\u6570\u636E:", this.buffer);
5400
+ if (this.buffer.startsWith("data:")) {
5401
+ const jsonStr = this.buffer.slice(5).trim();
5402
+ if (jsonStr) {
5403
+ return this.processDataLine(jsonStr);
5404
+ }
5405
+ }
5406
+ return [];
5407
+ }
5408
+ /**
5409
+ * Reset the buffer timeout
5410
+ */
5411
+ resetBufferTimeout() {
5412
+ this.clearBufferTimeout();
5413
+ if (this.options.bufferTimeout && this.options.bufferTimeout > 0) {
5414
+ this.bufferTimeout = setTimeout(() => {
5415
+ if (this.buffer.trim()) {
5416
+ console.warn("[StreamConverter] \u7F13\u51B2\u533A\u8D85\u65F6\uFF0C\u5F3A\u5236\u5904\u7406\u6570\u636E:", this.buffer);
5417
+ const events = this.processIncompleteBuffer();
5418
+ this.buffer = "";
5419
+ if (events.length > 0 && this.options.onChunkProcessed) {
5420
+ this.options.onChunkProcessed("TIMEOUT_FLUSH", events);
5421
+ }
5422
+ }
5423
+ }, this.options.bufferTimeout);
5424
+ }
5425
+ }
5426
+ /**
5427
+ * Clear the buffer timeout
5428
+ */
5429
+ clearBufferTimeout() {
5430
+ if (this.bufferTimeout) {
5431
+ clearTimeout(this.bufferTimeout);
5432
+ this.bufferTimeout = void 0;
5433
+ }
5434
+ }
5435
+ };
5436
+
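A usage sketch for the StreamConverter above; `o2aAdapter` stands for an O2ASSEAdapter instance (or any adapter exposing createIncrementalState/getInitialSSEEvents/convertIncrementalChunk), and the input fragments are hypothetical:

// Hypothetical: push raw OpenAI SSE text fragments through the converter by hand.
const converter = new StreamConverter(o2aAdapter, { modelName: "claude-sonnet-4" });
const out = [...converter.getInitialEvents()];
for (const fragment of ['data: {"choices":[{"delta":{"content":"Hi"}}]}\n', "data: [DONE]\n"]) {
  out.push(...converter.processChunk(fragment));
}
out.push(...converter.finalize()); // flushes any buffered tail and emits the closing events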
4376
5437
  // src/core/o2a-sse-adapter/adapter.ts
4377
5438
  var O2ASSEAdapter = class {
4378
5439
  constructor(debugMode = false, config = {}) {
@@ -4544,6 +5605,17 @@ var O2ASSEAdapter = class {
4544
5605
  }
4545
5606
  try {
4546
5607
  const data = JSON.parse(dataContent);
5608
+ if ((data.choices?.length === 0 || !data.choices) && data.prompt_filter_results) {
5609
+ if (this.debugMode) {
5610
+ console.warn("\u26A0\uFE0F [O2ASSEAdapter] \u68C0\u6D4B\u5230Azure\u5185\u5BB9\u8FC7\u6EE4\u5668\u54CD\u5E94:", data.prompt_filter_results);
5611
+ }
5612
+ StreamingStateManager.processTextContent(
5613
+ `Error: the content filter blocked the request. Please check whether the input complies with the usage policy.`,
5614
+ state,
5615
+ sseLines
5616
+ );
5617
+ break;
5618
+ }
4547
5619
  const choice = data.choices?.[0];
4548
5620
  const delta = choice?.delta;
4549
5621
  if (!delta) {
@@ -4558,6 +5630,11 @@ var O2ASSEAdapter = class {
4558
5630
  if (delta.content) {
4559
5631
  StreamingStateManager.processTextContent(delta.content, state, sseLines);
4560
5632
  }
5633
+ if (delta.reasoning_content) {
5634
+ if (typeof delta.reasoning_content === "string") {
5635
+ StreamingStateManager.processReasoningContent(delta.reasoning_content, state, sseLines);
5636
+ }
5637
+ }
4561
5638
  if (delta.tool_calls) {
4562
5639
  ToolCallProcessor.processBatchToolCalls(delta.tool_calls, state, sseLines);
4563
5640
  }
@@ -4587,6 +5664,19 @@ var O2ASSEAdapter = class {
4587
5664
  processNonStreamingResponse(data, state, sseLines) {
4588
5665
  const choice = data.choices?.[0];
4589
5666
  if (!choice) {
5667
+ if (data.prompt_filter_results || data.choices?.length === 0) {
5668
+ const errorMsg = "Azure\u5185\u5BB9\u8FC7\u6EE4\u5668\u62E6\u622A\u4E86\u8BF7\u6C42";
5669
+ const filterDetails = data.prompt_filter_results ? JSON.stringify(data.prompt_filter_results).substring(0, 500) : "choices is empty";
5670
+ if (this.debugMode) {
5671
+ console.warn(`⚠️ [O2ASSEAdapter] ${errorMsg}:`, filterDetails);
5672
+ }
5673
+ StreamingStateManager.processTextContent(
5674
+ `Error: the content filter blocked the request. Please check whether the input complies with the usage policy.`,
5675
+ state,
5676
+ sseLines
5677
+ );
5678
+ return;
5679
+ }
4590
5680
  if (this.debugMode) {
4591
5681
  console.warn("\u26A0\uFE0F [O2ASSEAdapter] \u975E\u6D41\u5F0F\u54CD\u5E94\u6CA1\u6709choices\u6570\u636E");
4592
5682
  }
@@ -4627,6 +5717,101 @@ var O2ASSEAdapter = class {
4627
5717
  validateClaudeSSE(sseContent) {
4628
5718
  return FormatValidator2.validateClaudeSSE(sseContent);
4629
5719
  }
5720
+ /**
5721
+ * Convert an OpenAI Response stream directly into an Anthropic SSE stream
5722
+ * This is the new core streaming method, supporting real-time conversion
5723
+ */
5724
+ convertResponseStream(openaiResponse, options = {}) {
5725
+ if (!openaiResponse.body) {
5726
+ throw new Error("Response body is null or undefined");
5727
+ }
5728
+ return this.convertReadableStream(openaiResponse.body, options);
5729
+ }
5730
+ /**
5731
+ * Convert a ReadableStream into an Anthropic SSE stream
5732
+ */
5733
+ convertReadableStream(openaiStream, options = {}) {
5734
+ const converter = this.createStreamConverter(options);
5735
+ const decoder = new TextDecoder();
5736
+ return new ReadableStream({
5737
+ async start(controller) {
5738
+ if (options.debug) {
5739
+ console.log("[O2ASSEAdapter] \u5F00\u59CB\u6D41\u5F0F\u8F6C\u6362\uFF0C\u914D\u7F6E:", options);
5740
+ }
5741
+ try {
5742
+ const initialEvents = converter.getInitialEvents();
5743
+ for (const event of initialEvents) {
5744
+ controller.enqueue(event);
5745
+ }
5746
+ } catch (error) {
5747
+ console.error("[O2ASSEAdapter] \u521D\u59CB\u5316\u5931\u8D25:", error);
5748
+ controller.error(error);
5749
+ return;
5750
+ }
5751
+ const reader = openaiStream.getReader();
5752
+ try {
5753
+ while (true) {
5754
+ const { done, value } = await reader.read();
5755
+ if (done) {
5756
+ try {
5757
+ const finalEvents = converter.finalize();
5758
+ for (const event of finalEvents) {
5759
+ controller.enqueue(event);
5760
+ }
5761
+ if (options.debug) {
5762
+ console.log("[O2ASSEAdapter] \u6D41\u5F0F\u8F6C\u6362\u5B8C\u6210\uFF0C\u7EDF\u8BA1:", converter.getStats());
5763
+ }
5764
+ } catch (error) {
5765
+ console.error("[O2ASSEAdapter] \u7ED3\u675F\u5904\u7406\u5931\u8D25:", error);
5766
+ }
5767
+ break;
5768
+ }
5769
+ const chunk = decoder.decode(value, { stream: true });
5770
+ try {
5771
+ const events = converter.processChunk(chunk);
5772
+ for (const event of events) {
5773
+ controller.enqueue(event);
5774
+ }
5775
+ } catch (error) {
5776
+ console.error("[O2ASSEAdapter] \u5757\u5904\u7406\u5931\u8D25:", error);
5777
+ if (options.errorRecovery === false) {
5778
+ controller.error(error);
5779
+ return;
5780
+ }
5781
+ if (options.onError) {
5782
+ options.onError(error, {
5783
+ chunk,
5784
+ state: converter.getState()
5785
+ });
5786
+ }
5787
+ }
5788
+ }
5789
+ } catch (error) {
5790
+ console.error("[O2ASSEAdapter] \u6D41\u5904\u7406\u5931\u8D25:", error);
5791
+ if (options.onError) {
5792
+ options.onError(error, {
5793
+ chunk: "",
5794
+ state: converter.getState()
5795
+ });
5796
+ }
5797
+ controller.error(error);
5798
+ } finally {
5799
+ controller.close();
5800
+ }
5801
+ }
5802
+ });
5803
+ }
5804
+ /**
5805
+ * Create a stream converter instance
5806
+ * Provides finer-grained control over stream processing
5807
+ */
5808
+ createStreamConverter(options = {}) {
5809
+ return new StreamConverter(this, {
5810
+ modelName: options.modelName || this.config.defaultModel,
5811
+ debug: options.debug || this.debugMode,
5812
+ ...options
5813
+ });
5814
+ }
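A sketch of the intended end-to-end flow for the stream methods above; the endpoint, headers, body, and `res` sink are placeholders, and async iteration over the resulting ReadableStream assumes a runtime that supports it (for example Node 18+):

// Hypothetical: proxy an upstream OpenAI-style streaming response as Anthropic SSE.
const upstream = await fetch("https://example.invalid/v1/chat/completions", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ model: "glm-4.5", stream: true, messages: [] })
});
const anthropicStream = adapter.convertResponseStream(upstream, { modelName: "claude-sonnet-4" });
for await (const sseLine of anthropicStream) {
  res.write(sseLine + "\n"); // each enqueued value is one Anthropic SSE line
}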
4630
5815
  /**
4631
5816
  * Apply enhancements to the SSE conversion
4632
5817
  * Includes input validation, output healing, and more
@@ -4698,15 +5883,56 @@ var O2ASSEAdapterStatic = {
4698
5883
  validateClaudeSSE: (sseContent) => {
4699
5884
  const adapter = new O2ASSEAdapter(false);
4700
5885
  return adapter.validateClaudeSSE(sseContent);
5886
+ },
5887
+ /**
5888
+ * Convert a Response stream to Anthropic SSE (static method)
5889
+ * New: handles streaming conversion directly from a Response object
5890
+ */
5891
+ convertResponseStream: (openaiResponse, options = {}) => {
5892
+ const adapter = new O2ASSEAdapter(options.debug || false, {
5893
+ defaultModel: options.modelName || "claude-sonnet-4",
5894
+ generateUniqueMessageId: !options.messageId,
5895
+ errorDataMaxLength: 500
5896
+ });
5897
+ return adapter.convertResponseStream(openaiResponse, options);
5898
+ },
5899
+ /**
5900
+ * Convert a ReadableStream to Anthropic SSE (static method)
5901
+ * New: handles streaming conversion for any ReadableStream&lt;Uint8Array&gt;
5902
+ */
5903
+ convertReadableStream: (openaiStream, options = {}) => {
5904
+ const adapter = new O2ASSEAdapter(options.debug || false, {
5905
+ defaultModel: options.modelName || "claude-sonnet-4",
5906
+ generateUniqueMessageId: !options.messageId,
5907
+ errorDataMaxLength: 500
5908
+ });
5909
+ return adapter.convertReadableStream(openaiStream, options);
5910
+ },
5911
+ /**
5912
+ * Create a stream converter (static method)
5913
+ * New: provides finer-grained control over stream processing
5914
+ */
5915
+ createStreamConverter: (options = {}) => {
5916
+ const adapter = new O2ASSEAdapter(options.debug || false, {
5917
+ defaultModel: options.modelName || "claude-sonnet-4",
5918
+ generateUniqueMessageId: !options.messageId,
5919
+ errorDataMaxLength: 500
5920
+ });
5921
+ return adapter.createStreamConverter(options);
4701
5922
  }
4702
5923
  };
4703
5924
 
4704
5925
  // src/core/standard/standard-protocol-adapter.ts
4705
5926
  var StandardProtocolAdapter = class {
4706
5927
  constructor(options = {}) {
4707
- this.debugMode = options.debugMode || false;
5928
+ this.debugMode = options.debugMode ?? process.env.AI_PROTOCOL_DEBUG === "true";
4708
5929
  this.sseAdapter = new O2ASSEAdapter(this.debugMode);
4709
5930
  }
5931
+ logDebug(message, meta) {
5932
+ if (this.debugMode) {
5933
+ console.debug(message, meta ?? "");
5934
+ }
5935
+ }
4710
5936
  /**
4711
5937
  * Convert an Anthropic request to the OpenAI request format
4712
5938
  * @param anthropicRequest - the request in Anthropic format
@@ -4730,7 +5956,7 @@ var StandardProtocolAdapter = class {
4730
5956
  */
4731
5957
  convertFromStreamToStandard(openaiRawStream, modelName, messageId) {
4732
5958
  if (this.debugMode) {
4733
- console.log("\u{1F504} [StandardProtocolAdapter] convertFromStreamToStandard \u5F00\u59CB\u5904\u7406:", {
5959
+ this.logDebug("\u{1F504} [StandardProtocolAdapter] convertFromStreamToStandard \u5F00\u59CB\u5904\u7406:", {
4734
5960
  rawStreamLength: openaiRawStream.length,
4735
5961
  modelName,
4736
5962
  messageId,
@@ -4739,14 +5965,14 @@ var StandardProtocolAdapter = class {
4739
5965
  }
4740
5966
  const sseResult = this.sseAdapter.convertToClaudeSSE(openaiRawStream, modelName, messageId);
4741
5967
  if (this.debugMode) {
4742
- console.log("\u{1F504} [StandardProtocolAdapter] SSE\u8F6C\u6362\u5B8C\u6210:", {
5968
+ this.logDebug("\u{1F504} [StandardProtocolAdapter] SSE\u8F6C\u6362\u5B8C\u6210:", {
4743
5969
  sseResultLength: sseResult.length,
4744
5970
  ssePreview: sseResult.substring(0, 500)
4745
5971
  });
4746
5972
  }
4747
5973
  const standardResponse = this.extractStandardResponseFromSSE(sseResult, modelName, messageId);
4748
5974
  if (this.debugMode) {
4749
- console.log("\u{1F504} [StandardProtocolAdapter] \u6807\u51C6\u54CD\u5E94\u63D0\u53D6\u5B8C\u6210:", {
5975
+ this.logDebug("\u{1F504} [StandardProtocolAdapter] \u6807\u51C6\u54CD\u5E94\u63D0\u53D6\u5B8C\u6210:", {
4750
5976
  contentLength: standardResponse.content.length,
4751
5977
  usage: standardResponse.usage,
4752
5978
  stopReason: standardResponse.stop_reason
@@ -4761,7 +5987,7 @@ var StandardProtocolAdapter = class {
4761
5987
  const lines = sseContent.split("\n");
4762
5988
  const finalMessageId = messageId || generateMessageId();
4763
5989
  if (this.debugMode) {
4764
- console.log("\u{1F50D} [StandardProtocolAdapter] extractStandardResponseFromSSE \u5F00\u59CB\u89E3\u6790:", {
5990
+ this.logDebug("\u{1F50D} [StandardProtocolAdapter] extractStandardResponseFromSSE \u5F00\u59CB\u89E3\u6790:", {
4765
5991
  totalLines: lines.length,
4766
5992
  messageId: finalMessageId
4767
5993
  });
@@ -4780,7 +6006,10 @@ var StandardProtocolAdapter = class {
4780
6006
  }
4781
6007
  };
4782
6008
  let currentTextContent = "";
6009
+ let currentThinkingContent = "";
4783
6010
  const toolCalls = /* @__PURE__ */ new Map();
6011
+ const toolInputBuffers = /* @__PURE__ */ new Map();
6012
+ const indexToToolId = /* @__PURE__ */ new Map();
4784
6013
  let processedDataLines = 0;
4785
6014
  for (const line of lines) {
4786
6015
  if (line.startsWith("data: ")) {
@@ -4793,24 +6022,80 @@ var StandardProtocolAdapter = class {
4793
6022
  if (data.type === "content_block_start") {
4794
6023
  const contentBlock = data.content_block;
4795
6024
  if (contentBlock.type === "tool_use") {
6025
+ const toolIndex = data.index;
4796
6026
  toolCalls.set(contentBlock.id, {
4797
6027
  type: "tool_use",
4798
6028
  id: contentBlock.id,
4799
6029
  name: contentBlock.name,
4800
6030
  input: contentBlock.input || {}
6031
+ // starts as an empty object; filled in later
6032
+ });
6033
+ toolInputBuffers.set(toolIndex, "");
6034
+ indexToToolId.set(toolIndex, contentBlock.id);
6035
+ console.log("\u{1F527}\u{1F527}\u{1F527} [StandardProtocolAdapter] \u6DFB\u52A0\u5DE5\u5177\u8C03\u7528:", {
6036
+ index: toolIndex,
6037
+ toolId: contentBlock.id,
6038
+ name: contentBlock.name,
6039
+ indexToToolIdSize: indexToToolId.size
4801
6040
  });
4802
- if (this.debugMode) {
4803
- console.log("\u{1F527} [StandardProtocolAdapter] \u6DFB\u52A0\u5DE5\u5177\u8C03\u7528:", contentBlock);
4804
- }
4805
6041
  }
4806
6042
  }
4807
6043
  if (data.type === "content_block_delta" && data.delta?.type === "text_delta") {
4808
6044
  currentTextContent += data.delta.text;
4809
6045
  if (this.debugMode && currentTextContent.length % 50 === 0) {
4810
- console.log(`\u{1F4DD} [StandardProtocolAdapter] accumulated text content (${currentTextContent.length} chars):`, currentTextContent.substring(currentTextContent.length - 20));
6046
+ this.logDebug(`\u{1F4DD} [StandardProtocolAdapter] accumulated text content (${currentTextContent.length} chars)`, currentTextContent.substring(currentTextContent.length - 20));
6047
+ }
6048
+ }
6049
+ if (data.type === "content_block_delta" && data.delta?.type === "thinking_delta") {
6050
+ currentThinkingContent += data.delta.thinking;
6051
+ if (this.debugMode && currentThinkingContent.length % 100 === 0) {
6052
+ this.logDebug(`\u{1F9E0} [StandardProtocolAdapter] accumulated thinking content (${currentThinkingContent.length} chars)`);
4811
6053
  }
4812
6054
  }
4813
6055
  if (data.type === "content_block_delta" && data.delta?.type === "input_json_delta") {
6056
+ const toolIndex = data.index;
6057
+ const toolId = indexToToolId.get(toolIndex);
6058
+ if (this.debugMode) {
6059
+ this.logDebug(`\u{1F527} [StandardProtocolAdapter] input_json_delta event detected`, {
6060
+ toolIndex,
6061
+ toolId: toolId || "NOT_FOUND",
6062
+ delta: data.delta.partial_json
6063
+ });
6064
+ }
6065
+ if (toolId) {
6066
+ const currentBuffer = toolInputBuffers.get(toolIndex) || "";
6067
+ const newBuffer = currentBuffer + data.delta.partial_json;
6068
+ toolInputBuffers.set(toolIndex, newBuffer);
6069
+ if (this.debugMode) {
6070
+ this.logDebug(`\u{1F527} [StandardProtocolAdapter] accumulating tool arguments (index=${toolIndex}, id=${toolId})`, {
6071
+ bufferLength: newBuffer.length
6072
+ });
6073
+ }
6074
+ } else {
6075
+ console.warn(`\u26A0\uFE0F [StandardProtocolAdapter] no toolId found for index=${toolIndex}`);
6076
+ }
6077
+ }
6078
+ if (data.type === "content_block_stop") {
6079
+ const toolIndex = data.index;
6080
+ const toolId = indexToToolId.get(toolIndex);
6081
+ if (toolId) {
6082
+ const jsonBuffer = toolInputBuffers.get(toolIndex);
6083
+ const tool = toolCalls.get(toolId);
6084
+ if (jsonBuffer && tool) {
6085
+ try {
6086
+ const parsedInput = JSON.parse(jsonBuffer);
6087
+ tool.input = parsedInput;
6088
+ if (this.debugMode) {
6089
+ this.logDebug(`\u2705 [StandardProtocolAdapter] tool arguments parsed (index=${toolIndex}, id=${toolId})`, parsedInput);
6090
+ }
6091
+ } catch (parseError) {
6092
+ console.warn(`\u26A0\uFE0F [StandardProtocolAdapter] tool argument JSON parse failed (index=${toolIndex}, id=${toolId}):`, {
6093
+ buffer: jsonBuffer,
6094
+ error: parseError
6095
+ });
6096
+ }
6097
+ }
6098
+ }
4814
6099
  }
4815
6100
  if (data.type === "message_delta") {
4816
6101
  if (data.delta?.stop_reason) {
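The three maps introduced above form a small state machine for streamed tool calls: content_block_start registers the tool and records its block index, input_json_delta appends JSON fragments into a per-index buffer, and content_block_stop parses the finished buffer into the tool's input. The same pattern in isolation, a sketch with event objects reduced to just the fields the adapter reads:

// Standalone sketch of the index -> buffer -> parsed-input accumulation.
function accumulateToolInputs(events) {
  const toolCalls = new Map();        // toolId -> { type, id, name, input }
  const toolInputBuffers = new Map(); // block index -> JSON fragment buffer
  const indexToToolId = new Map();    // block index -> toolId
  for (const ev of events) {
    if (ev.type === "content_block_start" && ev.content_block.type === "tool_use") {
      toolCalls.set(ev.content_block.id, {
        type: "tool_use",
        id: ev.content_block.id,
        name: ev.content_block.name,
        input: {},
      });
      toolInputBuffers.set(ev.index, "");
      indexToToolId.set(ev.index, ev.content_block.id);
    } else if (ev.type === "content_block_delta" && ev.delta?.type === "input_json_delta") {
      const buf = toolInputBuffers.get(ev.index) || "";
      toolInputBuffers.set(ev.index, buf + ev.delta.partial_json);
    } else if (ev.type === "content_block_stop") {
      const id = indexToToolId.get(ev.index);
      const buf = toolInputBuffers.get(ev.index);
      if (id && buf) {
        try {
          toolCalls.get(id).input = JSON.parse(buf);
        } catch {
          // malformed JSON: leave input as {} , mirroring the warn-and-continue above
        }
      }
    }
  }
  return [...toolCalls.values()];
}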
@@ -4819,7 +6104,7 @@ var StandardProtocolAdapter = class {
4819
6104
  if (data.usage) {
4820
6105
  response.usage = data.usage;
4821
6106
  if (this.debugMode) {
4822
- console.log("\u{1F4CA} [StandardProtocolAdapter] \u66F4\u65B0usage\u4FE1\u606F:", data.usage);
6107
+ this.logDebug("\u{1F4CA} [StandardProtocolAdapter] \u66F4\u65B0usage\u4FE1\u606F:", data.usage);
4823
6108
  }
4824
6109
  }
4825
6110
  }
@@ -4831,6 +6116,12 @@ var StandardProtocolAdapter = class {
4831
6116
  }
4832
6117
  }
4833
6118
  }
6119
+ if (currentThinkingContent.trim()) {
6120
+ response.content.push({
6121
+ type: "thinking",
6122
+ thinking: currentThinkingContent.trim()
6123
+ });
6124
+ }
4834
6125
  if (currentTextContent.trim()) {
4835
6126
  response.content.push({
4836
6127
  type: "text",
@@ -4839,8 +6130,9 @@ var StandardProtocolAdapter = class {
4839
6130
  }
4840
6131
  response.content.push(...Array.from(toolCalls.values()));
4841
6132
  if (this.debugMode) {
4842
- console.log("\u2705 [StandardProtocolAdapter] \u6807\u51C6\u54CD\u5E94\u6784\u5EFA\u5B8C\u6210:", {
6133
+ this.logDebug("\u2705 [StandardProtocolAdapter] \u6807\u51C6\u54CD\u5E94\u6784\u5EFA\u5B8C\u6210:", {
4843
6134
  contentCount: response.content.length,
6135
+ thinkingLength: currentThinkingContent.length,
4844
6136
  textLength: currentTextContent.length,
4845
6137
  toolCallsCount: toolCalls.size,
4846
6138
  finalUsage: response.usage,
@@ -5699,6 +6991,7 @@ export {
5699
6991
  createAnthropicSDK,
5700
6992
  createOpenAISDK,
5701
6993
  createValidator,
6994
+ downloadImageAsBase64,
5702
6995
  errorRecovery,
5703
6996
  getAllHealingStrategies,
5704
6997
  getGlobalLogger,
@@ -5708,6 +7001,8 @@ export {
5708
7001
  healO2ARequest,
5709
7002
  healO2AResponse,
5710
7003
  healingValidate,
7004
+ isBase64DataUri,
7005
+ isExternalUrl,
5711
7006
  isRecoverable,
5712
7007
  protocolHealer,
5713
7008
  safeValidate,