ai-protocol-adapters 1.0.0-alpha.2 → 1.0.0-alpha.20

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -453,6 +453,7 @@ __export(index_exports, {
453
453
  createAnthropicSDK: () => createAnthropicSDK,
454
454
  createOpenAISDK: () => createOpenAISDK,
455
455
  createValidator: () => createValidator,
456
+ downloadImageAsBase64: () => downloadImageAsBase64,
456
457
  errorRecovery: () => errorRecovery,
457
458
  getAllHealingStrategies: () => getAllHealingStrategies,
458
459
  getGlobalLogger: () => getGlobalLogger,
@@ -462,6 +463,8 @@ __export(index_exports, {
462
463
  healO2ARequest: () => healO2ARequest,
463
464
  healO2AResponse: () => healO2AResponse,
464
465
  healingValidate: () => healingValidate,
466
+ isBase64DataUri: () => isBase64DataUri,
467
+ isExternalUrl: () => isExternalUrl,
465
468
  isRecoverable: () => isRecoverable,
466
469
  protocolHealer: () => protocolHealer,
467
470
  safeValidate: () => safeValidate,
@@ -504,523 +507,463 @@ function getGlobalLogger() {
504
507
  return globalLogger;
505
508
  }
506
509
 
507
- // src/core/streaming/streaming-protocol-adapter.ts
508
- var StreamingProtocolAdapter = class {
509
- constructor(options = {}) {
510
- this.config = {
511
- debugMode: options.debugMode ?? false,
512
- validateInput: options.validateInput ?? false,
513
- validateOutput: options.validateOutput ?? false,
514
- autoHeal: options.autoHeal ?? false,
515
- timeout: options.timeout ?? 3e4,
516
- retries: options.retries ?? 3,
517
- bufferSize: options.bufferSize ?? 1024,
518
- logger: options.logger ?? getGlobalLogger()
519
- };
510
+ // src/core/a2o-request-adapter/config.ts
511
+ var DEFAULT_CONFIG = {
512
+ // 原有配置
513
+ debugMode: false,
514
+ maxDescriptionLength: 100,
515
+ enableToolNameValidation: true,
516
+ enableFormatValidation: true,
517
+ // 新增默认配置
518
+ validation: {
519
+ enabled: true,
520
+ strict: false,
521
+ // 默认开启自动修复
522
+ customSchemas: {}
523
+ },
524
+ healing: {
525
+ enabled: true,
526
+ maxAttempts: 3,
527
+ enableCustomRules: true
528
+ },
529
+ recovery: {
530
+ enabled: true,
531
+ maxRetries: 2,
532
+ backoffMs: 1e3
533
+ },
534
+ monitoring: {
535
+ enabled: false,
536
+ logLevel: "warn",
537
+ enableMetrics: false
538
+ },
539
+ imageProxy: {
540
+ enabled: true,
541
+ // 默认启用图片代理(解决GitHub Copilot等不支持外部URL的问题)
542
+ timeout: 1e4,
543
+ // 10秒超时
544
+ maxSize: 10 * 1024 * 1024
545
+ // 10MB最大文件大小
520
546
  }
547
+ };
548
+ var SUPPORTED_IMAGE_TYPES = [
549
+ "image/jpeg",
550
+ "image/png",
551
+ "image/gif",
552
+ "image/webp"
553
+ ];
554
+ var TOOL_CONVERSION = {
521
555
  /**
522
- * 转换Anthropic请求为OpenAI格式
556
+ * 终极泛化:完全移除工具名称映射
557
+ * 基于GitHub Copilot API测试结果,100%保持原始格式
523
558
  */
524
- convertAnthropicToOpenAI(anthropicRequest) {
525
- const logger = this.config.logger;
526
- if (this.config.debugMode) {
527
- logger.debug("Converting Anthropic request to OpenAI format", { model: anthropicRequest.model });
559
+ PRESERVE_ORIGINAL_NAMES: true,
560
+ /**
561
+ * 默认工具描述
562
+ */
563
+ DEFAULT_DESCRIPTION: "Tool description",
564
+ /**
565
+ * 未知工具回退名称
566
+ */
567
+ UNKNOWN_TOOL_FALLBACK: "unknown_tool"
568
+ };
569
+
570
+ // src/core/a2o-request-adapter/image-proxy.ts
571
+ var SUPPORTED_IMAGE_MIME_TYPES = [
572
+ "image/jpeg",
573
+ "image/png",
574
+ "image/gif",
575
+ "image/webp"
576
+ ];
577
+ async function downloadImageAsBase64(url, options = {}) {
578
+ const {
579
+ timeout = 1e4,
580
+ maxSize = 10 * 1024 * 1024,
581
+ // 10MB
582
+ userAgent = "ai-protocol-adapters/1.0"
583
+ } = options;
584
+ try {
585
+ const controller = new AbortController();
586
+ const timeoutId = setTimeout(() => controller.abort(), timeout);
587
+ const response = await fetch(url, {
588
+ signal: controller.signal,
589
+ headers: {
590
+ "User-Agent": userAgent
591
+ }
592
+ });
593
+ clearTimeout(timeoutId);
594
+ if (!response.ok) {
595
+ throw new Error(`HTTP ${response.status}: ${response.statusText}`);
528
596
  }
529
- const openaiRequest = {
530
- model: this.mapAnthropicModelToOpenAI(anthropicRequest.model),
531
- messages: this.convertMessages(anthropicRequest.messages),
532
- stream: anthropicRequest.stream ?? true,
533
- temperature: anthropicRequest.temperature,
534
- max_tokens: anthropicRequest.max_tokens
535
- };
536
- if (anthropicRequest.tools) {
537
- openaiRequest.tools = anthropicRequest.tools.map((tool) => ({
538
- type: "function",
539
- function: {
540
- name: tool.name,
541
- description: tool.description,
542
- parameters: tool.input_schema
543
- }
544
- }));
597
+ const contentType = response.headers.get("content-type");
598
+ if (!contentType || !SUPPORTED_IMAGE_MIME_TYPES.some((type) => contentType.includes(type))) {
599
+ throw new Error(`Unsupported content type: ${contentType}`);
545
600
  }
546
- const hasImages = this.hasImageContent(anthropicRequest);
547
- return {
548
- openaiRequest,
549
- metadata: {
550
- hasImages,
551
- requiresVisionHeaders: hasImages
552
- }
553
- };
601
+ const contentLength = response.headers.get("content-length");
602
+ if (contentLength && parseInt(contentLength) > maxSize) {
603
+ throw new Error(`Image too large: ${contentLength} bytes (max: ${maxSize} bytes)`);
604
+ }
605
+ const arrayBuffer = await response.arrayBuffer();
606
+ if (arrayBuffer.byteLength > maxSize) {
607
+ throw new Error(`Image too large: ${arrayBuffer.byteLength} bytes (max: ${maxSize} bytes)`);
608
+ }
609
+ const base64 = Buffer.from(arrayBuffer).toString("base64");
610
+ return `data:${contentType};base64,${base64}`;
611
+ } catch (error) {
612
+ if (error.name === "AbortError") {
613
+ throw new Error(`Image download timeout after ${timeout}ms`);
614
+ }
615
+ throw new Error(`Failed to download image from ${url}: ${error.message}`);
554
616
  }
617
+ }
618
+ function isExternalUrl(url) {
619
+ return url.startsWith("http://") || url.startsWith("https://");
620
+ }
621
+ function isBase64DataUri(url) {
622
+ return url.startsWith("data:");
623
+ }
624
+
625
+ // src/core/a2o-request-adapter/message-converter.ts
626
+ var MessageConverter = class {
555
627
  /**
556
- * 转换OpenAI流式响应为Anthropic SSE格式
628
+ * 转换消息格式,正确处理工具调用和工具结果
629
+ * 修复关键问题:将tool_use转换为tool_calls,tool_result转换为role:"tool"消息
630
+ * 使用tool_use_id溯回工具名称解决unknown_tool问题
557
631
  */
558
- convertOpenAIStreamToAnthropic(openaiStream, originalRequest) {
559
- const logger = this.config.logger;
560
- try {
561
- if (this.config.debugMode) {
562
- logger.debug("Converting OpenAI stream to Anthropic SSE", {
563
- streamLength: openaiStream.length,
564
- model: originalRequest.model
565
- });
632
+ static convertMessages(messages, system) {
633
+ const debugEnabled = process.env.AI_PROTOCOL_DEBUG === "true";
634
+ if (debugEnabled) {
635
+ if (system !== void 0) {
636
+ console.debug("[MessageConverter] convertMessages called with system:", JSON.stringify(system, null, 2));
637
+ } else {
638
+ console.debug("[MessageConverter] convertMessages called WITHOUT system parameter");
566
639
  }
567
- if (!openaiStream || openaiStream.trim() === "") {
568
- return {
569
- success: false,
570
- error: "Empty stream response",
571
- anthropicSSE: "",
572
- anthropicStandardResponse: null
573
- };
640
+ }
641
+ const context = this.createConversionContext(messages);
642
+ const convertedMessages = [];
643
+ for (const msg of messages) {
644
+ if (Array.isArray(msg.content)) {
645
+ const processedMessages = this.processComplexMessage(msg, context);
646
+ convertedMessages.push(...processedMessages);
647
+ } else {
648
+ const safeMsg = { ...msg };
649
+ if (safeMsg.content === null || safeMsg.content === void 0) {
650
+ safeMsg.content = "";
651
+ }
652
+ convertedMessages.push(safeMsg);
653
+ }
654
+ }
655
+ if (system) {
656
+ const systemMessage = this.processSystemMessage(system);
657
+ if (systemMessage) {
658
+ convertedMessages.unshift(systemMessage);
659
+ if (debugEnabled) {
660
+ console.debug("[MessageConverter] System message added to messages array at index 0");
661
+ }
574
662
  }
575
- const anthropicSSE = this.convertToAnthropicSSE(openaiStream, originalRequest.model);
576
- const anthropicStandardResponse = this.buildStandardResponse(openaiStream);
577
- return {
578
- success: true,
579
- anthropicSSE,
580
- anthropicStandardResponse
581
- };
582
- } catch (error) {
583
- const errorMessage = error instanceof Error ? error.message : "Unknown conversion error";
584
- logger.error("Stream conversion failed", { error: errorMessage });
585
- return {
586
- success: false,
587
- error: errorMessage,
588
- anthropicSSE: "",
589
- anthropicStandardResponse: null
590
- };
591
663
  }
664
+ if (debugEnabled) {
665
+ console.debug("[MessageConverter] Final converted messages count:", convertedMessages.length);
666
+ console.debug("[MessageConverter] First message:", JSON.stringify(convertedMessages[0], null, 2));
667
+ }
668
+ return convertedMessages.map((msg) => {
669
+ if (Array.isArray(msg.tools)) {
670
+ msg.tools = msg.tools.map((tool) => {
671
+ if (tool?.type === "function" && tool.function) {
672
+ const description = tool.function.description?.trim() || "Converted tool with no description provided.";
673
+ return {
674
+ ...tool,
675
+ function: {
676
+ ...tool.function,
677
+ description
678
+ }
679
+ };
680
+ }
681
+ return tool;
682
+ });
683
+ }
684
+ return msg;
685
+ });
592
686
  }
593
687
  /**
594
- * 将OpenAI流转换为Anthropic SSE格式
688
+ * 创建消息转换上下文
595
689
  */
596
- convertToAnthropicSSE(openaiStream, modelName) {
597
- const lines = openaiStream.split("\n");
598
- const sseLines = [];
599
- const state = this.createConversionState();
600
- sseLines.push(
601
- "event: message_start",
602
- `data: {"type":"message_start","message":{"id":"msg_${Date.now()}","type":"message","role":"assistant","model":"${modelName}","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":0,"output_tokens":0}}}`,
603
- ""
604
- );
605
- for (const line of lines) {
606
- if (line.startsWith("data:")) {
607
- const dataLine = line.substring(5);
608
- if (dataLine.trim() === "[DONE]") {
609
- this.addFinalEvents(state, sseLines);
610
- break;
611
- }
612
- try {
613
- const chunk = JSON.parse(dataLine);
614
- this.processStreamChunk(chunk, state, sseLines);
615
- } catch (error) {
616
- if (this.config.debugMode) {
617
- this.config.logger.warn("Failed to parse stream chunk", { line: dataLine.substring(0, 200) });
690
+ static createConversionContext(messages) {
691
+ const toolIdToNameMap = /* @__PURE__ */ new Map();
692
+ for (const msg of messages) {
693
+ if (Array.isArray(msg.content)) {
694
+ for (const item of msg.content) {
695
+ if (typeof item === "object" && item !== null && item.type === "tool_use") {
696
+ toolIdToNameMap.set(item.id, item.name);
618
697
  }
619
698
  }
620
699
  }
621
700
  }
622
- return sseLines.join("\n");
701
+ return {
702
+ toolIdToNameMap,
703
+ hasSystemMessage: false
704
+ };
623
705
  }
624
706
  /**
625
- * 处理单个流式数据块 - 支持thinking和content双模式
707
+ * 处理复杂消息(包含多种内容类型)
626
708
  */
627
- processStreamChunk(chunk, state, sseLines) {
628
- const choice = chunk.choices?.[0];
629
- if (!choice) return;
630
- const delta = choice.delta;
631
- if (delta.reasoning_content) {
632
- state.reasoningContent += delta.reasoning_content;
633
- if (!state.thinkingBlockStarted) {
634
- sseLines.push(
635
- "event: content_block_start",
636
- 'data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":"<thinking>"}}',
637
- ""
638
- );
639
- state.thinkingBlockStarted = true;
709
+ static processComplexMessage(msg, context) {
710
+ const { textContent, toolUses, toolResults } = this.categorizeContent(msg.content);
711
+ const resultMessages = [];
712
+ if (msg.role === "assistant" && toolUses.length > 0) {
713
+ const assistantMessage = this.createAssistantMessageWithToolCalls(textContent, toolUses);
714
+ resultMessages.push(assistantMessage);
715
+ } else if (toolResults.length > 0) {
716
+ const toolMessages = this.createToolResultMessages(toolResults, context.toolIdToNameMap);
717
+ resultMessages.push(...toolMessages);
718
+ const textMessage = this.createTextMessage(msg.role, textContent);
719
+ if (textMessage) {
720
+ resultMessages.push(textMessage);
640
721
  }
641
- sseLines.push(
642
- "event: content_block_delta",
643
- `data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"${this.escapeJsonString(delta.reasoning_content)}"}}`,
644
- ""
645
- );
646
- }
647
- if (delta.content && delta.content !== "") {
648
- if (state.thinkingBlockStarted && !state.contentBlockStarted) {
649
- sseLines.push(
650
- "event: content_block_delta",
651
- 'data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"</thinking>\\n\\n"}}',
652
- "",
653
- "event: content_block_stop",
654
- 'data: {"type":"content_block_stop","index":0}',
655
- "",
656
- "event: content_block_start",
657
- 'data: {"type":"content_block_start","index":1,"content_block":{"type":"text","text":""}}',
658
- ""
659
- );
660
- state.contentBlockStarted = true;
661
- } else if (!state.contentBlockStarted && !state.thinkingBlockStarted) {
662
- sseLines.push(
663
- "event: content_block_start",
664
- 'data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}}',
665
- ""
666
- );
667
- state.contentBlockStarted = true;
722
+ } else if (textContent.length > 0) {
723
+ const textMessage = this.createTextMessage(msg.role, textContent);
724
+ if (textMessage) {
725
+ resultMessages.push(textMessage);
668
726
  }
669
- state.textContent += delta.content;
670
- const blockIndex = state.thinkingBlockStarted ? 1 : 0;
671
- sseLines.push(
672
- "event: content_block_delta",
673
- `data: {"type":"content_block_delta","index":${blockIndex},"delta":{"type":"text_delta","text":"${this.escapeJsonString(delta.content)}"}}`,
674
- ""
675
- );
676
- }
677
- if (delta.tool_calls) {
678
- this.processToolCalls(delta.tool_calls, state, sseLines);
679
- }
680
- if (chunk.usage) {
681
- state.usage.input_tokens = chunk.usage.prompt_tokens;
682
- state.usage.output_tokens = chunk.usage.completion_tokens;
683
727
  }
728
+ return resultMessages;
684
729
  }
685
730
  /**
686
- * 处理工具调用
731
+ * 分类内容块
687
732
  */
688
- processToolCalls(toolCalls, state, sseLines) {
689
- for (const toolCall of toolCalls) {
690
- if (toolCall.id && toolCall.function?.name) {
691
- const toolData = {
692
- id: toolCall.id,
693
- name: toolCall.function.name,
694
- input: toolCall.function.arguments || ""
695
- };
696
- state.toolCallsMap.set(toolCall.id, toolData);
697
- sseLines.push(
698
- "event: content_block_start",
699
- `data: {"type":"content_block_start","index":${state.completedToolCalls.length + 1},"content_block":{"type":"tool_use","id":"${toolCall.id}","name":"${toolCall.function.name}","input":{}}}`,
700
- ""
701
- );
733
+ static categorizeContent(content) {
734
+ const textContent = [];
735
+ const toolUses = [];
736
+ const toolResults = [];
737
+ for (const item of content) {
738
+ if (typeof item === "string") {
739
+ textContent.push({ type: "text", text: item });
740
+ } else if (typeof item === "object" && item !== null) {
741
+ switch (item.type) {
742
+ case "text":
743
+ textContent.push(item);
744
+ break;
745
+ case "tool_use":
746
+ toolUses.push(item);
747
+ break;
748
+ case "tool_result":
749
+ toolResults.push(item);
750
+ break;
751
+ case "image":
752
+ const imageContent = this.convertImageContent(item);
753
+ if (imageContent) {
754
+ textContent.push(imageContent);
755
+ }
756
+ break;
757
+ }
702
758
  }
703
759
  }
760
+ return { textContent, toolUses, toolResults };
704
761
  }
705
762
  /**
706
- * 添加最终事件 - 支持thinking+content双模式
763
+ * 转换图片内容格式
764
+ * 支持两种格式:URL 和 base64
707
765
  */
708
- addFinalEvents(state, sseLines) {
709
- if (state.contentBlockStarted) {
710
- const blockIndex = state.thinkingBlockStarted ? 1 : 0;
711
- sseLines.push(
712
- "event: content_block_stop",
713
- `data: {"type":"content_block_stop","index":${blockIndex}}`,
714
- ""
715
- );
716
- } else if (state.thinkingBlockStarted) {
717
- sseLines.push(
718
- "event: content_block_delta",
719
- 'data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"</thinking>"}}',
720
- "",
721
- "event: content_block_stop",
722
- 'data: {"type":"content_block_stop","index":0}',
723
- ""
724
- );
766
+ static convertImageContent(item) {
767
+ if (!item.source) {
768
+ return null;
725
769
  }
726
- sseLines.push(
727
- "event: message_delta",
728
- `data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"output_tokens":${state.usage.output_tokens}}}`,
729
- "",
730
- "event: message_stop",
731
- 'data: {"type":"message_stop"}',
732
- ""
733
- );
734
- }
735
- /**
736
- * 构建标准响应格式
737
- */
738
- buildStandardResponse(openaiStream) {
739
- const state = this.createConversionState();
740
- const lines = openaiStream.split("\n");
741
- for (const line of lines) {
742
- if (line.startsWith("data: ")) {
743
- const dataLine = line.substring(6);
744
- if (dataLine.trim() === "[DONE]") break;
745
- try {
746
- const chunk = JSON.parse(dataLine);
747
- const choice = chunk.choices?.[0];
748
- if (!choice) continue;
749
- const delta = choice.delta;
750
- if (delta.content) {
751
- state.textContent += delta.content;
752
- }
753
- if (chunk.usage) {
754
- state.usage.input_tokens = chunk.usage.prompt_tokens;
755
- state.usage.output_tokens = chunk.usage.completion_tokens;
756
- }
757
- } catch (error) {
770
+ if (item.source.type === "url" && item.source.url) {
771
+ return {
772
+ type: "image_url",
773
+ image_url: {
774
+ url: item.source.url,
775
+ detail: "auto"
776
+ // OpenAI 支持的可选参数
758
777
  }
759
- }
778
+ };
760
779
  }
761
- return {
762
- id: `msg_${Date.now()}`,
763
- type: "message",
764
- role: "assistant",
765
- content: [
766
- {
767
- type: "text",
768
- text: state.textContent
780
+ if (item.source.type === "base64" && item.source.data && item.source.media_type) {
781
+ if (!SUPPORTED_IMAGE_TYPES.includes(item.source.media_type)) {
782
+ console.warn(`\u4E0D\u652F\u6301\u7684\u56FE\u7247\u683C\u5F0F: ${item.source.media_type}`);
783
+ return null;
784
+ }
785
+ const dataUri = `data:${item.source.media_type};base64,${item.source.data}`;
786
+ return {
787
+ type: "image_url",
788
+ image_url: {
789
+ url: dataUri,
790
+ detail: "auto"
769
791
  }
770
- ],
771
- model: "claude-3-sonnet-20240229",
772
- stop_reason: "end_turn",
773
- stop_sequence: null,
774
- usage: state.usage
775
- };
792
+ };
793
+ }
794
+ return null;
776
795
  }
777
796
  /**
778
- * 创建转换状态对象
797
+ * 创建包含工具调用的助手消息
779
798
  */
780
- createConversionState() {
781
- return {
782
- processedLines: 0,
783
- textContent: "",
784
- reasoningContent: "",
785
- toolCallsMap: /* @__PURE__ */ new Map(),
786
- completedToolCalls: [],
787
- allSSELines: [],
788
- errors: [],
789
- usage: {
790
- input_tokens: 0,
791
- output_tokens: 0
792
- },
793
- thinkingBlockStarted: false,
794
- contentBlockStarted: false
799
+ static createAssistantMessageWithToolCalls(textContent, toolUses) {
800
+ const assistantMessage = {
801
+ role: "assistant",
802
+ content: ""
803
+ // 默认为空字符串,避免null值
795
804
  };
796
- }
797
- /**
798
- * 转换消息格式
799
- */
800
- convertMessages(messages) {
801
- return messages.map((msg) => ({
802
- role: msg.role,
803
- content: msg.content
804
- }));
805
- }
806
- /**
807
- * 映射Anthropic模型到OpenAI模型
808
- */
809
- mapAnthropicModelToOpenAI(model) {
810
- const supportedModels = [
811
- "glm-4.5",
812
- "kimi-k2",
813
- "deepseek-v3.1",
814
- "deepseek-r1",
815
- "deepseek-v3",
816
- "qwen3-32b",
817
- "qwen3-coder",
818
- "qwen3-235b",
819
- "tstars2.0"
820
- ];
821
- if (supportedModels.includes(model)) {
822
- return model;
805
+ if (textContent.length > 0) {
806
+ const textOnly = textContent.map((item) => item.text || "").join("");
807
+ if (textOnly.trim()) {
808
+ assistantMessage.content = textOnly.trim();
809
+ }
823
810
  }
824
- const mapping = {
825
- "claude-3-sonnet-20240229": "glm-4.5",
826
- "claude-3-haiku-20240307": "kimi-k2",
827
- "claude-3-opus-20240229": "deepseek-v3.1"
828
- };
829
- return mapping[model] || "glm-4.5";
811
+ assistantMessage.tool_calls = toolUses.map((toolUse) => ({
812
+ id: toolUse.id,
813
+ type: "function",
814
+ function: {
815
+ name: toolUse.name,
816
+ arguments: JSON.stringify(toolUse.input || {})
817
+ }
818
+ }));
819
+ return assistantMessage;
830
820
  }
831
821
  /**
832
- * 检查请求是否包含图片内容
822
+ * 创建工具结果消息
833
823
  */
834
- hasImageContent(request) {
835
- return request.messages.some(
836
- (msg) => Array.isArray(msg.content) && msg.content.some((content) => content?.type === "image")
837
- );
824
+ static createToolResultMessages(toolResults, toolIdToNameMap) {
825
+ return toolResults.map((toolResult) => {
826
+ let resultContent = "No content";
827
+ if (toolResult.content) {
828
+ if (typeof toolResult.content === "string") {
829
+ resultContent = toolResult.content;
830
+ } else {
831
+ resultContent = JSON.stringify(toolResult.content, null, 2);
832
+ }
833
+ }
834
+ const toolName = toolIdToNameMap.get(toolResult.tool_use_id) || TOOL_CONVERSION.UNKNOWN_TOOL_FALLBACK;
835
+ return {
836
+ role: "tool",
837
+ tool_call_id: toolResult.tool_use_id,
838
+ name: toolName,
839
+ content: resultContent
840
+ };
841
+ });
838
842
  }
839
843
  /**
840
- * 转义JSON字符串
844
+ * 创建文本消息
841
845
  */
842
- escapeJsonString(str) {
843
- return str.replace(/\\/g, "\\\\").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t");
846
+ static createTextMessage(role, textContent) {
847
+ if (textContent.length === 0) return null;
848
+ const hasNonTextContent = textContent.some((item) => item.type !== "text");
849
+ if (hasNonTextContent) {
850
+ return {
851
+ role,
852
+ content: textContent
853
+ };
854
+ } else {
855
+ const textOnly = textContent.map((item) => item.text || "").join("");
856
+ return {
857
+ role,
858
+ content: textOnly.trim() || ""
859
+ // 确保content为字符串,避免null
860
+ };
861
+ }
844
862
  }
845
863
  /**
846
- * 获取初始SSE事件(message_start + ping)
864
+ * 处理系统消息
847
865
  */
848
- getInitialSSEEvents(modelName = "claude-sonnet-4", messageId = `msg_${Date.now()}_${Math.random().toString(36).substring(2, 8)}`) {
849
- return [
850
- "event: message_start",
851
- `data: {"type":"message_start","message":{"id":"${messageId}","type":"message","role":"assistant","model":"${modelName}","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":0,"output_tokens":0}}}`,
852
- "",
853
- "event: ping",
854
- 'data: {"type":"ping"}',
855
- ""
856
- ];
866
+ static processSystemMessage(system) {
867
+ let systemContent;
868
+ if (Array.isArray(system)) {
869
+ systemContent = system.map((s) => {
870
+ if (typeof s === "string") {
871
+ return s;
872
+ }
873
+ return s.text || "";
874
+ }).filter((text) => text.length > 0).join("\n").trim();
875
+ } else {
876
+ systemContent = system;
877
+ }
878
+ if (systemContent) {
879
+ return {
880
+ role: "system",
881
+ content: systemContent
882
+ };
883
+ }
884
+ return null;
857
885
  }
858
886
  /**
859
- * 增量转换单个OpenAI数据块为Anthropic SSE事件
860
- * 用于逐个处理流式数据片段
887
+ * 异步转换图片内容格式(支持URL自动下载转base64)
888
+ * @param item 图片内容项
889
+ * @param downloadUrls 是否下载URL并转换为base64(默认true)
861
890
  */
862
- convertIncrementalChunk(openaiDataLine, state) {
863
- const logger = this.config.logger;
864
- const sseEvents = [];
865
- if (openaiDataLine.trim() === "[DONE]") {
866
- this.addFinalEvents(state, sseEvents);
867
- return sseEvents;
891
+ static async convertImageContentAsync(item, downloadUrls = true) {
892
+ if (!item.source) {
893
+ return null;
868
894
  }
869
- try {
870
- const chunk = JSON.parse(openaiDataLine);
871
- this.processStreamChunk(chunk, state, sseEvents);
872
- return sseEvents;
873
- } catch (error) {
874
- if (this.config.debugMode) {
875
- logger.warn("Failed to parse OpenAI stream chunk in convertIncrementalChunk", {
876
- line: openaiDataLine.substring(0, 200),
877
- error: error instanceof Error ? error.message : String(error)
878
- });
895
+ if (item.source.type === "url" && item.source.url) {
896
+ const url = item.source.url;
897
+ if (isBase64DataUri(url)) {
898
+ return {
899
+ type: "image_url",
900
+ image_url: {
901
+ url,
902
+ detail: "auto"
903
+ }
904
+ };
879
905
  }
880
- return [];
881
- }
882
- }
883
- };
884
-
885
- // src/core/a2o-request-adapter/config.ts
886
- var DEFAULT_CONFIG = {
887
- // 原有配置
888
- debugMode: false,
889
- maxDescriptionLength: 100,
890
- enableToolNameValidation: true,
891
- enableFormatValidation: true,
892
- // 新增默认配置
893
- validation: {
894
- enabled: true,
895
- strict: false,
896
- // 默认开启自动修复
897
- customSchemas: {}
898
- },
899
- healing: {
900
- enabled: true,
901
- maxAttempts: 3,
902
- enableCustomRules: true
903
- },
904
- recovery: {
905
- enabled: true,
906
- maxRetries: 2,
907
- backoffMs: 1e3
908
- },
909
- monitoring: {
910
- enabled: false,
911
- logLevel: "warn",
912
- enableMetrics: false
913
- }
914
- };
915
- var SUPPORTED_IMAGE_TYPES = [
916
- "image/jpeg",
917
- "image/png",
918
- "image/gif",
919
- "image/webp"
920
- ];
921
- var TOOL_CONVERSION = {
922
- /**
923
- * 终极泛化:完全移除工具名称映射
924
- * 基于GitHub Copilot API测试结果,100%保持原始格式
925
- */
926
- PRESERVE_ORIGINAL_NAMES: true,
927
- /**
928
- * 默认工具描述
929
- */
930
- DEFAULT_DESCRIPTION: "Tool description",
931
- /**
932
- * 未知工具回退名称
933
- */
934
- UNKNOWN_TOOL_FALLBACK: "unknown_tool"
935
- };
936
-
937
- // src/core/a2o-request-adapter/message-converter.ts
938
- var MessageConverter = class {
939
- /**
940
- * 转换消息格式,正确处理工具调用和工具结果
941
- * 修复关键问题:将tool_use转换为tool_calls,tool_result转换为role:"tool"消息
942
- * 使用tool_use_id溯回工具名称解决unknown_tool问题
943
- */
944
- static convertMessages(messages, system) {
945
- const context = this.createConversionContext(messages);
946
- const convertedMessages = [];
947
- for (const msg of messages) {
948
- if (Array.isArray(msg.content)) {
949
- const processedMessages = this.processComplexMessage(msg, context);
950
- convertedMessages.push(...processedMessages);
951
- } else {
952
- const safeMsg = { ...msg };
953
- if (safeMsg.content === null || safeMsg.content === void 0) {
954
- safeMsg.content = "";
906
+ if (downloadUrls && isExternalUrl(url)) {
907
+ try {
908
+ console.log(`[MessageConverter] Downloading image from URL: ${url}`);
909
+ const base64DataUri = await downloadImageAsBase64(url);
910
+ console.log(`[MessageConverter] Successfully converted image to base64`);
911
+ return {
912
+ type: "image_url",
913
+ image_url: {
914
+ url: base64DataUri,
915
+ detail: "auto"
916
+ }
917
+ };
918
+ } catch (error) {
919
+ console.error(`[MessageConverter] Failed to download image: ${error.message}`);
920
+ return {
921
+ type: "image_url",
922
+ image_url: {
923
+ url,
924
+ detail: "auto"
925
+ }
926
+ };
955
927
  }
956
- convertedMessages.push(safeMsg);
957
- }
958
- }
959
- if (system) {
960
- const systemMessage = this.processSystemMessage(system);
961
- if (systemMessage) {
962
- convertedMessages.unshift(systemMessage);
963
928
  }
964
- }
965
- return convertedMessages;
966
- }
967
- /**
968
- * 创建消息转换上下文
969
- */
970
- static createConversionContext(messages) {
971
- const toolIdToNameMap = /* @__PURE__ */ new Map();
972
- for (const msg of messages) {
973
- if (Array.isArray(msg.content)) {
974
- for (const item of msg.content) {
975
- if (typeof item === "object" && item !== null && item.type === "tool_use") {
976
- toolIdToNameMap.set(item.id, item.name);
977
- }
929
+ return {
930
+ type: "image_url",
931
+ image_url: {
932
+ url,
933
+ detail: "auto"
978
934
  }
979
- }
935
+ };
980
936
  }
981
- return {
982
- toolIdToNameMap,
983
- hasSystemMessage: false
984
- };
985
- }
986
- /**
987
- * 处理复杂消息(包含多种内容类型)
988
- */
989
- static processComplexMessage(msg, context) {
990
- const { textContent, toolUses, toolResults } = this.categorizeContent(msg.content);
991
- const resultMessages = [];
992
- if (msg.role === "assistant" && toolUses.length > 0) {
993
- const assistantMessage = this.createAssistantMessageWithToolCalls(textContent, toolUses);
994
- resultMessages.push(assistantMessage);
995
- } else if (toolResults.length > 0) {
996
- const toolMessages = this.createToolResultMessages(toolResults, context.toolIdToNameMap);
997
- resultMessages.push(...toolMessages);
998
- const textMessage = this.createTextMessage(msg.role, textContent);
999
- if (textMessage) {
1000
- resultMessages.push(textMessage);
1001
- }
1002
- } else if (textContent.length > 0) {
1003
- const textMessage = this.createTextMessage(msg.role, textContent);
1004
- if (textMessage) {
1005
- resultMessages.push(textMessage);
937
+ if (item.source.type === "base64" && item.source.data && item.source.media_type) {
938
+ if (!SUPPORTED_IMAGE_TYPES.includes(item.source.media_type)) {
939
+ console.warn(`\u4E0D\u652F\u6301\u7684\u56FE\u7247\u683C\u5F0F: ${item.source.media_type}`);
940
+ return null;
1006
941
  }
942
+ const dataUri = `data:${item.source.media_type};base64,${item.source.data}`;
943
+ return {
944
+ type: "image_url",
945
+ image_url: {
946
+ url: dataUri,
947
+ detail: "auto"
948
+ }
949
+ };
1007
950
  }
1008
- return resultMessages;
951
+ return null;
1009
952
  }
1010
953
  /**
1011
- * 分类内容块
954
+ * 异步处理消息内容(支持图片URL下载)
1012
955
  */
1013
- static categorizeContent(content) {
956
+ static async processMessageContentAsync(content, downloadUrls = true) {
1014
957
  const textContent = [];
1015
958
  const toolUses = [];
1016
959
  const toolResults = [];
1017
960
  for (const item of content) {
1018
- if (typeof item === "string") {
1019
- textContent.push({ type: "text", text: item });
1020
- } else if (typeof item === "object" && item !== null) {
961
+ if (item.type) {
1021
962
  switch (item.type) {
1022
963
  case "text":
1023
- textContent.push(item);
964
+ if (item.text) {
965
+ textContent.push({ type: "text", text: item.text });
966
+ }
1024
967
  break;
1025
968
  case "tool_use":
1026
969
  toolUses.push(item);
@@ -1029,7 +972,7 @@ var MessageConverter = class {
1029
972
  toolResults.push(item);
1030
973
  break;
1031
974
  case "image":
1032
- const imageContent = this.convertImageContent(item);
975
+ const imageContent = await this.convertImageContentAsync(item, downloadUrls);
1033
976
  if (imageContent) {
1034
977
  textContent.push(imageContent);
1035
978
  }
@@ -1040,146 +983,102 @@ var MessageConverter = class {
1040
983
  return { textContent, toolUses, toolResults };
1041
984
  }
1042
985
  /**
1043
- * 转换图片内容格式
986
+ * 异步转换消息格式(支持图片URL自动下载)
987
+ * @param messages Claude格式的消息数组
988
+ * @param system 系统消息
989
+ * @param downloadImageUrls 是否下载图片URL并转换为base64(默认true,解决GitHub Copilot等API不支持外部URL的问题)
1044
990
  */
1045
- static convertImageContent(item) {
1046
- if (item.source && item.source.type === "base64" && item.source.data && item.source.media_type) {
1047
- if (!SUPPORTED_IMAGE_TYPES.includes(item.source.media_type)) {
1048
- console.warn(`\u4E0D\u652F\u6301\u7684\u56FE\u7247\u683C\u5F0F: ${item.source.media_type}`);
1049
- return null;
1050
- }
1051
- const dataUri = `data:${item.source.media_type};base64,${item.source.data}`;
1052
- return {
1053
- type: "image_url",
1054
- image_url: {
1055
- url: dataUri
991
+ static async convertMessagesAsync(messages, system, downloadImageUrls = true) {
992
+ const debugEnabled = process.env.AI_PROTOCOL_DEBUG === "true";
993
+ if (debugEnabled) {
994
+ console.debug(
995
+ `[MessageConverter] convertMessagesAsync called (downloadImageUrls: ${downloadImageUrls})`
996
+ );
997
+ }
998
+ const context = this.createConversionContext(messages);
999
+ const convertedMessages = [];
1000
+ for (const msg of messages) {
1001
+ if (Array.isArray(msg.content)) {
1002
+ const processedMessages = await this.processComplexMessageAsync(msg, context, downloadImageUrls);
1003
+ convertedMessages.push(...processedMessages);
1004
+ } else {
1005
+ const safeMsg = { ...msg };
1006
+ if (safeMsg.content === null || safeMsg.content === void 0) {
1007
+ safeMsg.content = "";
1056
1008
  }
1057
- };
1009
+ convertedMessages.push(safeMsg);
1010
+ }
1058
1011
  }
1059
- return null;
1012
+ const systemMessage = this.processSystemMessage(system);
1013
+ if (systemMessage) {
1014
+ return [systemMessage, ...convertedMessages];
1015
+ }
1016
+ return convertedMessages;
1060
1017
  }
1061
1018
  /**
1062
- * 创建包含工具调用的助手消息
1019
+ * 异步处理复杂消息(支持图片URL下载)
1063
1020
  */
1064
- static createAssistantMessageWithToolCalls(textContent, toolUses) {
1065
- const assistantMessage = {
1066
- role: "assistant",
1067
- content: ""
1068
- // 默认为空字符串,避免null值
1069
- };
1070
- if (textContent.length > 0) {
1071
- const textOnly = textContent.map((item) => item.text || "").join("");
1072
- if (textOnly.trim()) {
1073
- assistantMessage.content = textOnly.trim();
1021
+ static async processComplexMessageAsync(msg, context, downloadUrls) {
1022
+ const { textContent, toolUses, toolResults } = await this.processMessageContentAsync(
1023
+ msg.content,
1024
+ downloadUrls
1025
+ );
1026
+ const result = [];
1027
+ if (msg.role === "user") {
1028
+ const toolMessages = this.createToolResultMessages(toolResults, context.toolIdToNameMap);
1029
+ result.push(...toolMessages);
1030
+ const textMessage = this.createTextMessage("user", textContent);
1031
+ if (textMessage) {
1032
+ result.push(textMessage);
1033
+ }
1034
+ } else if (msg.role === "assistant") {
1035
+ if (toolUses.length > 0) {
1036
+ const assistantMessage = this.createAssistantMessageWithToolCalls(textContent, toolUses);
1037
+ result.push(assistantMessage);
1038
+ toolUses.forEach((toolUse) => {
1039
+ context.toolIdToNameMap.set(toolUse.id, toolUse.name);
1040
+ });
1041
+ } else {
1042
+ const textMessage = this.createTextMessage("assistant", textContent);
1043
+ if (textMessage) {
1044
+ result.push(textMessage);
1045
+ }
1074
1046
  }
1075
1047
  }
1076
- assistantMessage.tool_calls = toolUses.map((toolUse) => ({
1077
- id: toolUse.id,
1048
+ return result;
1049
+ }
1050
+ };
1051
+
1052
+ // src/core/a2o-request-adapter/tool-converter.ts
1053
+ var ToolConverter = class {
1054
+ /**
1055
+ * 将Anthropic工具定义转换为OpenAI格式
1056
+ */
1057
+ static convertAnthropicToolToOpenAI(anthropicTool) {
1058
+ if (!anthropicTool || !anthropicTool.name) {
1059
+ throw new Error("Invalid tool definition: missing name");
1060
+ }
1061
+ const openaiName = anthropicTool.name;
1062
+ const description = this.simplifyDescription(anthropicTool.description || TOOL_CONVERSION.DEFAULT_DESCRIPTION);
1063
+ if (!anthropicTool.input_schema) {
1064
+ throw new Error(`Invalid tool definition for ${anthropicTool.name}: missing input_schema`);
1065
+ }
1066
+ const parameters = {
1067
+ type: anthropicTool.input_schema.type || "object",
1068
+ properties: anthropicTool.input_schema.properties || {},
1069
+ ...anthropicTool.input_schema.required && { required: anthropicTool.input_schema.required }
1070
+ };
1071
+ return {
1078
1072
  type: "function",
1079
1073
  function: {
1080
- name: toolUse.name,
1081
- arguments: JSON.stringify(toolUse.input || {})
1074
+ name: openaiName,
1075
+ description,
1076
+ parameters
1082
1077
  }
1083
- }));
1084
- return assistantMessage;
1078
+ };
1085
1079
  }
1086
1080
  /**
1087
- * 创建工具结果消息
1088
- */
1089
- static createToolResultMessages(toolResults, toolIdToNameMap) {
1090
- return toolResults.map((toolResult) => {
1091
- let resultContent = "No content";
1092
- if (toolResult.content) {
1093
- if (typeof toolResult.content === "string") {
1094
- resultContent = toolResult.content;
1095
- } else {
1096
- resultContent = JSON.stringify(toolResult.content, null, 2);
1097
- }
1098
- }
1099
- const toolName = toolIdToNameMap.get(toolResult.tool_use_id) || TOOL_CONVERSION.UNKNOWN_TOOL_FALLBACK;
1100
- return {
1101
- role: "tool",
1102
- tool_call_id: toolResult.tool_use_id,
1103
- name: toolName,
1104
- content: resultContent
1105
- };
1106
- });
1107
- }
1108
- /**
1109
- * 创建文本消息
1110
- */
1111
- static createTextMessage(role, textContent) {
1112
- if (textContent.length === 0) return null;
1113
- const hasNonTextContent = textContent.some((item) => item.type !== "text");
1114
- if (hasNonTextContent) {
1115
- return {
1116
- role,
1117
- content: textContent
1118
- };
1119
- } else {
1120
- const textOnly = textContent.map((item) => item.text || "").join("");
1121
- return {
1122
- role,
1123
- content: textOnly.trim() || ""
1124
- // 确保content为字符串,避免null
1125
- };
1126
- }
1127
- }
1128
- /**
1129
- * 处理系统消息
1130
- */
1131
- static processSystemMessage(system) {
1132
- let systemContent;
1133
- if (Array.isArray(system)) {
1134
- systemContent = system.map((s) => {
1135
- if (typeof s === "string") {
1136
- return s;
1137
- }
1138
- return s.text || "";
1139
- }).filter((text) => text.length > 0).join("\n").trim();
1140
- } else {
1141
- systemContent = system;
1142
- }
1143
- if (systemContent) {
1144
- return {
1145
- role: "system",
1146
- content: systemContent
1147
- };
1148
- }
1149
- return null;
1150
- }
1151
- };
1152
-
1153
- // src/core/a2o-request-adapter/tool-converter.ts
1154
- var ToolConverter = class {
1155
- /**
1156
- * 将Anthropic工具定义转换为OpenAI格式
1157
- */
1158
- static convertAnthropicToolToOpenAI(anthropicTool) {
1159
- if (!anthropicTool || !anthropicTool.name) {
1160
- throw new Error("Invalid tool definition: missing name");
1161
- }
1162
- const openaiName = anthropicTool.name;
1163
- const description = this.simplifyDescription(anthropicTool.description || TOOL_CONVERSION.DEFAULT_DESCRIPTION);
1164
- if (!anthropicTool.input_schema) {
1165
- throw new Error(`Invalid tool definition for ${anthropicTool.name}: missing input_schema`);
1166
- }
1167
- const parameters = {
1168
- type: anthropicTool.input_schema.type || "object",
1169
- properties: anthropicTool.input_schema.properties || {},
1170
- ...anthropicTool.input_schema.required && { required: anthropicTool.input_schema.required }
1171
- };
1172
- return {
1173
- type: "function",
1174
- function: {
1175
- name: openaiName,
1176
- description,
1177
- parameters
1178
- }
1179
- };
1180
- }
1181
- /**
1182
- * 将OpenAI工具调用转换为Claude格式
1081
+ * 将OpenAI工具调用转换为Claude格式
1183
1082
  */
1184
1083
  static convertOpenAIToolCallsToClaude(toolCalls) {
1185
1084
  return toolCalls.map((toolCall) => {
@@ -1204,6 +1103,24 @@ var ToolConverter = class {
1204
1103
  static isOpenAIToolFormat(tool) {
1205
1104
  return tool && tool.type === "function" && tool.function && tool.function.name;
1206
1105
  }
1106
+ /**
1107
+ * 确保OpenAI格式工具有有效描述
1108
+ * 处理空字符串、undefined、null等情况
1109
+ */
1110
+ static ensureOpenAIToolDescription(tool) {
1111
+ if (!tool?.function) return tool;
1112
+ const description = tool.function.description?.trim();
1113
+ if (!description) {
1114
+ return {
1115
+ ...tool,
1116
+ function: {
1117
+ ...tool.function,
1118
+ description: TOOL_CONVERSION.DEFAULT_DESCRIPTION
1119
+ }
1120
+ };
1121
+ }
1122
+ return tool;
1123
+ }
1207
1124
  /**
1208
1125
  * 简化Claude的详细描述为OpenAI兼容的简短描述
1209
1126
  */
@@ -3800,324 +3717,1174 @@ var A2ORequestAdapter = class {
3800
3717
  }
3801
3718
  }
3802
3719
  /**
3803
- * 执行核心转换逻辑(原有逻辑保持不变)
3720
+ * 执行核心转换逻辑(支持图片代理)
3804
3721
  */
3805
3722
  async performCoreConversion(anthropicRequest) {
3806
3723
  if (this.config.enableFormatValidation) {
3807
3724
  FormatValidator.validateClaudeRequest(anthropicRequest);
3808
3725
  }
3726
+ const messages = this.config.imageProxy.enabled ? await MessageConverter.convertMessagesAsync(
3727
+ anthropicRequest.messages,
3728
+ anthropicRequest.system,
3729
+ true
3730
+ // 启用图片下载
3731
+ ) : MessageConverter.convertMessages(anthropicRequest.messages, anthropicRequest.system);
3809
3732
  const openaiRequest = {
3810
3733
  model: anthropicRequest.model,
3811
- messages: MessageConverter.convertMessages(anthropicRequest.messages, anthropicRequest.system),
3734
+ messages,
3812
3735
  max_tokens: anthropicRequest.max_tokens,
3813
3736
  temperature: anthropicRequest.temperature,
3814
3737
  stream: anthropicRequest.stream
3815
3738
  };
3816
- if (anthropicRequest.tools && anthropicRequest.tools.length > 0) {
3817
- openaiRequest.tools = this.convertToolDefinitions(anthropicRequest.tools);
3818
- }
3819
- const specialFields = ["_anthropic_protocol", "_rovo_tool_injected", "_routeResult"];
3820
- for (const field of specialFields) {
3821
- if (anthropicRequest[field] !== void 0) {
3822
- openaiRequest[field] = anthropicRequest[field];
3823
- }
3739
+ if (anthropicRequest.tools && anthropicRequest.tools.length > 0) {
3740
+ openaiRequest.tools = this.convertToolDefinitions(anthropicRequest.tools);
3741
+ }
3742
+ const specialFields = ["_anthropic_protocol", "_rovo_tool_injected", "_routeResult"];
3743
+ for (const field of specialFields) {
3744
+ if (anthropicRequest[field] !== void 0) {
3745
+ openaiRequest[field] = anthropicRequest[field];
3746
+ }
3747
+ }
3748
+ return openaiRequest;
3749
+ }
3750
+ /**
3751
+ * 转换Anthropic请求格式为OpenAI兼容格式 - 原有方法保持兼容
3752
+ */
3753
+ convertAnthropicRequestToOpenAI(anthropicRequest) {
3754
+ if (this.config.enableFormatValidation) {
3755
+ FormatValidator.validateClaudeRequest(anthropicRequest);
3756
+ }
3757
+ const openaiRequest = {
3758
+ model: anthropicRequest.model,
3759
+ messages: MessageConverter.convertMessages(anthropicRequest.messages, anthropicRequest.system),
3760
+ max_tokens: anthropicRequest.max_tokens,
3761
+ temperature: anthropicRequest.temperature,
3762
+ stream: anthropicRequest.stream,
3763
+ n: 1
3764
+ };
3765
+ if (anthropicRequest.tools && anthropicRequest.tools.length > 0) {
3766
+ openaiRequest.tools = this.convertToolDefinitions(anthropicRequest.tools);
3767
+ }
3768
+ const specialFields = ["_anthropic_protocol", "_rovo_tool_injected", "_routeResult"];
3769
+ for (const field of specialFields) {
3770
+ if (anthropicRequest[field] !== void 0) {
3771
+ openaiRequest[field] = anthropicRequest[field];
3772
+ }
3773
+ }
3774
+ if (this.config.enableFormatValidation && !FormatValidator.validateOpenAIRequest(openaiRequest)) {
3775
+ throw new Error("Generated OpenAI request format is invalid");
3776
+ }
3777
+ return openaiRequest;
3778
+ }
3779
+ /**
3780
+ * 转换OpenAI响应格式为Claude兼容格式
3781
+ */
3782
+ convertOpenAIResponseToClaude(openaiResponse) {
3783
+ const claudeContent = [];
3784
+ const message = openaiResponse.choices?.[0]?.message;
3785
+ if (message?.content) {
3786
+ claudeContent.push({
3787
+ type: "text",
3788
+ text: message.content
3789
+ });
3790
+ }
3791
+ if (message?.tool_calls) {
3792
+ const toolUseContents = ToolConverter.convertOpenAIToolCallsToClaude(message.tool_calls);
3793
+ claudeContent.push(...toolUseContents);
3794
+ }
3795
+ const claudeResponse = {
3796
+ role: "assistant",
3797
+ content: claudeContent
3798
+ };
3799
+ return claudeResponse;
3800
+ }
3801
+ /**
3802
+ * 转换工具定义列表
3803
+ * 确保所有工具都有有效描述,无论是Anthropic还是OpenAI格式
3804
+ */
3805
+ convertToolDefinitions(tools) {
3806
+ return tools.map((tool) => {
3807
+ let openaiTool;
3808
+ if (ToolConverter.isOpenAIToolFormat(tool)) {
3809
+ openaiTool = tool;
3810
+ } else {
3811
+ openaiTool = ToolConverter.convertAnthropicToolToOpenAI(tool);
3812
+ }
3813
+ return ToolConverter.ensureOpenAIToolDescription(openaiTool);
3814
+ });
3815
+ }
3816
+ /**
3817
+ * 验证Claude请求格式
3818
+ */
3819
+ validateClaudeRequest(request) {
3820
+ return FormatValidator.validateClaudeRequest(request);
3821
+ }
3822
+ /**
3823
+ * 验证OpenAI请求格式
3824
+ */
3825
+ validateOpenAIRequest(request) {
3826
+ return FormatValidator.validateOpenAIRequest(request);
3827
+ }
3828
+ /**
3829
+ * 获取支持的工具列表
3830
+ */
3831
+ getSupportedTools() {
3832
+ return [];
3833
+ }
3834
+ /**
3835
+ * 检查工具是否支持
3836
+ */
3837
+ isToolSupported(_toolName) {
3838
+ return true;
3839
+ }
3840
+ /**
3841
+ * 获取工具映射(已弃用,保持兼容性)
3842
+ */
3843
+ getToolMapping(claudeToolName) {
3844
+ return claudeToolName;
3845
+ }
3846
+ /**
3847
+ * 更新配置
3848
+ */
3849
+ updateConfig(newConfig) {
3850
+ this.config = { ...this.config, ...newConfig };
3851
+ }
3852
+ /**
3853
+ * 获取当前配置
3854
+ */
3855
+ getConfig() {
3856
+ return { ...this.config };
3857
+ }
3858
+ /**
3859
+ * 执行带验证的核心转换(同步版本)
3860
+ * 为静态方法提供增强功能,但保持同步特性
3861
+ */
3862
+ performCoreConversionWithValidation(anthropicRequest) {
3863
+ if (this.config.validation.enabled) {
3864
+ try {
3865
+ validateAnthropicRequest(anthropicRequest);
3866
+ } catch (error) {
3867
+ if (this.config.validation.strict) {
3868
+ throw error;
3869
+ } else {
3870
+ const errorSummary = this.getValidationErrorSummary(error);
3871
+ console.warn(`[A2ORequestAdapter] Input validation warning: ${errorSummary}. Details saved to logs.`);
3872
+ }
3873
+ }
3874
+ }
3875
+ let processedRequest = anthropicRequest;
3876
+ if (this.config.healing.enabled) {
3877
+ try {
3878
+ processedRequest = this.applySyncHealing(anthropicRequest);
3879
+ } catch (healingError) {
3880
+ console.warn("[A2ORequestAdapter] Healing failed:", healingError);
3881
+ }
3882
+ }
3883
+ const result = this.performBasicConversion(processedRequest, true);
3884
+ if (this.config.validation.enabled) {
3885
+ try {
3886
+ validateOpenAIRequest(result);
3887
+ } catch (error) {
3888
+ if (this.config.validation.strict) {
3889
+ throw error;
3890
+ } else {
3891
+ console.warn("[A2ORequestAdapter] Output validation warning:", error);
3892
+ }
3893
+ }
3894
+ }
3895
+ return result;
3896
+ }
3897
+ /**
3898
+ * 执行基础转换逻辑(原有逻辑的提取)
3899
+ */
3900
+ performBasicConversion(anthropicRequest, skipValidation = false) {
3901
+ if (!skipValidation && this.config.enableFormatValidation) {
3902
+ FormatValidator.validateClaudeRequest(anthropicRequest);
3903
+ }
3904
+ const openaiRequest = {
3905
+ model: anthropicRequest.model,
3906
+ messages: MessageConverter.convertMessages(anthropicRequest.messages, anthropicRequest.system),
3907
+ max_tokens: anthropicRequest.max_tokens,
3908
+ temperature: anthropicRequest.temperature,
3909
+ stream: anthropicRequest.stream,
3910
+ n: 1
3911
+ };
3912
+ if (anthropicRequest.tools && anthropicRequest.tools.length > 0) {
3913
+ openaiRequest.tools = this.convertToolDefinitions(anthropicRequest.tools);
3914
+ }
3915
+ const specialFields = ["_anthropic_protocol", "_rovo_tool_injected", "_routeResult"];
3916
+ for (const field of specialFields) {
3917
+ if (anthropicRequest[field] !== void 0) {
3918
+ openaiRequest[field] = anthropicRequest[field];
3919
+ }
3920
+ }
3921
+ if (this.config.enableFormatValidation && !FormatValidator.validateOpenAIRequest(openaiRequest)) {
3922
+ throw new Error("Generated OpenAI request format is invalid");
3923
+ }
3924
+ return openaiRequest;
3925
+ }
3926
+ /**
3927
+ * 应用同步修复逻辑
3928
+ * 简化版的修复,不依赖异步操作
3929
+ */
3930
+ applySyncHealing(request) {
3931
+ const healedRequest = { ...request };
3932
+ if (!healedRequest.max_tokens || healedRequest.max_tokens <= 0) {
3933
+ healedRequest.max_tokens = 4096;
3934
+ }
3935
+ if (!healedRequest.messages || !Array.isArray(healedRequest.messages)) {
3936
+ throw new Error("Invalid messages array");
3937
+ }
3938
+ if (!healedRequest.model) {
3939
+ healedRequest.model = "claude-sonnet-4";
3940
+ }
3941
+ for (const message of healedRequest.messages) {
3942
+ if (!message.role) {
3943
+ message.role = "user";
3944
+ }
3945
+ if (!message.content) {
3946
+ message.content = "";
3947
+ }
3948
+ }
3949
+ return healedRequest;
3950
+ }
3951
+ /**
3952
+ * 获取验证错误详情
3953
+ */
3954
+ getValidationErrors(request, type) {
3955
+ return FormatValidator.getValidationErrors(request, type);
3956
+ }
3957
+ /**
3958
+ * 生成简洁的验证错误摘要
3959
+ */
3960
+ getValidationErrorSummary(error) {
3961
+ if (error?.issues?.length > 0) {
3962
+ const invalidEnums = error.issues.filter((i) => i.code === "invalid_enum_value");
3963
+ const missingFields = error.issues.filter((i) => i.code === "invalid_type");
3964
+ const summary = [];
3965
+ if (invalidEnums.length > 0) {
3966
+ const first = invalidEnums[0];
3967
+ summary.push(`invalid_${first.path?.join(".")}: '${first.received}'`);
3968
+ }
3969
+ if (missingFields.length > 0) {
3970
+ summary.push(`${missingFields.length} missing fields`);
3971
+ }
3972
+ return summary.slice(0, 2).join(", ") + (error.issues.length > 5 ? ` (+${error.issues.length - 5} more)` : "");
3973
+ }
3974
+ return error.message || "Validation failed";
3975
+ }
3976
+ };
3977
+ var A2ORequestAdapterStatic = {
3978
+ /**
3979
+ * 转换Anthropic请求格式为OpenAI兼容格式(静态方法)
3980
+ * 内部使用增强转换器,所有调用点自动获得增强功能
3981
+ */
3982
+ convertAnthropicRequestToOpenAI: (anthropicRequest) => {
3983
+ const adapter = new A2ORequestAdapter({
3984
+ debugMode: false,
3985
+ maxDescriptionLength: 100,
3986
+ enableToolNameValidation: true,
3987
+ enableFormatValidation: true,
3988
+ validation: { enabled: true, strict: false },
3989
+ healing: { enabled: true, maxAttempts: 2, enableCustomRules: true },
3990
+ recovery: { enabled: false, maxRetries: 0, backoffMs: 1e3 },
3991
+ monitoring: { enabled: false, logLevel: "none", enableMetrics: false }
3992
+ });
3993
+ try {
3994
+ const result = adapter.performCoreConversionWithValidation(anthropicRequest);
3995
+ return result;
3996
+ } catch (error) {
3997
+ console.warn(`[A2ORequestAdapterStatic] Enhanced conversion failed, using basic conversion: ${error?.message || error}`);
3998
+ return adapter.performBasicConversion(anthropicRequest, true);
3999
+ }
4000
+ },
4001
+ /**
4002
+ * 转换OpenAI响应格式为Claude兼容格式(静态方法)
4003
+ * 内部使用增强转换器
4004
+ */
4005
+ convertOpenAIResponseToClaude: (openaiResponse) => {
4006
+ const adapter = new A2ORequestAdapter({
4007
+ debugMode: false,
4008
+ maxDescriptionLength: 100,
4009
+ enableToolNameValidation: true,
4010
+ enableFormatValidation: true,
4011
+ validation: { enabled: true, strict: false },
4012
+ healing: { enabled: true, maxAttempts: 2, enableCustomRules: true },
4013
+ recovery: { enabled: false, maxRetries: 0, backoffMs: 1e3 },
4014
+ monitoring: { enabled: false, logLevel: "none", enableMetrics: false }
4015
+ });
4016
+ return adapter.convertOpenAIResponseToClaude(openaiResponse);
4017
+ },
4018
+ /**
4019
+ * 验证Claude请求格式(静态方法)
4020
+ */
4021
+ validateClaudeRequest: (request) => {
4022
+ return FormatValidator.validateClaudeRequest(request);
4023
+ },
4024
+ /**
4025
+ * 验证OpenAI请求格式(静态方法)
4026
+ */
4027
+ validateOpenAIRequest: (request) => {
4028
+ return FormatValidator.validateOpenAIRequest(request);
4029
+ },
4030
+ /**
4031
+ * 获取支持的工具列表(静态方法)
4032
+ */
4033
+ getSupportedTools: () => {
4034
+ return [];
4035
+ },
4036
+ /**
4037
+ * 检查工具是否支持(静态方法)
4038
+ */
4039
+ isToolSupported: (_toolName) => {
4040
+ return true;
4041
+ },
4042
+ /**
4043
+ * 获取工具映射(静态方法,已弃用)
4044
+ */
4045
+ getToolMapping: (claudeToolName) => {
4046
+ return claudeToolName;
4047
+ },
4048
+ /**
4049
+ * 转换Anthropic请求格式为OpenAI兼容格式(异步版本,支持图片URL自动下载)
4050
+ * 解决GitHub Copilot等API不支持外部图片URL的问题
4051
+ * @param anthropicRequest Claude格式的请求
4052
+ * @param downloadImageUrls 是否下载图片URL并转换为base64(默认true)
4053
+ */
4054
+ convertAnthropicRequestToOpenAIAsync: async (anthropicRequest, downloadImageUrls = true) => {
4055
+ const adapter = new A2ORequestAdapter({
4056
+ debugMode: false,
4057
+ maxDescriptionLength: 100,
4058
+ enableToolNameValidation: true,
4059
+ enableFormatValidation: true,
4060
+ validation: { enabled: true, strict: false },
4061
+ healing: { enabled: true, maxAttempts: 2, enableCustomRules: true },
4062
+ recovery: { enabled: false, maxRetries: 0, backoffMs: 1e3 },
4063
+ monitoring: { enabled: false, logLevel: "none", enableMetrics: false },
4064
+ imageProxy: {
4065
+ enabled: downloadImageUrls,
4066
+ timeout: 1e4,
4067
+ maxSize: 10 * 1024 * 1024
4068
+ }
4069
+ });
4070
+ try {
4071
+ const result = await adapter.performCoreConversion(anthropicRequest);
4072
+ return result;
4073
+ } catch (error) {
4074
+ console.warn(`[A2ORequestAdapterStatic] Async conversion failed: ${error?.message || error}`);
4075
+ return adapter.performBasicConversion(anthropicRequest, true);
4076
+ }
4077
+ }
4078
+ };
4079
+
4080
+ // src/core/streaming/streaming-protocol-adapter.ts
4081
+ var StreamingProtocolAdapter = class {
4082
+ constructor(options = {}) {
4083
+ this.config = {
4084
+ debugMode: options.debugMode ?? false,
4085
+ validateInput: options.validateInput ?? false,
4086
+ validateOutput: options.validateOutput ?? false,
4087
+ autoHeal: options.autoHeal ?? false,
4088
+ timeout: options.timeout ?? 3e4,
4089
+ retries: options.retries ?? 3,
4090
+ bufferSize: options.bufferSize ?? 1024,
4091
+ logger: options.logger ?? getGlobalLogger()
4092
+ };
4093
+ }
4094
+ logDebug(message, meta) {
4095
+ if (this.config.debugMode) {
4096
+ this.config.logger.debug(message, meta);
3824
4097
  }
3825
- return openaiRequest;
3826
4098
  }
3827
4099
  /**
3828
- * 转换Anthropic请求格式为OpenAI兼容格式 - 原有方法保持兼容
4100
+ * 转换Anthropic请求为OpenAI格式
3829
4101
  */
3830
- convertAnthropicRequestToOpenAI(anthropicRequest) {
3831
- if (this.config.enableFormatValidation) {
3832
- FormatValidator.validateClaudeRequest(anthropicRequest);
4102
+ convertAnthropicToOpenAI(anthropicRequest) {
4103
+ const logger = this.config.logger;
4104
+ if (this.config.debugMode) {
4105
+ logger.debug("Converting Anthropic request to OpenAI format", { model: anthropicRequest.model });
3833
4106
  }
3834
- const openaiRequest = {
3835
- model: anthropicRequest.model,
3836
- messages: MessageConverter.convertMessages(anthropicRequest.messages, anthropicRequest.system),
3837
- max_tokens: anthropicRequest.max_tokens,
3838
- temperature: anthropicRequest.temperature,
3839
- stream: anthropicRequest.stream,
3840
- n: 1
4107
+ const openaiRequest = A2ORequestAdapterStatic.convertAnthropicRequestToOpenAI(anthropicRequest);
4108
+ openaiRequest.stream = true;
4109
+ const hasImages = this.hasImageContent(anthropicRequest);
4110
+ return {
4111
+ openaiRequest,
4112
+ metadata: {
4113
+ hasImages,
4114
+ requiresVisionHeaders: hasImages
4115
+ }
3841
4116
  };
3842
- if (anthropicRequest.tools && anthropicRequest.tools.length > 0) {
3843
- openaiRequest.tools = this.convertToolDefinitions(anthropicRequest.tools);
3844
- }
3845
- const specialFields = ["_anthropic_protocol", "_rovo_tool_injected", "_routeResult"];
3846
- for (const field of specialFields) {
3847
- if (anthropicRequest[field] !== void 0) {
3848
- openaiRequest[field] = anthropicRequest[field];
4117
+ }
4118
+ /**
4119
+ * 与StandardProtocolAdapter保持一致的API,用于集成测试和向后兼容。
4120
+ */
4121
+ convertRequest(anthropicRequest) {
4122
+ return this.convertAnthropicToOpenAI(anthropicRequest);
4123
+ }
4124
+ /**
4125
+ * 转换OpenAI流式响应为Anthropic SSE格式
4126
+ */
4127
+ convertOpenAIStreamToAnthropic(openaiStream, originalRequest) {
4128
+ const logger = this.config.logger;
4129
+ try {
4130
+ if (this.config.debugMode) {
4131
+ logger.debug("Converting OpenAI stream to Anthropic SSE", {
4132
+ streamLength: openaiStream.length,
4133
+ model: originalRequest.model
4134
+ });
3849
4135
  }
4136
+ if (!openaiStream || openaiStream.trim() === "") {
4137
+ return {
4138
+ success: false,
4139
+ error: "Empty stream response",
4140
+ anthropicSSE: "",
4141
+ anthropicStandardResponse: null
4142
+ };
4143
+ }
4144
+ const anthropicSSE = this.convertToAnthropicSSE(openaiStream, originalRequest.model);
4145
+ const anthropicStandardResponse = this.buildStandardResponse(openaiStream);
4146
+ return {
4147
+ success: true,
4148
+ anthropicSSE,
4149
+ anthropicStandardResponse
4150
+ };
4151
+ } catch (error) {
4152
+ const errorMessage = error instanceof Error ? error.message : "Unknown conversion error";
4153
+ logger.error("Stream conversion failed", { error: errorMessage });
4154
+ return {
4155
+ success: false,
4156
+ error: errorMessage,
4157
+ anthropicSSE: "",
4158
+ anthropicStandardResponse: null
4159
+ };
3850
4160
  }
3851
- if (this.config.enableFormatValidation && !FormatValidator.validateOpenAIRequest(openaiRequest)) {
3852
- throw new Error("Generated OpenAI request format is invalid");
3853
- }
3854
- return openaiRequest;
3855
4161
  }
3856
4162
  /**
3857
- * 转换OpenAI响应格式为Claude兼容格式
4163
+ * 增量解析Anthropic SSE,转换为OpenAI流式chunk
4164
+ * 供 OpenAI Chat Completions 端点直接复用
3858
4165
  */
3859
- convertOpenAIResponseToClaude(openaiResponse) {
3860
- const claudeContent = [];
3861
- const message = openaiResponse.choices?.[0]?.message;
3862
- if (message?.content) {
3863
- claudeContent.push({
3864
- type: "text",
3865
- text: message.content
3866
- });
4166
+ convertAnthropicSSEChunkToOpenAI(params) {
4167
+ const { buffer, chunk, model, flush = false } = params;
4168
+ let localBuffer = buffer + (chunk || "");
4169
+ const emittedChunks = [];
4170
+ let finishReason;
4171
+ let streamStopped = false;
4172
+ const processEvent = (eventText) => {
4173
+ const { eventType, data } = this.parseAnthropicSSEEvent(eventText);
4174
+ if (!eventType || !data) {
4175
+ return;
4176
+ }
4177
+ if (eventType === "content_block_delta") {
4178
+ const text = this.extractTextFromAnthropicDelta(data);
4179
+ if (text) {
4180
+ emittedChunks.push(this.buildOpenAIStreamChunk(model, text));
4181
+ }
4182
+ } else if (eventType === "message_stop") {
4183
+ finishReason = this.mapAnthropicStopReasonToOpenAI(data?.stop_reason);
4184
+ streamStopped = true;
4185
+ }
4186
+ };
4187
+ while (true) {
4188
+ const separatorIndex = localBuffer.indexOf("\n\n");
4189
+ if (separatorIndex === -1) {
4190
+ break;
4191
+ }
4192
+ const rawEvent = localBuffer.slice(0, separatorIndex);
4193
+ localBuffer = localBuffer.slice(separatorIndex + 2);
4194
+ if (!rawEvent.trim()) {
4195
+ continue;
4196
+ }
4197
+ processEvent(rawEvent);
4198
+ if (streamStopped) {
4199
+ break;
4200
+ }
3867
4201
  }
3868
- if (message?.tool_calls) {
3869
- const toolUseContents = ToolConverter.convertOpenAIToolCallsToClaude(message.tool_calls);
3870
- claudeContent.push(...toolUseContents);
4202
+ if (flush && localBuffer.trim()) {
4203
+ processEvent(localBuffer);
4204
+ localBuffer = "";
3871
4205
  }
3872
- const claudeResponse = {
3873
- role: "assistant",
3874
- content: claudeContent
4206
+ return {
4207
+ buffer: localBuffer,
4208
+ chunks: emittedChunks,
4209
+ finishReason,
4210
+ streamStopped
3875
4211
  };
3876
- return claudeResponse;
3877
4212
  }
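// --- Editor's note: sketch of the buffer-carry contract above (not part of
// the diff); complete "\n\n"-terminated events are consumed, partial tails
// are returned in `buffer` and must be passed back on the next call.
function demoSSEToOpenAI(adapter) {
  let carry = "";
  const pieces = [
    'event: content_block_delta\ndata: {"type":"content_block_delta","delta":{"type":"text_delta","text":"Hi"}}\n\n',
    'event: message_stop\ndata: {"type":"message_stop","stop_reason":"end_turn"}\n\n'
  ];
  for (const piece of pieces) {
    const step = adapter.convertAnthropicSSEChunkToOpenAI({ buffer: carry, chunk: piece, model: "gpt-4o" });
    carry = step.buffer; // step.chunks holds OpenAI chat.completion.chunk objects
    if (step.streamStopped) break;
  }
}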
3878
4213
  /**
3879
- * Converts a list of tool definitions
4214
+ * Converts an OpenAI stream into Anthropic SSE format
3880
4215
  */
3881
- convertToolDefinitions(tools) {
3882
- return tools.map((tool) => {
3883
- if (ToolConverter.isOpenAIToolFormat(tool)) {
3884
- return tool;
3885
- } else {
3886
- return ToolConverter.convertAnthropicToolToOpenAI(tool);
4216
+ convertToAnthropicSSE(openaiStream, modelName) {
4217
+ const lines = openaiStream.split("\n");
4218
+ const sseLines = [];
4219
+ const state = this.createConversionState();
4220
+ sseLines.push(
4221
+ "event: message_start",
4222
+ `data: {"type":"message_start","message":{"id":"msg_${Date.now()}","type":"message","role":"assistant","model":"${modelName}","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":0,"output_tokens":0}}}`,
4223
+ ""
4224
+ );
4225
+ for (const line of lines) {
4226
+ if (line.startsWith("data:")) {
4227
+ const dataLine = line.substring(5);
4228
+ if (dataLine.trim() === "[DONE]") {
4229
+ this.addFinalEvents(state, sseLines);
4230
+ break;
4231
+ }
4232
+ try {
4233
+ const chunk = JSON.parse(dataLine);
4234
+ this.processStreamChunk(chunk, state, sseLines);
4235
+ } catch (error) {
4236
+ if (this.config.debugMode) {
4237
+ this.config.logger.warn("Failed to parse stream chunk", { line: dataLine.substring(0, 200) });
4238
+ }
4239
+ }
3887
4240
  }
3888
- });
4241
+ }
4242
+ return sseLines.join("\n");
3889
4243
  }
3890
4244
  /**
3891
- * Validates the Claude request format
4245
+ * Processes a single stream chunk - supports dual thinking/content modes
3892
4246
  */
3893
- validateClaudeRequest(request) {
3894
- return FormatValidator.validateClaudeRequest(request);
4247
+ processStreamChunk(chunk, state, sseLines) {
4248
+ if (this.isResponsesEvent(chunk)) {
4249
+ this.processResponsesEvent(chunk, state, sseLines);
4250
+ return;
4251
+ }
4252
+ const choice = chunk.choices?.[0];
4253
+ if (choice) {
4254
+ const hasToolCalls = choice.delta?.tool_calls;
4255
+ const hasFinishReason = choice.finish_reason;
4256
+ const isNonText = !choice.delta?.content;
4257
+ if (this.config.debugMode && (hasToolCalls || hasFinishReason || isNonText && choice.delta)) {
4258
+ this.logDebug("Streaming chunk processed", { chunk });
4259
+ }
4260
+ }
4261
+ if (!choice) {
4262
+ this.updateUsageFromChunk(chunk, state);
4263
+ return;
4264
+ }
4265
+ const delta = choice.delta ?? {};
4266
+ this.appendThinkingContent(this.coalesceContent(delta.reasoning_content), state, sseLines);
4267
+ this.appendTextContent(this.coalesceContent(delta.content), state, sseLines);
4268
+ if (delta.tool_calls) {
4269
+ this.processToolCalls(delta.tool_calls, state, sseLines);
4270
+ }
4271
+ this.updateUsageFromChunk(chunk, state);
3895
4272
  }
3896
4273
  /**
3897
- * Validates the OpenAI request format
4274
+ * Processes tool calls - supports accumulation of OpenAI streaming chunks
4275
+ * The OpenAI streaming API sends tool_calls split across multiple chunks:
4276
+ * - Chunk 1: {index:0, id:"call_xxx", type:"function", function:{name:"web_search"}}
4277
+ * - Chunk 2: {index:0, function:{arguments:"{\"query\":\"xxx\"}"}}
4278
+ * - Chunk N: further chunks keep appending to arguments
3898
4279
  */
3899
- validateOpenAIRequest(request) {
3900
- return FormatValidator.validateOpenAIRequest(request);
4280
+ processToolCalls(toolCalls, state, sseLines) {
4281
+ this.logDebug("processToolCalls called", { toolCalls });
4282
+ for (const toolCall of toolCalls) {
4283
+ const index = toolCall.index ?? 0;
4284
+ const toolId = toolCall.id;
4285
+ const toolName = toolCall.function?.name;
4286
+ const toolArgs = toolCall.function?.arguments;
4287
+ this.logDebug(`Processing tool chunk for index ${index}`, {
4288
+ hasId: !!toolId,
4289
+ hasName: !!toolName,
4290
+ hasArgs: !!toolArgs,
4291
+ argsLength: toolArgs?.length
4292
+ });
4293
+ const stateKey = `openai_tool_${index}`;
4294
+ const toolData = this.getOrCreateToolCallState(state, stateKey);
4295
+ if (toolId && !toolData.id) {
4296
+ toolData.id = toolId;
4297
+ }
4298
+ if (toolName) {
4299
+ toolData.name = toolName;
4300
+ }
4301
+ this.registerToolCallAlias(state, toolId ? `openai_tool_id_${toolId}` : void 0, toolData);
4302
+ this.registerToolCallAlias(state, `openai_tool_index_${index}`, toolData);
4303
+ if (toolArgs) {
4304
+ toolData.pendingChunks.push(toolArgs);
4305
+ this.logDebug(`Accumulated tool arguments for index ${index}`, {
4306
+ currentLength: toolData.pendingChunks.reduce((acc, chunk) => acc + chunk.length, 0)
4307
+ });
4308
+ }
4309
+ const started = this.maybeStartToolBlock(toolData, state, sseLines);
4310
+ if (started || toolData.blockStartSent) {
4311
+ this.flushPendingToolChunks(toolData, sseLines);
4312
+ }
4313
+ }
3901
4314
  }
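// --- Editor's note: sketch of the two-chunk accumulation pattern documented
// above (not part of the diff); state comes from createIncrementalState below.
function demoChunkedToolCall(adapter) {
  const state = adapter.createIncrementalState();
  const sseLines = [];
  // Chunk 1 carries id/name; chunk 2 carries an arguments fragment:
  adapter.processToolCalls([{ index: 0, id: "call_1", type: "function", function: { name: "web_search" } }], state, sseLines);
  adapter.processToolCalls([{ index: 0, function: { arguments: '{"query":"npm diff"}' } }], state, sseLines);
  // sseLines now holds content_block_start followed by an input_json_delta event.
}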
3902
- /**
3903
- * Returns the list of supported tools
3904
- */
3905
- getSupportedTools() {
3906
- return [];
4315
+ getOrCreateToolCallState(state, key) {
4316
+ let existing = state.toolCallsMap.get(key);
4317
+ if (!existing) {
4318
+ existing = {
4319
+ id: "",
4320
+ name: "",
4321
+ input: "",
4322
+ blockStartSent: false,
4323
+ blockStopSent: false,
4324
+ pendingChunks: []
4325
+ };
4326
+ state.toolCallsMap.set(key, existing);
4327
+ }
4328
+ return existing;
3907
4329
  }
3908
- /**
3909
- * Checks whether a tool is supported
3910
- */
3911
- isToolSupported(_toolName) {
4330
+ registerToolCallAlias(state, alias, toolData) {
4331
+ if (!alias) return;
4332
+ const current = state.toolCallsMap.get(alias);
4333
+ if (!current || current !== toolData) {
4334
+ state.toolCallsMap.set(alias, toolData);
4335
+ }
4336
+ }
4337
+ maybeStartToolBlock(toolData, state, sseLines) {
4338
+ if (toolData.blockStartSent) return false;
4339
+ if (!toolData.name) {
4340
+ return false;
4341
+ }
4342
+ if (!toolData.id) {
4343
+ toolData.id = `call_${++state.toolCallCounter}`;
4344
+ }
4345
+ const blockIndex = toolData.blockIndex ?? state.nextToolBlockIndex++;
4346
+ toolData.blockIndex = blockIndex;
4347
+ sseLines.push(
4348
+ "event: content_block_start",
4349
+ `data: {"type":"content_block_start","index":${blockIndex},"content_block":{"type":"tool_use","id":"${this.escapeJsonString(toolData.id)}","name":"${this.escapeJsonString(toolData.name)}","input":{}}}`,
4350
+ ""
4351
+ );
4352
+ toolData.blockStartSent = true;
4353
+ this.logDebug("Sent content_block_start", { toolName: toolData.name, blockIndex });
3912
4354
  return true;
3913
4355
  }
3914
- /**
3915
- * Returns the tool mapping (deprecated, kept for compatibility)
3916
- */
3917
- getToolMapping(claudeToolName) {
3918
- return claudeToolName;
4356
+ flushPendingToolChunks(toolData, sseLines) {
4357
+ if (!toolData.blockStartSent || toolData.blockIndex === void 0) {
4358
+ return;
4359
+ }
4360
+ while (toolData.pendingChunks.length > 0) {
4361
+ const chunk = toolData.pendingChunks.shift();
4362
+ if (chunk === void 0) continue;
4363
+ toolData.input += chunk;
4364
+ sseLines.push(
4365
+ "event: content_block_delta",
4366
+ `data: {"type":"content_block_delta","index":${toolData.blockIndex},"delta":{"type":"input_json_delta","partial_json":${JSON.stringify(chunk)}}}`,
4367
+ ""
4368
+ );
4369
+ this.logDebug("Sent input_json_delta", { blockIndex: toolData.blockIndex });
4370
+ }
4371
+ }
4372
+ coalesceContent(content) {
4373
+ if (!content) return void 0;
4374
+ if (typeof content === "string") return content;
4375
+ if (Array.isArray(content)) {
4376
+ return content.map((item) => {
4377
+ if (typeof item === "string") return item;
4378
+ if (typeof item?.text === "string") return item.text;
4379
+ if (typeof item?.content === "string") return item.content;
4380
+ return "";
4381
+ }).join("");
4382
+ }
4383
+ if (typeof content === "object" && typeof content.text === "string") {
4384
+ return content.text;
4385
+ }
4386
+ return void 0;
4387
+ }
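// --- Editor's note: coalesceContent flattens the delta-content shapes seen in
// practice (plain strings, part arrays, { text } objects) into one string:
function demoCoalesce(adapter) {
  console.assert(adapter.coalesceContent([{ text: "Hel" }, "lo"]) === "Hello");
  console.assert(adapter.coalesceContent({ text: "Hi" }) === "Hi");
}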
4388
+ appendThinkingContent(content, state, sseLines) {
4389
+ if (!content) return;
4390
+ state.reasoningContent += content;
4391
+ if (!state.thinkingBlockStarted) {
4392
+ if (state.contentBlockStarted) {
4393
+ sseLines.push(
4394
+ "event: content_block_stop",
4395
+ 'data: {"type":"content_block_stop","index":0}',
4396
+ ""
4397
+ );
4398
+ state.contentBlockStarted = false;
4399
+ }
4400
+ sseLines.push(
4401
+ "event: content_block_start",
4402
+ 'data: {"type":"content_block_start","index":0,"content_block":{"type":"thinking","thinking":""}}',
4403
+ ""
4404
+ );
4405
+ state.thinkingBlockStarted = true;
4406
+ }
4407
+ sseLines.push(
4408
+ "event: content_block_delta",
4409
+ `data: {"type":"content_block_delta","index":0,"delta":{"type":"thinking_delta","thinking":"${this.escapeJsonString(content)}"}}`,
4410
+ ""
4411
+ );
3919
4412
  }
3920
- /**
3921
- * Updates the configuration
3922
- */
3923
- updateConfig(newConfig) {
3924
- this.config = { ...this.config, ...newConfig };
4413
+ appendTextContent(content, state, sseLines) {
4414
+ if (!content || content === "") return;
4415
+ if (state.thinkingBlockStarted && !state.contentBlockStarted) {
4416
+ sseLines.push(
4417
+ "event: content_block_stop",
4418
+ 'data: {"type":"content_block_stop","index":0}',
4419
+ "",
4420
+ "event: content_block_start",
4421
+ 'data: {"type":"content_block_start","index":1,"content_block":{"type":"text","text":""}}',
4422
+ ""
4423
+ );
4424
+ state.contentBlockStarted = true;
4425
+ } else if (!state.contentBlockStarted && !state.thinkingBlockStarted) {
4426
+ sseLines.push(
4427
+ "event: content_block_start",
4428
+ 'data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}}',
4429
+ ""
4430
+ );
4431
+ state.contentBlockStarted = true;
4432
+ }
4433
+ state.textContent += content;
4434
+ const blockIndex = state.thinkingBlockStarted ? 1 : 0;
4435
+ sseLines.push(
4436
+ "event: content_block_delta",
4437
+ `data: {"type":"content_block_delta","index":${blockIndex},"delta":{"type":"text_delta","text":"${this.escapeJsonString(content)}"}}`,
4438
+ ""
4439
+ );
3925
4440
  }
3926
- /**
3927
- * Returns the current configuration
3928
- */
3929
- getConfig() {
3930
- return { ...this.config };
4441
+ updateUsageFromChunk(chunk, state) {
4442
+ const usage = chunk?.usage || chunk?.response?.usage;
4443
+ if (!usage) return;
4444
+ if (typeof usage.prompt_tokens === "number") {
4445
+ state.usage.input_tokens = usage.prompt_tokens;
4446
+ }
4447
+ if (typeof usage.completion_tokens === "number") {
4448
+ state.usage.output_tokens = usage.completion_tokens;
4449
+ }
4450
+ if (typeof usage.input_tokens === "number") {
4451
+ state.usage.input_tokens = usage.input_tokens;
4452
+ }
4453
+ if (typeof usage.output_tokens === "number") {
4454
+ state.usage.output_tokens = usage.output_tokens;
4455
+ }
3931
4456
  }
3932
- /**
3933
- * Performs the core conversion with validation (synchronous version)
3934
- * Gives the static methods the enhanced behavior while staying synchronous
3935
- */
3936
- performCoreConversionWithValidation(anthropicRequest) {
3937
- if (this.config.validation.enabled) {
3938
- try {
3939
- validateAnthropicRequest(anthropicRequest);
3940
- } catch (error) {
3941
- if (this.config.validation.strict) {
3942
- throw error;
3943
- } else {
3944
- const errorSummary = this.getValidationErrorSummary(error);
3945
- console.warn(`[A2ORequestAdapter] Input validation warning: ${errorSummary}. Details saved to logs.`);
3946
- }
3947
- }
4457
+ isResponsesEvent(chunk) {
4458
+ return typeof chunk?.type === "string" && chunk.type.startsWith("response.");
4459
+ }
4460
+ processResponsesEvent(event, state, sseLines) {
4461
+ this.updateUsageFromChunk(event, state);
4462
+ switch (event.type) {
4463
+ case "response.output_item.added":
4464
+ this.handleResponsesOutputItemAdded(event, state, sseLines);
4465
+ break;
4466
+ case "response.function_call_arguments.delta":
4467
+ this.handleResponsesFunctionArgumentsDelta(event, state, sseLines);
4468
+ break;
4469
+ case "response.function_call_arguments.done":
4470
+ case "response.output_item.done":
4471
+ this.handleResponsesFunctionArgumentsDone(event, state, sseLines);
4472
+ break;
4473
+ case "response.output_text.delta":
4474
+ case "response.text.delta":
4475
+ this.appendTextContent(this.extractResponsesTextDelta(event), state, sseLines);
4476
+ break;
4477
+ case "response.output_text.done":
4478
+ case "response.text.done":
4479
+ break;
4480
+ case "response.thinking.delta":
4481
+ this.appendThinkingContent(this.extractResponsesThinkingDelta(event), state, sseLines);
4482
+ break;
4483
+ default:
4484
+ break;
3948
4485
  }
3949
- let processedRequest = anthropicRequest;
3950
- if (this.config.healing.enabled) {
3951
- try {
3952
- processedRequest = this.applySyncHealing(anthropicRequest);
3953
- } catch (healingError) {
3954
- console.warn("[A2ORequestAdapter] Healing failed:", healingError);
4486
+ }
4487
+ resolveResponsesToolData(identifiers, state) {
4488
+ const aliases = [];
4489
+ if (identifiers.call_id) aliases.push(`responses_call_${identifiers.call_id}`);
4490
+ if (identifiers.item_id) aliases.push(`responses_item_${identifiers.item_id}`);
4491
+ if (typeof identifiers.output_index === "number") aliases.push(`responses_index_${identifiers.output_index}`);
4492
+ let toolData;
4493
+ for (const alias of aliases) {
4494
+ const existing = state.toolCallsMap.get(alias);
4495
+ if (existing) {
4496
+ toolData = existing;
4497
+ break;
3955
4498
  }
3956
4499
  }
3957
- const result = this.performBasicConversion(processedRequest, true);
3958
- if (this.config.validation.enabled) {
3959
- try {
3960
- validateOpenAIRequest(result);
3961
- } catch (error) {
3962
- if (this.config.validation.strict) {
3963
- throw error;
3964
- } else {
3965
- console.warn("[A2ORequestAdapter] Output validation warning:", error);
3966
- }
4500
+ if (!toolData) {
4501
+ const baseAlias = aliases[0] ?? `responses_auto_${++state.toolCallCounter}`;
4502
+ toolData = this.getOrCreateToolCallState(state, baseAlias);
4503
+ if (!aliases.length) {
4504
+ aliases.push(baseAlias);
3967
4505
  }
3968
4506
  }
3969
- return result;
4507
+ for (const alias of aliases) {
4508
+ this.registerToolCallAlias(state, alias, toolData);
4509
+ }
4510
+ return toolData;
3970
4511
  }
3971
- /**
3972
- * Performs the basic conversion logic (extracted from the original implementation)
3973
- */
3974
- performBasicConversion(anthropicRequest, skipValidation = false) {
3975
- if (!skipValidation && this.config.enableFormatValidation) {
3976
- FormatValidator.validateClaudeRequest(anthropicRequest);
4512
+ handleResponsesOutputItemAdded(event, state, sseLines) {
4513
+ const item = event?.item;
4514
+ if (!item) return;
4515
+ const itemType = item.type;
4516
+ if (itemType !== "function_call" && itemType !== "tool_call") {
4517
+ return;
3977
4518
  }
3978
- const openaiRequest = {
3979
- model: anthropicRequest.model,
3980
- messages: MessageConverter.convertMessages(anthropicRequest.messages, anthropicRequest.system),
3981
- max_tokens: anthropicRequest.max_tokens,
3982
- temperature: anthropicRequest.temperature,
3983
- stream: anthropicRequest.stream,
3984
- n: 1
3985
- };
3986
- if (anthropicRequest.tools && anthropicRequest.tools.length > 0) {
3987
- openaiRequest.tools = this.convertToolDefinitions(anthropicRequest.tools);
4519
+ const toolData = this.resolveResponsesToolData(
4520
+ { call_id: item.call_id ?? item.id, item_id: item.id, output_index: event.output_index },
4521
+ state
4522
+ );
4523
+ if (!toolData.id) {
4524
+ toolData.id = item.call_id || item.id || `call_${++state.toolCallCounter}`;
3988
4525
  }
3989
- const specialFields = ["_anthropic_protocol", "_rovo_tool_injected", "_routeResult"];
3990
- for (const field of specialFields) {
3991
- if (anthropicRequest[field] !== void 0) {
3992
- openaiRequest[field] = anthropicRequest[field];
3993
- }
4526
+ const name = item.name ?? item.function?.name ?? item.function_call?.name;
4527
+ if (name) {
4528
+ toolData.name = name;
3994
4529
  }
3995
- if (this.config.enableFormatValidation && !FormatValidator.validateOpenAIRequest(openaiRequest)) {
3996
- throw new Error("Generated OpenAI request format is invalid");
4530
+ if (typeof item.arguments === "string" && item.arguments.length > 0) {
4531
+ toolData.pendingChunks.push(item.arguments);
4532
+ }
4533
+ const started = this.maybeStartToolBlock(toolData, state, sseLines);
4534
+ if (started || toolData.blockStartSent) {
4535
+ this.flushPendingToolChunks(toolData, sseLines);
3997
4536
  }
3998
- return openaiRequest;
3999
4537
  }
4000
- /**
4001
- * Applies synchronous healing logic
4002
- * A simplified healing pass that does not depend on async operations
4003
- */
4004
- applySyncHealing(request) {
4005
- const healedRequest = { ...request };
4006
- if (!healedRequest.max_tokens || healedRequest.max_tokens <= 0) {
4007
- healedRequest.max_tokens = 4096;
4538
+ handleResponsesFunctionArgumentsDelta(event, state, sseLines) {
4539
+ const toolData = this.resolveResponsesToolData(
4540
+ { call_id: event.call_id, item_id: event.item_id, output_index: event.output_index },
4541
+ state
4542
+ );
4543
+ if (!toolData.id && event.call_id) {
4544
+ toolData.id = event.call_id;
4008
4545
  }
4009
- if (!healedRequest.messages || !Array.isArray(healedRequest.messages)) {
4010
- throw new Error("Invalid messages array");
4546
+ const name = event.name ?? event.function_name ?? event.function?.name;
4547
+ if (name) {
4548
+ toolData.name = name;
4011
4549
  }
4012
- if (!healedRequest.model) {
4013
- healedRequest.model = "claude-sonnet-4";
4550
+ const argsChunk = this.extractArgumentsDelta(event);
4551
+ if (argsChunk) {
4552
+ toolData.pendingChunks.push(argsChunk);
4014
4553
  }
4015
- for (const message of healedRequest.messages) {
4016
- if (!message.role) {
4017
- message.role = "user";
4554
+ const started = this.maybeStartToolBlock(toolData, state, sseLines);
4555
+ if (started || toolData.blockStartSent) {
4556
+ this.flushPendingToolChunks(toolData, sseLines);
4557
+ }
4558
+ }
4559
+ handleResponsesFunctionArgumentsDone(event, state, sseLines) {
4560
+ const toolData = this.resolveResponsesToolData(
4561
+ { call_id: event.call_id, item_id: event.item_id, output_index: event.output_index },
4562
+ state
4563
+ );
4564
+ if (typeof event.arguments === "string" && event.arguments.length > 0) {
4565
+ toolData.pendingChunks.push(event.arguments);
4566
+ }
4567
+ const started = this.maybeStartToolBlock(toolData, state, sseLines);
4568
+ if (started || toolData.blockStartSent) {
4569
+ this.flushPendingToolChunks(toolData, sseLines);
4570
+ }
4571
+ if (toolData.blockStartSent && !toolData.blockStopSent && toolData.blockIndex !== void 0) {
4572
+ sseLines.push(
4573
+ "event: content_block_stop",
4574
+ `data: {"type":"content_block_stop","index":${toolData.blockIndex}}`,
4575
+ ""
4576
+ );
4577
+ toolData.blockStopSent = true;
4578
+ if (toolData.id && !state.completedToolCalls.includes(toolData.id)) {
4579
+ state.completedToolCalls.push(toolData.id);
4580
+ }
4581
+ this.logDebug("Sent content_block_stop", { toolName: toolData.name, blockIndex: toolData.blockIndex });
4582
+ }
4583
+ }
4584
+ extractResponsesTextDelta(event) {
4585
+ if (!event) return void 0;
4586
+ if (typeof event.delta === "string") return event.delta;
4587
+ if (event.delta && typeof event.delta.text === "string") return event.delta.text;
4588
+ if (typeof event.text === "string") return event.text;
4589
+ if (Array.isArray(event.output_text)) {
4590
+ return event.output_text.map((item) => item?.text ?? "").join("");
4591
+ }
4592
+ return void 0;
4593
+ }
4594
+ extractResponsesThinkingDelta(event) {
4595
+ if (!event) return void 0;
4596
+ if (typeof event.delta === "string") return event.delta;
4597
+ if (event.delta && typeof event.delta.thinking === "string") return event.delta.thinking;
4598
+ if (typeof event.text === "string") return event.text;
4599
+ return void 0;
4600
+ }
4601
+ extractArgumentsDelta(event) {
4602
+ if (!event) return void 0;
4603
+ if (typeof event.delta === "string") return event.delta;
4604
+ if (event.delta && typeof event.delta.arguments === "string") return event.delta.arguments;
4605
+ if (typeof event.arguments_delta === "string") return event.arguments_delta;
4606
+ if (typeof event.arguments === "string") return event.arguments;
4607
+ if (typeof event.partial_json === "string") return event.partial_json;
4608
+ return void 0;
4609
+ }
4610
+ /**
4611
+ * Closes any still-open tool-call blocks when the stream ends
4612
+ */
4613
+ closeAllToolCallBlocks(state, sseLines) {
4614
+ const processed = /* @__PURE__ */ new Set();
4615
+ for (const toolData of state.toolCallsMap.values()) {
4616
+ if (processed.has(toolData)) continue;
4617
+ processed.add(toolData);
4618
+ if (!toolData.blockStartSent && toolData.pendingChunks.length > 0) {
4619
+ if (!toolData.name) {
4620
+ toolData.name = "unknown_tool";
4621
+ }
4622
+ const started = this.maybeStartToolBlock(toolData, state, sseLines);
4623
+ if (started) {
4624
+ this.flushPendingToolChunks(toolData, sseLines);
4625
+ }
4018
4626
  }
4019
- if (!message.content) {
4020
- message.content = "";
4627
+ if (toolData.blockStartSent && !toolData.blockStopSent && toolData.blockIndex !== void 0) {
4628
+ this.flushPendingToolChunks(toolData, sseLines);
4629
+ sseLines.push(
4630
+ "event: content_block_stop",
4631
+ `data: {"type":"content_block_stop","index":${toolData.blockIndex}}`,
4632
+ ""
4633
+ );
4634
+ toolData.blockStopSent = true;
4635
+ if (toolData.id && !state.completedToolCalls.includes(toolData.id)) {
4636
+ state.completedToolCalls.push(toolData.id);
4637
+ }
4638
+ this.logDebug("Sent content_block_stop", { toolName: toolData.name, blockIndex: toolData.blockIndex });
4021
4639
  }
4022
4640
  }
4023
- return healedRequest;
4024
4641
  }
4025
4642
  /**
4026
- * Returns validation error details
4643
+ * Adds the final events - supports the dual thinking+content mode
4027
4644
  */
4028
- getValidationErrors(request, type) {
4029
- return FormatValidator.getValidationErrors(request, type);
4645
+ addFinalEvents(state, sseLines) {
4646
+ this.closeAllToolCallBlocks(state, sseLines);
4647
+ if (state.contentBlockStarted) {
4648
+ const blockIndex = state.thinkingBlockStarted ? 1 : 0;
4649
+ sseLines.push(
4650
+ "event: content_block_stop",
4651
+ `data: {"type":"content_block_stop","index":${blockIndex}}`,
4652
+ ""
4653
+ );
4654
+ } else if (state.thinkingBlockStarted) {
4655
+ sseLines.push(
4656
+ "event: content_block_stop",
4657
+ 'data: {"type":"content_block_stop","index":0}',
4658
+ ""
4659
+ );
4660
+ }
4661
+ const stopReason = state.completedToolCalls.length > 0 ? "tool_use" : "end_turn";
4662
+ const usagePayload = state.usage.input_tokens > 0 ? `{"input_tokens":${state.usage.input_tokens},"output_tokens":${state.usage.output_tokens}}` : `{"output_tokens":${state.usage.output_tokens}}`;
4663
+ sseLines.push(
4664
+ "event: message_delta",
4665
+ `data: {"type":"message_delta","delta":{"stop_reason":"${stopReason}","stop_sequence":null},"usage":${usagePayload}}`,
4666
+ "",
4667
+ "event: message_stop",
4668
+ 'data: {"type":"message_stop"}',
4669
+ ""
4670
+ );
4030
4671
  }
4031
4672
  /**
4032
- * Generates a concise validation error summary
4673
+ * Builds the standard response format
4033
4674
  */
4034
- getValidationErrorSummary(error) {
4035
- if (error?.issues?.length > 0) {
4036
- const invalidEnums = error.issues.filter((i) => i.code === "invalid_enum_value");
4037
- const missingFields = error.issues.filter((i) => i.code === "invalid_type");
4038
- const summary = [];
4039
- if (invalidEnums.length > 0) {
4040
- const first = invalidEnums[0];
4041
- summary.push(`invalid_${first.path?.join(".")}: '${first.received}'`);
4042
- }
4043
- if (missingFields.length > 0) {
4044
- summary.push(`${missingFields.length} missing fields`);
4675
+ buildStandardResponse(openaiStream) {
4676
+ const state = this.createConversionState();
4677
+ const lines = openaiStream.split("\n");
4678
+ const noopSseLines = [];
4679
+ for (const line of lines) {
4680
+ if (line.startsWith("data:")) {
4681
+ const dataLine = line.startsWith("data: ") ? line.substring(6) : line.substring(5);
4682
+ if (dataLine.trim() === "[DONE]") break;
4683
+ try {
4684
+ const chunk = JSON.parse(dataLine);
4685
+ noopSseLines.length = 0;
4686
+ this.processStreamChunk(chunk, state, noopSseLines);
4687
+ } catch (error) {
4688
+ }
4045
4689
  }
4046
- return summary.slice(0, 2).join(", ") + (error.issues.length > 5 ? ` (+${error.issues.length - 5} more)` : "");
4047
4690
  }
4048
- return error.message || "Validation failed";
4691
+ const stopReason = state.completedToolCalls.length > 0 ? "tool_use" : "end_turn";
4692
+ return {
4693
+ id: `msg_${Date.now()}`,
4694
+ type: "message",
4695
+ role: "assistant",
4696
+ content: state.textContent ? [
4697
+ {
4698
+ type: "text",
4699
+ text: state.textContent
4700
+ }
4701
+ ] : [],
4702
+ model: "claude-3-sonnet-20240229",
4703
+ stop_reason: stopReason,
4704
+ stop_sequence: null,
4705
+ usage: state.usage
4706
+ };
4049
4707
  }
4050
- };
4051
- var A2ORequestAdapterStatic = {
4052
4708
  /**
4053
- * Converts an Anthropic request into an OpenAI-compatible format (static method)
4054
- * Uses the enhanced converter internally, so every call site automatically gets the enhanced behavior
4709
+ * Creates the conversion state object
4055
4710
  */
4056
- convertAnthropicRequestToOpenAI: (anthropicRequest) => {
4057
- const adapter = new A2ORequestAdapter({
4058
- debugMode: false,
4059
- maxDescriptionLength: 100,
4060
- enableToolNameValidation: true,
4061
- enableFormatValidation: true,
4062
- validation: { enabled: true, strict: false },
4063
- healing: { enabled: true, maxAttempts: 2, enableCustomRules: true },
4064
- recovery: { enabled: false, maxRetries: 0, backoffMs: 1e3 },
4065
- monitoring: { enabled: false, logLevel: "none", enableMetrics: false }
4066
- });
4067
- try {
4068
- const result = adapter.performCoreConversionWithValidation(anthropicRequest);
4069
- return result;
4070
- } catch (error) {
4071
- console.warn(`[A2ORequestAdapterStatic] Enhanced conversion failed, using basic conversion: ${error?.message || error}`);
4072
- return adapter.performBasicConversion(anthropicRequest, true);
4711
+ createConversionState() {
4712
+ return {
4713
+ processedLines: 0,
4714
+ textContent: "",
4715
+ reasoningContent: "",
4716
+ toolCallsMap: /* @__PURE__ */ new Map(),
4717
+ completedToolCalls: [],
4718
+ allSSELines: [],
4719
+ errors: [],
4720
+ usage: {
4721
+ input_tokens: 0,
4722
+ output_tokens: 0
4723
+ },
4724
+ thinkingBlockStarted: false,
4725
+ contentBlockStarted: false,
4726
+ toolCallCounter: 0,
4727
+ nextToolBlockIndex: 1
4728
+ };
4729
+ }
4730
+ parseAnthropicSSEEvent(rawEvent) {
4731
+ const lines = rawEvent.split("\n");
4732
+ let eventType = null;
4733
+ const dataLines = [];
4734
+ for (const line of lines) {
4735
+ if (line.startsWith("event:")) {
4736
+ eventType = line.slice(6).trim();
4737
+ } else if (line.startsWith("data:")) {
4738
+ dataLines.push(line.slice(5).trim());
4739
+ }
4073
4740
  }
4074
- },
4741
+ const dataString = dataLines.join("\n");
4742
+ let data = null;
4743
+ if (dataString) {
4744
+ try {
4745
+ data = JSON.parse(dataString);
4746
+ } catch (error) {
4747
+ this.logDebug("Failed to parse Anthropic SSE JSON", { error });
4748
+ }
4749
+ }
4750
+ return { eventType, data };
4751
+ }
4752
+ extractTextFromAnthropicDelta(data) {
4753
+ const delta = data?.delta;
4754
+ if (!delta) return null;
4755
+ if (typeof delta.text === "string") {
4756
+ return delta.text;
4757
+ }
4758
+ if (delta.type === "text_delta" && typeof delta.text === "string") {
4759
+ return delta.text;
4760
+ }
4761
+ return null;
4762
+ }
4763
+ mapAnthropicStopReasonToOpenAI(reason) {
4764
+ switch (reason) {
4765
+ case "max_tokens":
4766
+ return "length";
4767
+ case "tool_use":
4768
+ return "tool_calls";
4769
+ case "stop_sequence":
4770
+ case "end_turn":
4771
+ default:
4772
+ return "stop";
4773
+ }
4774
+ }
4775
+ buildOpenAIStreamChunk(model, content, finishReason = null) {
4776
+ return {
4777
+ id: `chatcmpl-${Date.now()}`,
4778
+ object: "chat.completion.chunk",
4779
+ created: Math.floor(Date.now() / 1e3),
4780
+ model,
4781
+ choices: [{
4782
+ index: 0,
4783
+ delta: content ? { content } : {},
4784
+ finish_reason: finishReason
4785
+ }]
4786
+ };
4787
+ }
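// --- Editor's note: resulting chunk shape (sketch, not part of the diff):
// { id: "chatcmpl-<ms>", object: "chat.completion.chunk", created: <epoch seconds>,
//   model: "gpt-4o", choices: [{ index: 0, delta: { content: "Hi" }, finish_reason: null }] }
function demoBuildChunk(adapter) {
  console.assert(adapter.buildOpenAIStreamChunk("gpt-4o", "Hi").choices[0].delta.content === "Hi");
}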
4075
4788
  /**
4076
- * Converts an OpenAI response into a Claude-compatible format (static method)
4077
- * Uses the enhanced converter internally
4789
+ * Converts the message format
4078
4790
  */
4079
- convertOpenAIResponseToClaude: (openaiResponse) => {
4080
- const adapter = new A2ORequestAdapter({
4081
- debugMode: false,
4082
- maxDescriptionLength: 100,
4083
- enableToolNameValidation: true,
4084
- enableFormatValidation: true,
4085
- validation: { enabled: true, strict: false },
4086
- healing: { enabled: true, maxAttempts: 2, enableCustomRules: true },
4087
- recovery: { enabled: false, maxRetries: 0, backoffMs: 1e3 },
4088
- monitoring: { enabled: false, logLevel: "none", enableMetrics: false }
4089
- });
4090
- return adapter.convertOpenAIResponseToClaude(openaiResponse);
4091
- },
4791
+ convertMessages(messages) {
4792
+ return messages.map((msg) => ({
4793
+ role: msg.role,
4794
+ content: msg.content
4795
+ }));
4796
+ }
4797
+ /**
4798
+ * Maps an Anthropic model to an OpenAI model
4799
+ */
4800
+ mapAnthropicModelToOpenAI(model) {
4801
+ const supportedModels = [
4802
+ "glm-4.5",
4803
+ "kimi-k2",
4804
+ "deepseek-v3.1",
4805
+ "deepseek-r1",
4806
+ "deepseek-v3",
4807
+ "qwen3-32b",
4808
+ "qwen3-coder",
4809
+ "qwen3-235b",
4810
+ "tstars2.0"
4811
+ ];
4812
+ if (supportedModels.includes(model)) {
4813
+ return model;
4814
+ }
4815
+ const mapping = {
4816
+ "claude-3-sonnet-20240229": "glm-4.5",
4817
+ "claude-3-haiku-20240307": "kimi-k2",
4818
+ "claude-3-opus-20240229": "deepseek-v3.1"
4819
+ };
4820
+ return mapping[model] || "glm-4.5";
4821
+ }
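// --- Editor's note: mapping behavior sketch (values from the tables above):
function demoModelMapping(adapter) {
  console.assert(adapter.mapAnthropicModelToOpenAI("deepseek-v3") === "deepseek-v3");         // pass-through
  console.assert(adapter.mapAnthropicModelToOpenAI("claude-3-haiku-20240307") === "kimi-k2"); // mapped
  console.assert(adapter.mapAnthropicModelToOpenAI("some-unknown-model") === "glm-4.5");      // fallback
}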
4092
4822
  /**
4093
- * Validates the Claude request format (static method)
4823
+ * Checks whether the request contains image content
4094
4824
  */
4095
- validateClaudeRequest: (request) => {
4096
- return FormatValidator.validateClaudeRequest(request);
4097
- },
4825
+ hasImageContent(request) {
4826
+ return request.messages.some(
4827
+ (msg) => Array.isArray(msg.content) && msg.content.some((content) => content?.type === "image")
4828
+ );
4829
+ }
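// --- Editor's note: only array-style message content is inspected for
// type === "image" blocks (sketch, not part of the diff):
function demoHasImages(adapter) {
  console.assert(adapter.hasImageContent({
    messages: [{ role: "user", content: [{ type: "image", source: { type: "base64" } }] }]
  }) === true);
  console.assert(adapter.hasImageContent({
    messages: [{ role: "user", content: "plain text" }]
  }) === false);
}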
4098
4830
  /**
4099
- * Validates the OpenAI request format (static method)
4831
+ * Escapes a string for embedding in JSON
4100
4832
  */
4101
- validateOpenAIRequest: (request) => {
4102
- return FormatValidator.validateOpenAIRequest(request);
4103
- },
4833
+ escapeJsonString(str) {
4834
+ return str.replace(/\\/g, "\\\\").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t");
4835
+ }
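// --- Editor's note: escapeJsonString covers backslashes, quotes, and the
// common whitespace escapes; other control characters pass through unchanged:
function demoEscape(adapter) {
  console.assert(adapter.escapeJsonString('say "hi"\n') === 'say \\"hi\\"\\n');
}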
4104
4836
  /**
4105
- * Returns the list of supported tools (static method)
4837
+ * Returns the initial SSE events (message_start + ping)
4106
4838
  */
4107
- getSupportedTools: () => {
4108
- return [];
4109
- },
4839
+ getInitialSSEEvents(modelName = "claude-sonnet-4", messageId = `msg_${Date.now()}_${Math.random().toString(36).substring(2, 8)}`) {
4840
+ return [
4841
+ "event: message_start",
4842
+ `data: {"type":"message_start","message":{"id":"${messageId}","type":"message","role":"assistant","model":"${modelName}","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":0,"output_tokens":0}}}`,
4843
+ "",
4844
+ "event: ping",
4845
+ 'data: {"type":"ping"}',
4846
+ ""
4847
+ ];
4848
+ }
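// --- Editor's note: the events come back as individual lines; joined with
// "\n" they form a valid SSE prelude (message_start, then a ping):
function demoPrelude(adapter) {
  const prelude = adapter.getInitialSSEEvents("claude-sonnet-4").join("\n") + "\n";
  console.assert(prelude.startsWith("event: message_start"));
}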
4110
4849
  /**
4111
- * Checks whether a tool is supported (static method)
4850
+ * Incrementally converts a single OpenAI data chunk into Anthropic SSE events
4851
+ * Used to process streaming data fragments one at a time
4112
4852
  */
4113
- isToolSupported: (_toolName) => {
4114
- return true;
4115
- },
4853
+ convertIncrementalChunk(openaiDataLine, state) {
4854
+ const logger = this.config.logger;
4855
+ const sseEvents = [];
4856
+ state.processedLines += 1;
4857
+ if (openaiDataLine.trim() === "[DONE]") {
4858
+ this.addFinalEvents(state, sseEvents);
4859
+ state.allSSELines.push(...sseEvents);
4860
+ return sseEvents;
4861
+ }
4862
+ try {
4863
+ const chunk = JSON.parse(openaiDataLine);
4864
+ this.processStreamChunk(chunk, state, sseEvents);
4865
+ if (sseEvents.length > 0) {
4866
+ state.allSSELines.push(...sseEvents);
4867
+ }
4868
+ return sseEvents;
4869
+ } catch (error) {
4870
+ if (this.config.debugMode) {
4871
+ logger.warn("Failed to parse OpenAI stream chunk in convertIncrementalChunk", {
4872
+ line: openaiDataLine.substring(0, 200),
4873
+ error: error instanceof Error ? error.message : String(error)
4874
+ });
4875
+ }
4876
+ state.errors.push({
4877
+ error: error instanceof Error ? error.message : String(error),
4878
+ raw: openaiDataLine
4879
+ });
4880
+ return [];
4881
+ }
4882
+ }
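// --- Editor's note: minimal incremental loop (sketch). Each call takes one
// OpenAI data payload with the "data: " prefix already stripped and returns
// only the Anthropic SSE lines that payload produced:
function demoIncremental(adapter) {
  const state = adapter.createIncrementalState();
  const out = [];
  for (const dataLine of [
    '{"choices":[{"delta":{"content":"Hel"}}]}',
    '{"choices":[{"delta":{"content":"lo"}}]}',
    "[DONE]"
  ]) {
    out.push(...adapter.convertIncrementalChunk(dataLine, state));
  }
  return out;
}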
4116
4883
  /**
4117
- * Returns the tool mapping (static method, deprecated)
4884
+ * Exposes the internal state-creation method for use by external incremental processing flows.
4118
4885
  */
4119
- getToolMapping: (claudeToolName) => {
4120
- return claudeToolName;
4886
+ createIncrementalState() {
4887
+ return this.createConversionState();
4121
4888
  }
4122
4889
  };
4123
4890
 
@@ -4258,15 +5025,36 @@ var ToolCallProcessor = class _ToolCallProcessor {
4258
5025
  * Processes incremental tool calls
4259
5026
  */
4260
5027
  static processIncrementalToolCalls(toolCalls, state, sseLines) {
5028
+ const debugEnabled = process.env.AI_PROTOCOL_DEBUG === "true";
5029
+ if (debugEnabled) {
5030
+ console.debug("[ToolProcessor] processIncrementalToolCalls called with:", JSON.stringify(toolCalls, null, 2));
5031
+ }
4261
5032
  for (const toolCall of toolCalls) {
4262
5033
  const toolId = toolCall.id;
4263
5034
  const toolName = toolCall.function?.name;
4264
5035
  const toolArgs = toolCall.function?.arguments;
5036
+ if (debugEnabled) {
5037
+ console.debug("[ToolProcessor] Processing tool call:", {
5038
+ toolId,
5039
+ toolName,
5040
+ hasArgs: !!toolArgs
5041
+ });
5042
+ }
4265
5043
  if (toolName && toolId && !state.toolCallsMap.has(toolId)) {
5044
+ if (debugEnabled) {
5045
+ console.debug("[ToolProcessor] Starting new tool call:", toolName);
5046
+ }
4266
5047
  _ToolCallProcessor.processToolCallStart(toolId, toolName, state, sseLines);
4267
5048
  }
4268
5049
  if (toolArgs) {
5050
+ if (debugEnabled) {
5051
+ console.debug("[ToolProcessor] Processing tool args, calling processToolArgs");
5052
+ }
4269
5053
  _ToolCallProcessor.processToolArgs(toolId, toolArgs, state, sseLines);
5054
+ } else if (toolName && toolId) {
5055
+ _ToolCallProcessor.processToolArgs(toolId, "", state, sseLines);
5056
+ } else {
5057
+ console.warn("\u26A0\uFE0F\u26A0\uFE0F\u26A0\uFE0F [ToolProcessor] No tool args to process! This will result in empty input!");
4270
5058
  }
4271
5059
  }
4272
5060
  }
@@ -4274,15 +5062,36 @@ var ToolCallProcessor = class _ToolCallProcessor {
4274
5062
  * Processes tool calls
4275
5063
  */
4276
5064
  static processBatchToolCalls(toolCalls, state, sseLines) {
5065
+ const debugEnabled = process.env.AI_PROTOCOL_DEBUG === "true";
5066
+ if (debugEnabled) {
5067
+ console.debug("[ToolProcessor] processBatchToolCalls called with:", JSON.stringify(toolCalls, null, 2));
5068
+ }
4277
5069
  for (const toolCall of toolCalls) {
4278
5070
  const toolId = toolCall.id;
4279
5071
  const toolName = toolCall.function?.name;
4280
5072
  const toolArgs = toolCall.function?.arguments;
5073
+ if (debugEnabled) {
5074
+ console.debug("[ToolProcessor] Processing batch tool call:", {
5075
+ toolId,
5076
+ toolName,
5077
+ hasArgs: !!toolArgs
5078
+ });
5079
+ }
4281
5080
  if (toolName && toolId && !state.toolCallsMap.has(toolId)) {
5081
+ if (debugEnabled) {
5082
+ console.debug("[ToolProcessor] Starting new batch tool call:", toolName);
5083
+ }
4282
5084
  _ToolCallProcessor.processToolCallStart(toolId, toolName, state, sseLines);
4283
5085
  }
4284
5086
  if (toolArgs) {
5087
+ if (debugEnabled) {
5088
+ console.debug("[ToolProcessor] Processing batch tool args, calling processToolArgs");
5089
+ }
4285
5090
  _ToolCallProcessor.processToolArgs(toolId, toolArgs, state, sseLines);
5091
+ } else if (toolName && toolId) {
5092
+ _ToolCallProcessor.processToolArgs(toolId, "", state, sseLines);
5093
+ } else {
5094
+ console.warn("\u26A0\uFE0F\u26A0\uFE0F\u26A0\uFE0F [ToolProcessor] No batch tool args to process! This will result in empty input!");
4286
5095
  }
4287
5096
  }
4288
5097
  }
@@ -4480,6 +5289,247 @@ function generateMessageId() {
4480
5289
  return `msg_${Date.now()}_${Math.random().toString(36).substring(2, 8)}`;
4481
5290
  }
4482
5291
 
5292
+ // src/core/o2a-sse-adapter/stream-converter.ts
5293
+ var StreamConverter = class {
5294
+ constructor(adapter, options = {}) {
5295
+ this.buffer = "";
5296
+ this.adapter = adapter;
5297
+ this.options = {
5298
+ bufferTimeout: 5e3,
5299
+ errorRecovery: true,
5300
+ maxRetries: 3,
5301
+ debug: false,
5302
+ ...options
5303
+ };
5304
+ this.state = this.adapter.createIncrementalState();
5305
+ this.stats = {
5306
+ chunksProcessed: 0,
5307
+ eventsGenerated: 0,
5308
+ errors: 0,
5309
+ retries: 0,
5310
+ startTime: Date.now(),
5311
+ lastUpdateTime: Date.now(),
5312
+ bufferSize: 0
5313
+ };
5314
+ if (this.options.debug) {
5315
+ console.log("[StreamConverter] \u5DF2\u521D\u59CB\u5316\uFF0C\u914D\u7F6E:", this.options);
5316
+ }
5317
+ }
5318
+ /**
5319
+ * Returns the initial events
5320
+ */
5321
+ getInitialEvents() {
5322
+ const events = this.adapter.getInitialSSEEvents(
5323
+ this.options.modelName,
5324
+ this.options.messageId
5325
+ );
5326
+ this.stats.eventsGenerated += events.length;
5327
+ this.stats.lastUpdateTime = Date.now();
5328
+ if (this.options.debug) {
5329
+ console.log("[StreamConverter] \u751F\u6210\u521D\u59CB\u4E8B\u4EF6:", events.length, "\u4E2A");
5330
+ }
5331
+ return events;
5332
+ }
5333
+ /**
5334
+ * Processes a single data chunk
5335
+ */
5336
+ processChunk(chunk) {
5337
+ this.stats.chunksProcessed++;
5338
+ this.stats.lastUpdateTime = Date.now();
5339
+ if (this.options.debug) {
5340
+ console.log("[StreamConverter] \u5904\u7406\u6570\u636E\u5757:", chunk.substring(0, 100) + "...");
5341
+ }
5342
+ try {
5343
+ const events = this.processBufferedData(chunk);
5344
+ this.stats.eventsGenerated += events.length;
5345
+ if (this.options.onChunkProcessed) {
5346
+ this.options.onChunkProcessed(chunk, events);
5347
+ }
5348
+ return events;
5349
+ } catch (error) {
5350
+ return this.handleChunkError(error, chunk);
5351
+ }
5352
+ }
5353
+ /**
5354
+ * Finalizes stream processing
5355
+ */
5356
+ finalize() {
5357
+ if (this.options.debug) {
5358
+ console.log("[StreamConverter] \u7ED3\u675F\u6D41\u5904\u7406\uFF0C\u7F13\u51B2\u533A\u5927\u5C0F:", this.buffer.length);
5359
+ }
5360
+ let events = [];
5361
+ if (this.buffer.trim()) {
5362
+ console.warn("[StreamConverter] \u7F13\u51B2\u533A\u4E2D\u6709\u672A\u5904\u7406\u6570\u636E\uFF0C\u5F3A\u5236\u5904\u7406:", this.buffer);
5363
+ events = this.processIncompleteBuffer();
5364
+ }
5365
+ try {
5366
+ const finalEvents = this.adapter.convertIncrementalChunk("[DONE]", this.state);
5367
+ events.push(...finalEvents);
5368
+ this.stats.eventsGenerated += finalEvents.length;
5369
+ } catch (error) {
5370
+ console.error("[StreamConverter] \u5904\u7406\u7ED3\u675F\u4E8B\u4EF6\u5931\u8D25:", error);
5371
+ }
5372
+ this.clearBufferTimeout();
5373
+ this.stats.lastUpdateTime = Date.now();
5374
+ if (this.options.debug) {
5375
+ console.log("[StreamConverter] \u6D41\u5904\u7406\u5B8C\u6210\uFF0C\u7EDF\u8BA1\u4FE1\u606F:", this.stats);
5376
+ }
5377
+ return events;
5378
+ }
5379
+ /**
5380
+ * Returns the current state
5381
+ */
5382
+ getState() {
5383
+ return { ...this.state };
5384
+ }
5385
+ /**
5386
+ * Resets the state
5387
+ */
5388
+ reset() {
5389
+ this.state = this.adapter.createIncrementalState();
5390
+ this.buffer = "";
5391
+ this.clearBufferTimeout();
5392
+ this.stats = {
5393
+ chunksProcessed: 0,
5394
+ eventsGenerated: 0,
5395
+ errors: 0,
5396
+ retries: 0,
5397
+ startTime: Date.now(),
5398
+ lastUpdateTime: Date.now(),
5399
+ bufferSize: 0
5400
+ };
5401
+ if (this.options.debug) {
5402
+ console.log("[StreamConverter] \u72B6\u6001\u5DF2\u91CD\u7F6E");
5403
+ }
5404
+ }
5405
+ /**
5406
+ * Returns statistics
5407
+ */
5408
+ getStats() {
5409
+ return {
5410
+ ...this.stats,
5411
+ bufferSize: this.buffer.length
5412
+ };
5413
+ }
5414
+ /**
5415
+ * Processes buffered data
5416
+ */
5417
+ processBufferedData(newChunk) {
5418
+ this.buffer += newChunk;
5419
+ this.stats.bufferSize = this.buffer.length;
5420
+ const lines = this.buffer.split("\n");
5421
+ this.buffer = lines.pop() || "";
5422
+ const events = [];
5423
+ for (const line of lines) {
5424
+ if (line.startsWith("data:")) {
5425
+ const jsonStr = line.slice(5).trim();
5426
+ if (jsonStr && jsonStr !== "[DONE]") {
5427
+ const lineEvents = this.processDataLine(jsonStr);
5428
+ events.push(...lineEvents);
5429
+ } else if (jsonStr === "[DONE]") {
5430
+ const finalEvents = this.adapter.convertIncrementalChunk("[DONE]", this.state);
5431
+ events.push(...finalEvents);
5432
+ }
5433
+ }
5434
+ }
5435
+ this.resetBufferTimeout();
5436
+ return events;
5437
+ }
5438
+ /**
5439
+ * Processes a single data line
5440
+ */
5441
+ processDataLine(jsonStr, attempt = 0) {
5442
+ try {
5443
+ const chunkEvents = this.adapter.convertIncrementalChunk(jsonStr, this.state);
5444
+ if (this.options.debug && chunkEvents.length > 0) {
5445
+ console.log("[StreamConverter] \u751F\u6210\u4E8B\u4EF6:", chunkEvents.length, "\u4E2A");
5446
+ }
5447
+ return chunkEvents;
5448
+ } catch (error) {
5449
+ if (this.options.errorRecovery && attempt < (this.options.maxRetries || 3)) {
5450
+ console.warn(`[StreamConverter] \u5904\u7406\u6570\u636E\u884C\u5931\u8D25\uFF0C\u91CD\u8BD5 ${attempt + 1}/${this.options.maxRetries}:`, error);
5451
+ this.stats.retries++;
5452
+ return this.processDataLine(jsonStr, attempt + 1);
5453
+ }
5454
+ this.stats.errors++;
5455
+ console.error("[StreamConverter] \u5904\u7406\u6570\u636E\u884C\u6700\u7EC8\u5931\u8D25:", error, "Data:", jsonStr);
5456
+ if (this.options.onError) {
5457
+ this.options.onError(error, {
5458
+ chunk: jsonStr,
5459
+ state: this.state,
5460
+ attempt,
5461
+ totalRetries: this.stats.retries
5462
+ });
5463
+ }
5464
+ return [];
5465
+ }
5466
+ }
5467
+ /**
5468
+ * Handles a chunk-processing error
5469
+ */
5470
+ handleChunkError(error, chunk) {
5471
+ this.stats.errors++;
5472
+ if (this.options.debug) {
5473
+ console.error("[StreamConverter] \u5757\u5904\u7406\u9519\u8BEF:", error.message);
5474
+ }
5475
+ if (!this.options.errorRecovery) {
5476
+ throw error;
5477
+ }
5478
+ this.state.errors.push(`Chunk processing error: ${error.message}`);
5479
+ if (this.options.onError) {
5480
+ this.options.onError(error, {
5481
+ chunk,
5482
+ state: this.state,
5483
+ totalRetries: this.stats.retries
5484
+ });
5485
+ }
5486
+ return [];
5487
+ }
5488
+ /**
5489
+ * Processes incomplete buffer data
5490
+ */
5491
+ processIncompleteBuffer() {
5492
+ if (!this.buffer.trim()) {
5493
+ return [];
5494
+ }
5495
+ console.warn("[StreamConverter] \u5904\u7406\u4E0D\u5B8C\u6574\u7F13\u51B2\u533A\u6570\u636E:", this.buffer);
5496
+ if (this.buffer.startsWith("data:")) {
5497
+ const jsonStr = this.buffer.slice(5).trim();
5498
+ if (jsonStr) {
5499
+ return this.processDataLine(jsonStr);
5500
+ }
5501
+ }
5502
+ return [];
5503
+ }
5504
+ /**
5505
+ * Resets the buffer timeout
5506
+ */
5507
+ resetBufferTimeout() {
5508
+ this.clearBufferTimeout();
5509
+ if (this.options.bufferTimeout && this.options.bufferTimeout > 0) {
5510
+ this.bufferTimeout = setTimeout(() => {
5511
+ if (this.buffer.trim()) {
5512
+ console.warn("[StreamConverter] \u7F13\u51B2\u533A\u8D85\u65F6\uFF0C\u5F3A\u5236\u5904\u7406\u6570\u636E:", this.buffer);
5513
+ const events = this.processIncompleteBuffer();
5514
+ this.buffer = "";
5515
+ if (events.length > 0 && this.options.onChunkProcessed) {
5516
+ this.options.onChunkProcessed("TIMEOUT_FLUSH", events);
5517
+ }
5518
+ }
5519
+ }, this.options.bufferTimeout);
5520
+ }
5521
+ }
5522
+ /**
5523
+ * Clears the buffer timeout
5524
+ */
5525
+ clearBufferTimeout() {
5526
+ if (this.bufferTimeout) {
5527
+ clearTimeout(this.bufferTimeout);
5528
+ this.bufferTimeout = void 0;
5529
+ }
5530
+ }
5531
+ };
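// --- Editor's note: StreamConverter usage sketch (not part of the diff); it
// assumes the wrapped adapter implements the incremental API used above, i.e.
// createIncrementalState / getInitialSSEEvents / convertIncrementalChunk.
function demoStreamConverter(adapter) {
  const converter = new StreamConverter(adapter, { modelName: "claude-sonnet-4" });
  const events = [...converter.getInitialEvents()];
  events.push(...converter.processChunk('data: {"choices":[{"delta":{"content":"Hi"}}]}\n'));
  events.push(...converter.finalize()); // flushes the buffer and emits closing events
  return events;
}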
5532
+
4483
5533
  // src/core/o2a-sse-adapter/adapter.ts
4484
5534
  var O2ASSEAdapter = class {
4485
5535
  constructor(debugMode = false, config = {}) {
@@ -4651,6 +5701,17 @@ var O2ASSEAdapter = class {
4651
5701
  }
4652
5702
  try {
4653
5703
  const data = JSON.parse(dataContent);
5704
+ if ((data.choices?.length === 0 || !data.choices) && data.prompt_filter_results) {
5705
+ if (this.debugMode) {
5706
+ console.warn("\u26A0\uFE0F [O2ASSEAdapter] \u68C0\u6D4B\u5230Azure\u5185\u5BB9\u8FC7\u6EE4\u5668\u54CD\u5E94:", data.prompt_filter_results);
5707
+ }
5708
+ StreamingStateManager.processTextContent(
5709
+ `\u9519\u8BEF\uFF1A\u5185\u5BB9\u8FC7\u6EE4\u5668\u62E6\u622A\u4E86\u8BF7\u6C42\u3002\u8BF7\u68C0\u67E5\u8F93\u5165\u5185\u5BB9\u662F\u5426\u7B26\u5408\u4F7F\u7528\u653F\u7B56\u3002`,
5710
+ state,
5711
+ sseLines
5712
+ );
5713
+ break;
5714
+ }
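// --- Editor's note (sketch, not part of the diff): a payload that takes this
// branch; the field names follow Azure OpenAI's content-filter annotations and
// the exact shape here is an assumption:
// data: {"choices":[],"prompt_filter_results":[{"prompt_index":0,
//   "content_filter_results":{"hate":{"filtered":true,"severity":"high"}}}]}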
4654
5715
  const choice = data.choices?.[0];
4655
5716
  const delta = choice?.delta;
4656
5717
  if (!delta) {
@@ -4694,6 +5755,19 @@ var O2ASSEAdapter = class {
4694
5755
  processNonStreamingResponse(data, state, sseLines) {
4695
5756
  const choice = data.choices?.[0];
4696
5757
  if (!choice) {
5758
+ if (data.prompt_filter_results || data.choices?.length === 0) {
5759
+ const errorMsg = "Azure\u5185\u5BB9\u8FC7\u6EE4\u5668\u62E6\u622A\u4E86\u8BF7\u6C42";
5760
+ const filterDetails = data.prompt_filter_results ? JSON.stringify(data.prompt_filter_results).substring(0, 500) : "choices\u4E3A\u7A7A";
5761
+ if (this.debugMode) {
5762
+ console.warn(`\u26A0\uFE0F [O2ASSEAdapter] ${errorMsg}:`, filterDetails);
5763
+ }
5764
+ StreamingStateManager.processTextContent(
5765
+ `\u9519\u8BEF\uFF1A\u5185\u5BB9\u8FC7\u6EE4\u5668\u62E6\u622A\u4E86\u8BF7\u6C42\u3002\u8BF7\u68C0\u67E5\u8F93\u5165\u5185\u5BB9\u662F\u5426\u7B26\u5408\u4F7F\u7528\u653F\u7B56\u3002`,
5766
+ state,
5767
+ sseLines
5768
+ );
5769
+ return;
5770
+ }
4697
5771
  if (this.debugMode) {
4698
5772
  console.warn("\u26A0\uFE0F [O2ASSEAdapter] \u975E\u6D41\u5F0F\u54CD\u5E94\u6CA1\u6709choices\u6570\u636E");
4699
5773
  }
@@ -4734,6 +5808,101 @@ var O2ASSEAdapter = class {
4734
5808
  validateClaudeSSE(sseContent) {
4735
5809
  return FormatValidator2.validateClaudeSSE(sseContent);
4736
5810
  }
5811
+ /**
5812
+ * Converts an OpenAI Response stream directly into an Anthropic SSE stream
5813
+ * This is the newly added core streaming method and supports real-time conversion
5814
+ */
5815
+ convertResponseStream(openaiResponse, options = {}) {
5816
+ if (!openaiResponse.body) {
5817
+ throw new Error("Response body is null or undefined");
5818
+ }
5819
+ return this.convertReadableStream(openaiResponse.body, options);
5820
+ }
5821
+ /**
5822
+ * 将 ReadableStream 转换为 Anthropic SSE 流
5823
+ */
5824
+ convertReadableStream(openaiStream, options = {}) {
5825
+ const converter = this.createStreamConverter(options);
5826
+ const decoder = new TextDecoder();
5827
+ return new ReadableStream({
5828
+ async start(controller) {
5829
+ if (options.debug) {
5830
+ console.log("[O2ASSEAdapter] \u5F00\u59CB\u6D41\u5F0F\u8F6C\u6362\uFF0C\u914D\u7F6E:", options);
5831
+ }
5832
+ try {
5833
+ const initialEvents = converter.getInitialEvents();
5834
+ for (const event of initialEvents) {
5835
+ controller.enqueue(event);
5836
+ }
5837
+ } catch (error) {
5838
+ console.error("[O2ASSEAdapter] \u521D\u59CB\u5316\u5931\u8D25:", error);
5839
+ controller.error(error);
5840
+ return;
5841
+ }
5842
+ const reader = openaiStream.getReader();
5843
+ try {
5844
+ while (true) {
5845
+ const { done, value } = await reader.read();
5846
+ if (done) {
5847
+ try {
5848
+ const finalEvents = converter.finalize();
5849
+ for (const event of finalEvents) {
5850
+ controller.enqueue(event);
5851
+ }
5852
+ if (options.debug) {
5853
+ console.log("[O2ASSEAdapter] \u6D41\u5F0F\u8F6C\u6362\u5B8C\u6210\uFF0C\u7EDF\u8BA1:", converter.getStats());
5854
+ }
5855
+ } catch (error) {
5856
+ console.error("[O2ASSEAdapter] \u7ED3\u675F\u5904\u7406\u5931\u8D25:", error);
5857
+ }
5858
+ break;
5859
+ }
5860
+ const chunk = decoder.decode(value, { stream: true });
5861
+ try {
5862
+ const events = converter.processChunk(chunk);
5863
+ for (const event of events) {
5864
+ controller.enqueue(event);
5865
+ }
5866
+ } catch (error) {
5867
+ console.error("[O2ASSEAdapter] \u5757\u5904\u7406\u5931\u8D25:", error);
5868
+ if (options.errorRecovery === false) {
5869
+ controller.error(error);
5870
+ return;
5871
+ }
5872
+ if (options.onError) {
5873
+ options.onError(error, {
5874
+ chunk,
5875
+ state: converter.getState()
5876
+ });
5877
+ }
5878
+ }
5879
+ }
5880
+ } catch (error) {
5881
+ console.error("[O2ASSEAdapter] \u6D41\u5904\u7406\u5931\u8D25:", error);
5882
+ if (options.onError) {
5883
+ options.onError(error, {
5884
+ chunk: "",
5885
+ state: converter.getState()
5886
+ });
5887
+ }
5888
+ controller.error(error);
5889
+ } finally {
5890
+ controller.close();
5891
+ }
5892
+ }
5893
+ });
5894
+ }
5895
+ /**
5896
+ * 创建流式转换器实例
5897
+ * 提供更精细的流处理控制
5898
+ */
5899
+ createStreamConverter(options = {}) {
5900
+ return new StreamConverter(this, {
5901
+ modelName: options.modelName || this.config.defaultModel,
5902
+ debug: options.debug || this.debugMode,
5903
+ ...options
5904
+ });
5905
+ }
4737
5906
  /**
4738
5907
  * Applies the enhancement features to the SSE conversion
4739
5908
  * Including input validation, output healing, and more
@@ -4805,15 +5974,56 @@ var O2ASSEAdapterStatic = {
4805
5974
  validateClaudeSSE: (sseContent) => {
4806
5975
  const adapter = new O2ASSEAdapter(false);
4807
5976
  return adapter.validateClaudeSSE(sseContent);
5977
+ },
5978
+ /**
5979
+ * Converts a Response stream to Anthropic SSE (static method)
5980
+ * New: handles streaming conversion of a Response object directly
5981
+ */
5982
+ convertResponseStream: (openaiResponse, options = {}) => {
5983
+ const adapter = new O2ASSEAdapter(options.debug || false, {
5984
+ defaultModel: options.modelName || "claude-sonnet-4",
5985
+ generateUniqueMessageId: !options.messageId,
5986
+ errorDataMaxLength: 500
5987
+ });
5988
+ return adapter.convertResponseStream(openaiResponse, options);
5989
+ },
5990
+ /**
5991
+ * Converts a ReadableStream to Anthropic SSE (static method)
5992
+ * New: handles streaming conversion of any ReadableStream<Uint8Array>
5993
+ */
5994
+ convertReadableStream: (openaiStream, options = {}) => {
5995
+ const adapter = new O2ASSEAdapter(options.debug || false, {
5996
+ defaultModel: options.modelName || "claude-sonnet-4",
5997
+ generateUniqueMessageId: !options.messageId,
5998
+ errorDataMaxLength: 500
5999
+ });
6000
+ return adapter.convertReadableStream(openaiStream, options);
6001
+ },
6002
+ /**
6003
+ * Creates a stream converter (static method)
6004
+ * New: provides finer-grained control over stream processing
6005
+ */
6006
+ createStreamConverter: (options = {}) => {
6007
+ const adapter = new O2ASSEAdapter(options.debug || false, {
6008
+ defaultModel: options.modelName || "claude-sonnet-4",
6009
+ generateUniqueMessageId: !options.messageId,
6010
+ errorDataMaxLength: 500
6011
+ });
6012
+ return adapter.createStreamConverter(options);
4808
6013
  }
4809
6014
  };
4810
6015
 
4811
6016
  // src/core/standard/standard-protocol-adapter.ts
4812
6017
  var StandardProtocolAdapter = class {
4813
6018
  constructor(options = {}) {
4814
- this.debugMode = options.debugMode || false;
6019
+ this.debugMode = options.debugMode ?? process.env.AI_PROTOCOL_DEBUG === "true";
4815
6020
  this.sseAdapter = new O2ASSEAdapter(this.debugMode);
4816
6021
  }
6022
+ logDebug(message, meta) {
6023
+ if (this.debugMode) {
6024
+ console.debug(message, meta ?? "");
6025
+ }
6026
+ }
4817
6027
  /**
4818
6028
  * 转换Anthropic请求为OpenAI请求格式
4819
6029
  * @param anthropicRequest - Anthropic格式的请求
@@ -4837,7 +6047,7 @@ var StandardProtocolAdapter = class {
4837
6047
  */
4838
6048
  convertFromStreamToStandard(openaiRawStream, modelName, messageId) {
4839
6049
  if (this.debugMode) {
4840
- console.log("\u{1F504} [StandardProtocolAdapter] convertFromStreamToStandard \u5F00\u59CB\u5904\u7406:", {
6050
+ this.logDebug("\u{1F504} [StandardProtocolAdapter] convertFromStreamToStandard \u5F00\u59CB\u5904\u7406:", {
4841
6051
  rawStreamLength: openaiRawStream.length,
4842
6052
  modelName,
4843
6053
  messageId,
@@ -4846,14 +6056,14 @@ var StandardProtocolAdapter = class {
4846
6056
  }
4847
6057
  const sseResult = this.sseAdapter.convertToClaudeSSE(openaiRawStream, modelName, messageId);
4848
6058
  if (this.debugMode) {
4849
- console.log("\u{1F504} [StandardProtocolAdapter] SSE\u8F6C\u6362\u5B8C\u6210:", {
6059
+ this.logDebug("\u{1F504} [StandardProtocolAdapter] SSE\u8F6C\u6362\u5B8C\u6210:", {
4850
6060
  sseResultLength: sseResult.length,
4851
6061
  ssePreview: sseResult.substring(0, 500)
4852
6062
  });
4853
6063
  }
4854
6064
  const standardResponse = this.extractStandardResponseFromSSE(sseResult, modelName, messageId);
4855
6065
  if (this.debugMode) {
4856
- console.log("\u{1F504} [StandardProtocolAdapter] \u6807\u51C6\u54CD\u5E94\u63D0\u53D6\u5B8C\u6210:", {
6066
+ this.logDebug("\u{1F504} [StandardProtocolAdapter] \u6807\u51C6\u54CD\u5E94\u63D0\u53D6\u5B8C\u6210:", {
4857
6067
  contentLength: standardResponse.content.length,
4858
6068
  usage: standardResponse.usage,
4859
6069
  stopReason: standardResponse.stop_reason
@@ -4868,7 +6078,7 @@ var StandardProtocolAdapter = class {
4868
6078
  const lines = sseContent.split("\n");
4869
6079
  const finalMessageId = messageId || generateMessageId();
4870
6080
  if (this.debugMode) {
4871
- console.log("\u{1F50D} [StandardProtocolAdapter] extractStandardResponseFromSSE \u5F00\u59CB\u89E3\u6790:", {
6081
+ this.logDebug("\u{1F50D} [StandardProtocolAdapter] extractStandardResponseFromSSE \u5F00\u59CB\u89E3\u6790:", {
4872
6082
  totalLines: lines.length,
4873
6083
  messageId: finalMessageId
4874
6084
  });
@@ -4888,6 +6098,8 @@ var StandardProtocolAdapter = class {
4888
6098
  };
4889
6099
  let currentTextContent = "";
4890
6100
  const toolCalls = /* @__PURE__ */ new Map();
6101
+ const toolInputBuffers = /* @__PURE__ */ new Map();
6102
+ const indexToToolId = /* @__PURE__ */ new Map();
4891
6103
  let processedDataLines = 0;
4892
6104
  for (const line of lines) {
4893
6105
  if (line.startsWith("data: ")) {
@@ -4900,24 +6112,74 @@ var StandardProtocolAdapter = class {
4900
6112
  if (data.type === "content_block_start") {
4901
6113
  const contentBlock = data.content_block;
4902
6114
  if (contentBlock.type === "tool_use") {
6115
+ const toolIndex = data.index;
4903
6116
  toolCalls.set(contentBlock.id, {
4904
6117
  type: "tool_use",
4905
6118
  id: contentBlock.id,
4906
6119
  name: contentBlock.name,
4907
6120
  input: contentBlock.input || {}
6121
+ // 初始为空对象,稍后会被更新
6122
+ });
6123
+ toolInputBuffers.set(toolIndex, "");
6124
+ indexToToolId.set(toolIndex, contentBlock.id);
6125
+ console.log("\u{1F527}\u{1F527}\u{1F527} [StandardProtocolAdapter] \u6DFB\u52A0\u5DE5\u5177\u8C03\u7528:", {
6126
+ index: toolIndex,
6127
+ toolId: contentBlock.id,
6128
+ name: contentBlock.name,
6129
+ indexToToolIdSize: indexToToolId.size
4908
6130
  });
4909
- if (this.debugMode) {
4910
- console.log("\u{1F527} [StandardProtocolAdapter] \u6DFB\u52A0\u5DE5\u5177\u8C03\u7528:", contentBlock);
4911
- }
4912
6131
  }
4913
6132
  }
4914
6133
  if (data.type === "content_block_delta" && data.delta?.type === "text_delta") {
4915
6134
  currentTextContent += data.delta.text;
4916
6135
  if (this.debugMode && currentTextContent.length % 50 === 0) {
4917
- console.log(`\u{1F4DD} [StandardProtocolAdapter] \u7D2F\u79EF\u6587\u672C\u5185\u5BB9 (${currentTextContent.length}\u5B57\u7B26):`, currentTextContent.substring(currentTextContent.length - 20));
6136
+ this.logDebug(`\u{1F4DD} [StandardProtocolAdapter] \u7D2F\u79EF\u6587\u672C\u5185\u5BB9 (${currentTextContent.length}\u5B57\u7B26)`, currentTextContent.substring(currentTextContent.length - 20));
4918
6137
  }
4919
6138
  }
4920
6139
  if (data.type === "content_block_delta" && data.delta?.type === "input_json_delta") {
6140
+ const toolIndex = data.index;
6141
+ const toolId = indexToToolId.get(toolIndex);
6142
+ if (this.debugMode) {
6143
+ this.logDebug(`\u{1F527}\u{1F527}\u{1F527} [StandardProtocolAdapter] \u68C0\u6D4B\u5230input_json_delta\u4E8B\u4EF6\uFF01`, {
6144
+ toolIndex,
6145
+ toolId: toolId || "NOT_FOUND",
6146
+ delta: data.delta.partial_json
6147
+ });
6148
+ }
6149
+ if (toolId) {
6150
+ const currentBuffer = toolInputBuffers.get(toolIndex) || "";
6151
+ const newBuffer = currentBuffer + data.delta.partial_json;
6152
+ toolInputBuffers.set(toolIndex, newBuffer);
6153
+ if (this.debugMode) {
6154
+ this.logDebug(`\u{1F527} [StandardProtocolAdapter] \u7D2F\u79EF\u5DE5\u5177\u53C2\u6570 (index=${toolIndex}, id=${toolId})`, {
6155
+ bufferLength: newBuffer.length
6156
+ });
6157
+ }
6158
+ } else {
6159
+ console.warn(`\u26A0\uFE0F [StandardProtocolAdapter] \u627E\u4E0D\u5230toolId for index=${toolIndex}`);
6160
+ }
6161
+ }
6162
+ if (data.type === "content_block_stop") {
6163
+ const toolIndex = data.index;
6164
+ const toolId = indexToToolId.get(toolIndex);
6165
+ if (toolId) {
6166
+ const jsonBuffer = toolInputBuffers.get(toolIndex);
6167
+ const tool = toolCalls.get(toolId);
6168
+ if (jsonBuffer && tool) {
6169
+ try {
6170
+ const parsedInput = JSON.parse(jsonBuffer);
6171
+ tool.input = parsedInput;
6172
+ if (this.debugMode) {
6173
+ this.logDebug(`\u2705 [StandardProtocolAdapter] \u5DE5\u5177\u53C2\u6570\u89E3\u6790\u5B8C\u6210 (index=${toolIndex}, id=${toolId})`, parsedInput);
6174
+ }
6175
+ } catch (parseError) {
6176
+ console.warn(`\u26A0\uFE0F [StandardProtocolAdapter] \u5DE5\u5177\u53C2\u6570JSON\u89E3\u6790\u5931\u8D25 (index=${toolIndex}, id=${toolId}):`, {
6177
+ buffer: jsonBuffer,
6178
+ error: parseError
6179
+ });
6180
+ }
6181
+ }
6182
+ }
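// --- Editor's note (sketch, not part of the diff): the event sequence this
// buffering reassembles. content_block_start registers index -> tool id, each
// input_json_delta appends delta.partial_json to that index's buffer, and
// content_block_stop JSON.parses the accumulated buffer into the tool's final
// input, e.g. fragments '{"que' + 'ry":"x"}' become { query: "x" } at the stop.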
4921
6183
  }
4922
6184
  if (data.type === "message_delta") {
4923
6185
  if (data.delta?.stop_reason) {
@@ -4926,7 +6188,7 @@ var StandardProtocolAdapter = class {
4926
6188
  if (data.usage) {
4927
6189
  response.usage = data.usage;
4928
6190
  if (this.debugMode) {
4929
- console.log("\u{1F4CA} [StandardProtocolAdapter] \u66F4\u65B0usage\u4FE1\u606F:", data.usage);
6191
+ this.logDebug("\u{1F4CA} [StandardProtocolAdapter] \u66F4\u65B0usage\u4FE1\u606F:", data.usage);
4930
6192
  }
4931
6193
  }
4932
6194
  }
@@ -4946,7 +6208,7 @@ var StandardProtocolAdapter = class {
4946
6208
  }
4947
6209
  response.content.push(...Array.from(toolCalls.values()));
4948
6210
  if (this.debugMode) {
4949
- console.log("\u2705 [StandardProtocolAdapter] \u6807\u51C6\u54CD\u5E94\u6784\u5EFA\u5B8C\u6210:", {
6211
+ this.logDebug("\u2705 [StandardProtocolAdapter] \u6807\u51C6\u54CD\u5E94\u6784\u5EFA\u5B8C\u6210:", {
4950
6212
  contentCount: response.content.length,
4951
6213
  textLength: currentTextContent.length,
4952
6214
  toolCallsCount: toolCalls.size,
@@ -5807,6 +7069,7 @@ var FEATURES = {
5807
7069
  createAnthropicSDK,
5808
7070
  createOpenAISDK,
5809
7071
  createValidator,
7072
+ downloadImageAsBase64,
5810
7073
  errorRecovery,
5811
7074
  getAllHealingStrategies,
5812
7075
  getGlobalLogger,
@@ -5816,6 +7079,8 @@ var FEATURES = {
5816
7079
  healO2ARequest,
5817
7080
  healO2AResponse,
5818
7081
  healingValidate,
7082
+ isBase64DataUri,
7083
+ isExternalUrl,
5819
7084
  isRecoverable,
5820
7085
  protocolHealer,
5821
7086
  safeValidate,