ai-protocol-adapters 1.0.0-alpha.2 → 1.0.0-alpha.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +7 -7
- package/dist/index.d.mts +862 -648
- package/dist/index.d.ts +862 -648
- package/dist/index.js +2153 -855
- package/dist/index.mjs +2150 -855
- package/package.json +2 -2
package/dist/index.js
CHANGED
|
@@ -453,6 +453,7 @@ __export(index_exports, {
|
|
|
453
453
|
createAnthropicSDK: () => createAnthropicSDK,
|
|
454
454
|
createOpenAISDK: () => createOpenAISDK,
|
|
455
455
|
createValidator: () => createValidator,
|
|
456
|
+
downloadImageAsBase64: () => downloadImageAsBase64,
|
|
456
457
|
errorRecovery: () => errorRecovery,
|
|
457
458
|
getAllHealingStrategies: () => getAllHealingStrategies,
|
|
458
459
|
getGlobalLogger: () => getGlobalLogger,
|
|
@@ -462,6 +463,8 @@ __export(index_exports, {
|
|
|
462
463
|
healO2ARequest: () => healO2ARequest,
|
|
463
464
|
healO2AResponse: () => healO2AResponse,
|
|
464
465
|
healingValidate: () => healingValidate,
|
|
466
|
+
isBase64DataUri: () => isBase64DataUri,
|
|
467
|
+
isExternalUrl: () => isExternalUrl,
|
|
465
468
|
isRecoverable: () => isRecoverable,
|
|
466
469
|
protocolHealer: () => protocolHealer,
|
|
467
470
|
safeValidate: () => safeValidate,
|
|
@@ -504,523 +507,463 @@ function getGlobalLogger() {
|
|
|
504
507
|
return globalLogger;
|
|
505
508
|
}
|
|
506
509
|
|
|
507
|
-
// src/core/
|
|
508
|
-
var
|
|
509
|
-
|
|
510
|
-
|
|
511
|
-
|
|
512
|
-
|
|
513
|
-
|
|
514
|
-
|
|
515
|
-
|
|
516
|
-
|
|
517
|
-
|
|
518
|
-
|
|
519
|
-
}
|
|
510
|
+
// src/core/a2o-request-adapter/config.ts
|
|
511
|
+
var DEFAULT_CONFIG = {
|
|
512
|
+
// 原有配置
|
|
513
|
+
debugMode: false,
|
|
514
|
+
maxDescriptionLength: 100,
|
|
515
|
+
enableToolNameValidation: true,
|
|
516
|
+
enableFormatValidation: true,
|
|
517
|
+
// 新增默认配置
|
|
518
|
+
validation: {
|
|
519
|
+
enabled: true,
|
|
520
|
+
strict: false,
|
|
521
|
+
// 默认开启自动修复
|
|
522
|
+
customSchemas: {}
|
|
523
|
+
},
|
|
524
|
+
healing: {
|
|
525
|
+
enabled: true,
|
|
526
|
+
maxAttempts: 3,
|
|
527
|
+
enableCustomRules: true
|
|
528
|
+
},
|
|
529
|
+
recovery: {
|
|
530
|
+
enabled: true,
|
|
531
|
+
maxRetries: 2,
|
|
532
|
+
backoffMs: 1e3
|
|
533
|
+
},
|
|
534
|
+
monitoring: {
|
|
535
|
+
enabled: false,
|
|
536
|
+
logLevel: "warn",
|
|
537
|
+
enableMetrics: false
|
|
538
|
+
},
|
|
539
|
+
imageProxy: {
|
|
540
|
+
enabled: true,
|
|
541
|
+
// 默认启用图片代理(解决GitHub Copilot等不支持外部URL的问题)
|
|
542
|
+
timeout: 1e4,
|
|
543
|
+
// 10秒超时
|
|
544
|
+
maxSize: 10 * 1024 * 1024
|
|
545
|
+
// 10MB最大文件大小
|
|
520
546
|
}
|
|
547
|
+
};
|
|
548
|
+
var SUPPORTED_IMAGE_TYPES = [
|
|
549
|
+
"image/jpeg",
|
|
550
|
+
"image/png",
|
|
551
|
+
"image/gif",
|
|
552
|
+
"image/webp"
|
|
553
|
+
];
|
|
554
|
+
var TOOL_CONVERSION = {
|
|
521
555
|
/**
|
|
522
|
-
*
|
|
556
|
+
* 终极泛化:完全移除工具名称映射
|
|
557
|
+
* 基于GitHub Copilot API测试结果,100%保持原始格式
|
|
523
558
|
*/
|
|
524
|
-
|
|
525
|
-
|
|
526
|
-
|
|
527
|
-
|
|
559
|
+
PRESERVE_ORIGINAL_NAMES: true,
|
|
560
|
+
/**
|
|
561
|
+
* 默认工具描述
|
|
562
|
+
*/
|
|
563
|
+
DEFAULT_DESCRIPTION: "Tool description",
|
|
564
|
+
/**
|
|
565
|
+
* 未知工具回退名称
|
|
566
|
+
*/
|
|
567
|
+
UNKNOWN_TOOL_FALLBACK: "unknown_tool"
|
|
568
|
+
};
|
|
569
|
+
|
|
570
|
+
// src/core/a2o-request-adapter/image-proxy.ts
|
|
571
|
+
var SUPPORTED_IMAGE_MIME_TYPES = [
|
|
572
|
+
"image/jpeg",
|
|
573
|
+
"image/png",
|
|
574
|
+
"image/gif",
|
|
575
|
+
"image/webp"
|
|
576
|
+
];
|
|
577
|
+
async function downloadImageAsBase64(url, options = {}) {
|
|
578
|
+
const {
|
|
579
|
+
timeout = 1e4,
|
|
580
|
+
maxSize = 10 * 1024 * 1024,
|
|
581
|
+
// 10MB
|
|
582
|
+
userAgent = "ai-protocol-adapters/1.0"
|
|
583
|
+
} = options;
|
|
584
|
+
try {
|
|
585
|
+
const controller = new AbortController();
|
|
586
|
+
const timeoutId = setTimeout(() => controller.abort(), timeout);
|
|
587
|
+
const response = await fetch(url, {
|
|
588
|
+
signal: controller.signal,
|
|
589
|
+
headers: {
|
|
590
|
+
"User-Agent": userAgent
|
|
591
|
+
}
|
|
592
|
+
});
|
|
593
|
+
clearTimeout(timeoutId);
|
|
594
|
+
if (!response.ok) {
|
|
595
|
+
throw new Error(`HTTP ${response.status}: ${response.statusText}`);
|
|
528
596
|
}
|
|
529
|
-
const
|
|
530
|
-
|
|
531
|
-
|
|
532
|
-
stream: anthropicRequest.stream ?? true,
|
|
533
|
-
temperature: anthropicRequest.temperature,
|
|
534
|
-
max_tokens: anthropicRequest.max_tokens
|
|
535
|
-
};
|
|
536
|
-
if (anthropicRequest.tools) {
|
|
537
|
-
openaiRequest.tools = anthropicRequest.tools.map((tool) => ({
|
|
538
|
-
type: "function",
|
|
539
|
-
function: {
|
|
540
|
-
name: tool.name,
|
|
541
|
-
description: tool.description,
|
|
542
|
-
parameters: tool.input_schema
|
|
543
|
-
}
|
|
544
|
-
}));
|
|
597
|
+
const contentType = response.headers.get("content-type");
|
|
598
|
+
if (!contentType || !SUPPORTED_IMAGE_MIME_TYPES.some((type) => contentType.includes(type))) {
|
|
599
|
+
throw new Error(`Unsupported content type: ${contentType}`);
|
|
545
600
|
}
|
|
546
|
-
const
|
|
547
|
-
|
|
548
|
-
|
|
549
|
-
|
|
550
|
-
|
|
551
|
-
|
|
552
|
-
}
|
|
553
|
-
}
|
|
601
|
+
const contentLength = response.headers.get("content-length");
|
|
602
|
+
if (contentLength && parseInt(contentLength) > maxSize) {
|
|
603
|
+
throw new Error(`Image too large: ${contentLength} bytes (max: ${maxSize} bytes)`);
|
|
604
|
+
}
|
|
605
|
+
const arrayBuffer = await response.arrayBuffer();
|
|
606
|
+
if (arrayBuffer.byteLength > maxSize) {
|
|
607
|
+
throw new Error(`Image too large: ${arrayBuffer.byteLength} bytes (max: ${maxSize} bytes)`);
|
|
608
|
+
}
|
|
609
|
+
const base64 = Buffer.from(arrayBuffer).toString("base64");
|
|
610
|
+
return `data:${contentType};base64,${base64}`;
|
|
611
|
+
} catch (error) {
|
|
612
|
+
if (error.name === "AbortError") {
|
|
613
|
+
throw new Error(`Image download timeout after ${timeout}ms`);
|
|
614
|
+
}
|
|
615
|
+
throw new Error(`Failed to download image from ${url}: ${error.message}`);
|
|
554
616
|
}
|
|
617
|
+
}
|
|
618
|
+
function isExternalUrl(url) {
|
|
619
|
+
return url.startsWith("http://") || url.startsWith("https://");
|
|
620
|
+
}
|
|
621
|
+
function isBase64DataUri(url) {
|
|
622
|
+
return url.startsWith("data:");
|
|
623
|
+
}
|
|
624
|
+
|
|
625
|
+
// src/core/a2o-request-adapter/message-converter.ts
|
|
626
|
+
var MessageConverter = class {
|
|
555
627
|
/**
|
|
556
|
-
*
|
|
628
|
+
* 转换消息格式,正确处理工具调用和工具结果
|
|
629
|
+
* 修复关键问题:将tool_use转换为tool_calls,tool_result转换为role:"tool"消息
|
|
630
|
+
* 使用tool_use_id溯回工具名称解决unknown_tool问题
|
|
557
631
|
*/
|
|
558
|
-
|
|
559
|
-
const
|
|
560
|
-
|
|
561
|
-
if (
|
|
562
|
-
|
|
563
|
-
|
|
564
|
-
|
|
565
|
-
});
|
|
632
|
+
static convertMessages(messages, system) {
|
|
633
|
+
const debugEnabled = process.env.AI_PROTOCOL_DEBUG === "true";
|
|
634
|
+
if (debugEnabled) {
|
|
635
|
+
if (system !== void 0) {
|
|
636
|
+
console.debug("[MessageConverter] convertMessages called with system:", JSON.stringify(system, null, 2));
|
|
637
|
+
} else {
|
|
638
|
+
console.debug("[MessageConverter] convertMessages called WITHOUT system parameter");
|
|
566
639
|
}
|
|
567
|
-
|
|
568
|
-
|
|
569
|
-
|
|
570
|
-
|
|
571
|
-
|
|
572
|
-
|
|
573
|
-
|
|
640
|
+
}
|
|
641
|
+
const context = this.createConversionContext(messages);
|
|
642
|
+
const convertedMessages = [];
|
|
643
|
+
for (const msg of messages) {
|
|
644
|
+
if (Array.isArray(msg.content)) {
|
|
645
|
+
const processedMessages = this.processComplexMessage(msg, context);
|
|
646
|
+
convertedMessages.push(...processedMessages);
|
|
647
|
+
} else {
|
|
648
|
+
const safeMsg = { ...msg };
|
|
649
|
+
if (safeMsg.content === null || safeMsg.content === void 0) {
|
|
650
|
+
safeMsg.content = "";
|
|
651
|
+
}
|
|
652
|
+
convertedMessages.push(safeMsg);
|
|
653
|
+
}
|
|
654
|
+
}
|
|
655
|
+
if (system) {
|
|
656
|
+
const systemMessage = this.processSystemMessage(system);
|
|
657
|
+
if (systemMessage) {
|
|
658
|
+
convertedMessages.unshift(systemMessage);
|
|
659
|
+
if (debugEnabled) {
|
|
660
|
+
console.debug("[MessageConverter] System message added to messages array at index 0");
|
|
661
|
+
}
|
|
574
662
|
}
|
|
575
|
-
const anthropicSSE = this.convertToAnthropicSSE(openaiStream, originalRequest.model);
|
|
576
|
-
const anthropicStandardResponse = this.buildStandardResponse(openaiStream);
|
|
577
|
-
return {
|
|
578
|
-
success: true,
|
|
579
|
-
anthropicSSE,
|
|
580
|
-
anthropicStandardResponse
|
|
581
|
-
};
|
|
582
|
-
} catch (error) {
|
|
583
|
-
const errorMessage = error instanceof Error ? error.message : "Unknown conversion error";
|
|
584
|
-
logger.error("Stream conversion failed", { error: errorMessage });
|
|
585
|
-
return {
|
|
586
|
-
success: false,
|
|
587
|
-
error: errorMessage,
|
|
588
|
-
anthropicSSE: "",
|
|
589
|
-
anthropicStandardResponse: null
|
|
590
|
-
};
|
|
591
663
|
}
|
|
664
|
+
if (debugEnabled) {
|
|
665
|
+
console.debug("[MessageConverter] Final converted messages count:", convertedMessages.length);
|
|
666
|
+
console.debug("[MessageConverter] First message:", JSON.stringify(convertedMessages[0], null, 2));
|
|
667
|
+
}
|
|
668
|
+
return convertedMessages.map((msg) => {
|
|
669
|
+
if (Array.isArray(msg.tools)) {
|
|
670
|
+
msg.tools = msg.tools.map((tool) => {
|
|
671
|
+
if (tool?.type === "function" && tool.function) {
|
|
672
|
+
const description = tool.function.description?.trim() || "Converted tool with no description provided.";
|
|
673
|
+
return {
|
|
674
|
+
...tool,
|
|
675
|
+
function: {
|
|
676
|
+
...tool.function,
|
|
677
|
+
description
|
|
678
|
+
}
|
|
679
|
+
};
|
|
680
|
+
}
|
|
681
|
+
return tool;
|
|
682
|
+
});
|
|
683
|
+
}
|
|
684
|
+
return msg;
|
|
685
|
+
});
|
|
592
686
|
}
|
|
593
687
|
/**
|
|
594
|
-
*
|
|
688
|
+
* 创建消息转换上下文
|
|
595
689
|
*/
|
|
596
|
-
|
|
597
|
-
const
|
|
598
|
-
const
|
|
599
|
-
|
|
600
|
-
|
|
601
|
-
|
|
602
|
-
|
|
603
|
-
""
|
|
604
|
-
);
|
|
605
|
-
for (const line of lines) {
|
|
606
|
-
if (line.startsWith("data:")) {
|
|
607
|
-
const dataLine = line.substring(5);
|
|
608
|
-
if (dataLine.trim() === "[DONE]") {
|
|
609
|
-
this.addFinalEvents(state, sseLines);
|
|
610
|
-
break;
|
|
611
|
-
}
|
|
612
|
-
try {
|
|
613
|
-
const chunk = JSON.parse(dataLine);
|
|
614
|
-
this.processStreamChunk(chunk, state, sseLines);
|
|
615
|
-
} catch (error) {
|
|
616
|
-
if (this.config.debugMode) {
|
|
617
|
-
this.config.logger.warn("Failed to parse stream chunk", { line: dataLine.substring(0, 200) });
|
|
690
|
+
static createConversionContext(messages) {
|
|
691
|
+
const toolIdToNameMap = /* @__PURE__ */ new Map();
|
|
692
|
+
for (const msg of messages) {
|
|
693
|
+
if (Array.isArray(msg.content)) {
|
|
694
|
+
for (const item of msg.content) {
|
|
695
|
+
if (typeof item === "object" && item !== null && item.type === "tool_use") {
|
|
696
|
+
toolIdToNameMap.set(item.id, item.name);
|
|
618
697
|
}
|
|
619
698
|
}
|
|
620
699
|
}
|
|
621
700
|
}
|
|
622
|
-
return
|
|
701
|
+
return {
|
|
702
|
+
toolIdToNameMap,
|
|
703
|
+
hasSystemMessage: false
|
|
704
|
+
};
|
|
623
705
|
}
|
|
624
706
|
/**
|
|
625
|
-
*
|
|
707
|
+
* 处理复杂消息(包含多种内容类型)
|
|
626
708
|
*/
|
|
627
|
-
|
|
628
|
-
const
|
|
629
|
-
|
|
630
|
-
|
|
631
|
-
|
|
632
|
-
|
|
633
|
-
|
|
634
|
-
|
|
635
|
-
|
|
636
|
-
|
|
637
|
-
|
|
638
|
-
);
|
|
639
|
-
state.thinkingBlockStarted = true;
|
|
709
|
+
static processComplexMessage(msg, context) {
|
|
710
|
+
const { textContent, toolUses, toolResults } = this.categorizeContent(msg.content);
|
|
711
|
+
const resultMessages = [];
|
|
712
|
+
if (msg.role === "assistant" && toolUses.length > 0) {
|
|
713
|
+
const assistantMessage = this.createAssistantMessageWithToolCalls(textContent, toolUses);
|
|
714
|
+
resultMessages.push(assistantMessage);
|
|
715
|
+
} else if (toolResults.length > 0) {
|
|
716
|
+
const toolMessages = this.createToolResultMessages(toolResults, context.toolIdToNameMap);
|
|
717
|
+
resultMessages.push(...toolMessages);
|
|
718
|
+
const textMessage = this.createTextMessage(msg.role, textContent);
|
|
719
|
+
if (textMessage) {
|
|
720
|
+
resultMessages.push(textMessage);
|
|
640
721
|
}
|
|
641
|
-
|
|
642
|
-
|
|
643
|
-
|
|
644
|
-
|
|
645
|
-
);
|
|
646
|
-
}
|
|
647
|
-
if (delta.content && delta.content !== "") {
|
|
648
|
-
if (state.thinkingBlockStarted && !state.contentBlockStarted) {
|
|
649
|
-
sseLines.push(
|
|
650
|
-
"event: content_block_delta",
|
|
651
|
-
'data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"</thinking>\\n\\n"}}',
|
|
652
|
-
"",
|
|
653
|
-
"event: content_block_stop",
|
|
654
|
-
'data: {"type":"content_block_stop","index":0}',
|
|
655
|
-
"",
|
|
656
|
-
"event: content_block_start",
|
|
657
|
-
'data: {"type":"content_block_start","index":1,"content_block":{"type":"text","text":""}}',
|
|
658
|
-
""
|
|
659
|
-
);
|
|
660
|
-
state.contentBlockStarted = true;
|
|
661
|
-
} else if (!state.contentBlockStarted && !state.thinkingBlockStarted) {
|
|
662
|
-
sseLines.push(
|
|
663
|
-
"event: content_block_start",
|
|
664
|
-
'data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}}',
|
|
665
|
-
""
|
|
666
|
-
);
|
|
667
|
-
state.contentBlockStarted = true;
|
|
722
|
+
} else if (textContent.length > 0) {
|
|
723
|
+
const textMessage = this.createTextMessage(msg.role, textContent);
|
|
724
|
+
if (textMessage) {
|
|
725
|
+
resultMessages.push(textMessage);
|
|
668
726
|
}
|
|
669
|
-
state.textContent += delta.content;
|
|
670
|
-
const blockIndex = state.thinkingBlockStarted ? 1 : 0;
|
|
671
|
-
sseLines.push(
|
|
672
|
-
"event: content_block_delta",
|
|
673
|
-
`data: {"type":"content_block_delta","index":${blockIndex},"delta":{"type":"text_delta","text":"${this.escapeJsonString(delta.content)}"}}`,
|
|
674
|
-
""
|
|
675
|
-
);
|
|
676
|
-
}
|
|
677
|
-
if (delta.tool_calls) {
|
|
678
|
-
this.processToolCalls(delta.tool_calls, state, sseLines);
|
|
679
|
-
}
|
|
680
|
-
if (chunk.usage) {
|
|
681
|
-
state.usage.input_tokens = chunk.usage.prompt_tokens;
|
|
682
|
-
state.usage.output_tokens = chunk.usage.completion_tokens;
|
|
683
727
|
}
|
|
728
|
+
return resultMessages;
|
|
684
729
|
}
|
|
685
730
|
/**
|
|
686
|
-
*
|
|
731
|
+
* 分类内容块
|
|
687
732
|
*/
|
|
688
|
-
|
|
689
|
-
|
|
690
|
-
|
|
691
|
-
|
|
692
|
-
|
|
693
|
-
|
|
694
|
-
|
|
695
|
-
|
|
696
|
-
|
|
697
|
-
|
|
698
|
-
|
|
699
|
-
|
|
700
|
-
""
|
|
701
|
-
|
|
733
|
+
static categorizeContent(content) {
|
|
734
|
+
const textContent = [];
|
|
735
|
+
const toolUses = [];
|
|
736
|
+
const toolResults = [];
|
|
737
|
+
for (const item of content) {
|
|
738
|
+
if (typeof item === "string") {
|
|
739
|
+
textContent.push({ type: "text", text: item });
|
|
740
|
+
} else if (typeof item === "object" && item !== null) {
|
|
741
|
+
switch (item.type) {
|
|
742
|
+
case "text":
|
|
743
|
+
textContent.push(item);
|
|
744
|
+
break;
|
|
745
|
+
case "tool_use":
|
|
746
|
+
toolUses.push(item);
|
|
747
|
+
break;
|
|
748
|
+
case "tool_result":
|
|
749
|
+
toolResults.push(item);
|
|
750
|
+
break;
|
|
751
|
+
case "image":
|
|
752
|
+
const imageContent = this.convertImageContent(item);
|
|
753
|
+
if (imageContent) {
|
|
754
|
+
textContent.push(imageContent);
|
|
755
|
+
}
|
|
756
|
+
break;
|
|
757
|
+
}
|
|
702
758
|
}
|
|
703
759
|
}
|
|
760
|
+
return { textContent, toolUses, toolResults };
|
|
704
761
|
}
|
|
705
762
|
/**
|
|
706
|
-
*
|
|
763
|
+
* 转换图片内容格式
|
|
764
|
+
* 支持两种格式:URL 和 base64
|
|
707
765
|
*/
|
|
708
|
-
|
|
709
|
-
if (
|
|
710
|
-
|
|
711
|
-
sseLines.push(
|
|
712
|
-
"event: content_block_stop",
|
|
713
|
-
`data: {"type":"content_block_stop","index":${blockIndex}}`,
|
|
714
|
-
""
|
|
715
|
-
);
|
|
716
|
-
} else if (state.thinkingBlockStarted) {
|
|
717
|
-
sseLines.push(
|
|
718
|
-
"event: content_block_delta",
|
|
719
|
-
'data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"</thinking>"}}',
|
|
720
|
-
"",
|
|
721
|
-
"event: content_block_stop",
|
|
722
|
-
'data: {"type":"content_block_stop","index":0}',
|
|
723
|
-
""
|
|
724
|
-
);
|
|
766
|
+
static convertImageContent(item) {
|
|
767
|
+
if (!item.source) {
|
|
768
|
+
return null;
|
|
725
769
|
}
|
|
726
|
-
|
|
727
|
-
|
|
728
|
-
|
|
729
|
-
|
|
730
|
-
|
|
731
|
-
|
|
732
|
-
|
|
733
|
-
);
|
|
734
|
-
}
|
|
735
|
-
/**
|
|
736
|
-
* 构建标准响应格式
|
|
737
|
-
*/
|
|
738
|
-
buildStandardResponse(openaiStream) {
|
|
739
|
-
const state = this.createConversionState();
|
|
740
|
-
const lines = openaiStream.split("\n");
|
|
741
|
-
for (const line of lines) {
|
|
742
|
-
if (line.startsWith("data: ")) {
|
|
743
|
-
const dataLine = line.substring(6);
|
|
744
|
-
if (dataLine.trim() === "[DONE]") break;
|
|
745
|
-
try {
|
|
746
|
-
const chunk = JSON.parse(dataLine);
|
|
747
|
-
const choice = chunk.choices?.[0];
|
|
748
|
-
if (!choice) continue;
|
|
749
|
-
const delta = choice.delta;
|
|
750
|
-
if (delta.content) {
|
|
751
|
-
state.textContent += delta.content;
|
|
752
|
-
}
|
|
753
|
-
if (chunk.usage) {
|
|
754
|
-
state.usage.input_tokens = chunk.usage.prompt_tokens;
|
|
755
|
-
state.usage.output_tokens = chunk.usage.completion_tokens;
|
|
756
|
-
}
|
|
757
|
-
} catch (error) {
|
|
770
|
+
if (item.source.type === "url" && item.source.url) {
|
|
771
|
+
return {
|
|
772
|
+
type: "image_url",
|
|
773
|
+
image_url: {
|
|
774
|
+
url: item.source.url,
|
|
775
|
+
detail: "auto"
|
|
776
|
+
// OpenAI 支持的可选参数
|
|
758
777
|
}
|
|
759
|
-
}
|
|
778
|
+
};
|
|
760
779
|
}
|
|
761
|
-
|
|
762
|
-
|
|
763
|
-
|
|
764
|
-
|
|
765
|
-
|
|
766
|
-
|
|
767
|
-
|
|
768
|
-
|
|
780
|
+
if (item.source.type === "base64" && item.source.data && item.source.media_type) {
|
|
781
|
+
if (!SUPPORTED_IMAGE_TYPES.includes(item.source.media_type)) {
|
|
782
|
+
console.warn(`\u4E0D\u652F\u6301\u7684\u56FE\u7247\u683C\u5F0F: ${item.source.media_type}`);
|
|
783
|
+
return null;
|
|
784
|
+
}
|
|
785
|
+
const dataUri = `data:${item.source.media_type};base64,${item.source.data}`;
|
|
786
|
+
return {
|
|
787
|
+
type: "image_url",
|
|
788
|
+
image_url: {
|
|
789
|
+
url: dataUri,
|
|
790
|
+
detail: "auto"
|
|
769
791
|
}
|
|
770
|
-
|
|
771
|
-
|
|
772
|
-
|
|
773
|
-
stop_sequence: null,
|
|
774
|
-
usage: state.usage
|
|
775
|
-
};
|
|
792
|
+
};
|
|
793
|
+
}
|
|
794
|
+
return null;
|
|
776
795
|
}
|
|
777
796
|
/**
|
|
778
|
-
*
|
|
797
|
+
* 创建包含工具调用的助手消息
|
|
779
798
|
*/
|
|
780
|
-
|
|
781
|
-
|
|
782
|
-
|
|
783
|
-
|
|
784
|
-
|
|
785
|
-
toolCallsMap: /* @__PURE__ */ new Map(),
|
|
786
|
-
completedToolCalls: [],
|
|
787
|
-
allSSELines: [],
|
|
788
|
-
errors: [],
|
|
789
|
-
usage: {
|
|
790
|
-
input_tokens: 0,
|
|
791
|
-
output_tokens: 0
|
|
792
|
-
},
|
|
793
|
-
thinkingBlockStarted: false,
|
|
794
|
-
contentBlockStarted: false
|
|
799
|
+
static createAssistantMessageWithToolCalls(textContent, toolUses) {
|
|
800
|
+
const assistantMessage = {
|
|
801
|
+
role: "assistant",
|
|
802
|
+
content: ""
|
|
803
|
+
// 默认为空字符串,避免null值
|
|
795
804
|
};
|
|
796
|
-
|
|
797
|
-
|
|
798
|
-
|
|
799
|
-
|
|
800
|
-
|
|
801
|
-
return messages.map((msg) => ({
|
|
802
|
-
role: msg.role,
|
|
803
|
-
content: msg.content
|
|
804
|
-
}));
|
|
805
|
-
}
|
|
806
|
-
/**
|
|
807
|
-
* 映射Anthropic模型到OpenAI模型
|
|
808
|
-
*/
|
|
809
|
-
mapAnthropicModelToOpenAI(model) {
|
|
810
|
-
const supportedModels = [
|
|
811
|
-
"glm-4.5",
|
|
812
|
-
"kimi-k2",
|
|
813
|
-
"deepseek-v3.1",
|
|
814
|
-
"deepseek-r1",
|
|
815
|
-
"deepseek-v3",
|
|
816
|
-
"qwen3-32b",
|
|
817
|
-
"qwen3-coder",
|
|
818
|
-
"qwen3-235b",
|
|
819
|
-
"tstars2.0"
|
|
820
|
-
];
|
|
821
|
-
if (supportedModels.includes(model)) {
|
|
822
|
-
return model;
|
|
805
|
+
if (textContent.length > 0) {
|
|
806
|
+
const textOnly = textContent.map((item) => item.text || "").join("");
|
|
807
|
+
if (textOnly.trim()) {
|
|
808
|
+
assistantMessage.content = textOnly.trim();
|
|
809
|
+
}
|
|
823
810
|
}
|
|
824
|
-
|
|
825
|
-
|
|
826
|
-
|
|
827
|
-
|
|
828
|
-
|
|
829
|
-
|
|
811
|
+
assistantMessage.tool_calls = toolUses.map((toolUse) => ({
|
|
812
|
+
id: toolUse.id,
|
|
813
|
+
type: "function",
|
|
814
|
+
function: {
|
|
815
|
+
name: toolUse.name,
|
|
816
|
+
arguments: JSON.stringify(toolUse.input || {})
|
|
817
|
+
}
|
|
818
|
+
}));
|
|
819
|
+
return assistantMessage;
|
|
830
820
|
}
|
|
831
821
|
/**
|
|
832
|
-
*
|
|
822
|
+
* 创建工具结果消息
|
|
833
823
|
*/
|
|
834
|
-
|
|
835
|
-
return
|
|
836
|
-
|
|
837
|
-
|
|
824
|
+
static createToolResultMessages(toolResults, toolIdToNameMap) {
|
|
825
|
+
return toolResults.map((toolResult) => {
|
|
826
|
+
let resultContent = "No content";
|
|
827
|
+
if (toolResult.content) {
|
|
828
|
+
if (typeof toolResult.content === "string") {
|
|
829
|
+
resultContent = toolResult.content;
|
|
830
|
+
} else {
|
|
831
|
+
resultContent = JSON.stringify(toolResult.content, null, 2);
|
|
832
|
+
}
|
|
833
|
+
}
|
|
834
|
+
const toolName = toolIdToNameMap.get(toolResult.tool_use_id) || TOOL_CONVERSION.UNKNOWN_TOOL_FALLBACK;
|
|
835
|
+
return {
|
|
836
|
+
role: "tool",
|
|
837
|
+
tool_call_id: toolResult.tool_use_id,
|
|
838
|
+
name: toolName,
|
|
839
|
+
content: resultContent
|
|
840
|
+
};
|
|
841
|
+
});
|
|
838
842
|
}
|
|
839
843
|
/**
|
|
840
|
-
*
|
|
844
|
+
* 创建文本消息
|
|
841
845
|
*/
|
|
842
|
-
|
|
843
|
-
|
|
846
|
+
static createTextMessage(role, textContent) {
|
|
847
|
+
if (textContent.length === 0) return null;
|
|
848
|
+
const hasNonTextContent = textContent.some((item) => item.type !== "text");
|
|
849
|
+
if (hasNonTextContent) {
|
|
850
|
+
return {
|
|
851
|
+
role,
|
|
852
|
+
content: textContent
|
|
853
|
+
};
|
|
854
|
+
} else {
|
|
855
|
+
const textOnly = textContent.map((item) => item.text || "").join("");
|
|
856
|
+
return {
|
|
857
|
+
role,
|
|
858
|
+
content: textOnly.trim() || ""
|
|
859
|
+
// 确保content为字符串,避免null
|
|
860
|
+
};
|
|
861
|
+
}
|
|
844
862
|
}
|
|
845
863
|
/**
|
|
846
|
-
*
|
|
864
|
+
* 处理系统消息
|
|
847
865
|
*/
|
|
848
|
-
|
|
849
|
-
|
|
850
|
-
|
|
851
|
-
|
|
852
|
-
|
|
853
|
-
|
|
854
|
-
|
|
855
|
-
|
|
856
|
-
|
|
866
|
+
static processSystemMessage(system) {
|
|
867
|
+
let systemContent;
|
|
868
|
+
if (Array.isArray(system)) {
|
|
869
|
+
systemContent = system.map((s) => {
|
|
870
|
+
if (typeof s === "string") {
|
|
871
|
+
return s;
|
|
872
|
+
}
|
|
873
|
+
return s.text || "";
|
|
874
|
+
}).filter((text) => text.length > 0).join("\n").trim();
|
|
875
|
+
} else {
|
|
876
|
+
systemContent = system;
|
|
877
|
+
}
|
|
878
|
+
if (systemContent) {
|
|
879
|
+
return {
|
|
880
|
+
role: "system",
|
|
881
|
+
content: systemContent
|
|
882
|
+
};
|
|
883
|
+
}
|
|
884
|
+
return null;
|
|
857
885
|
}
|
|
858
886
|
/**
|
|
859
|
-
*
|
|
860
|
-
*
|
|
887
|
+
* 异步转换图片内容格式(支持URL自动下载转base64)
|
|
888
|
+
* @param item 图片内容项
|
|
889
|
+
* @param downloadUrls 是否下载URL并转换为base64(默认true)
|
|
861
890
|
*/
|
|
862
|
-
|
|
863
|
-
|
|
864
|
-
|
|
865
|
-
if (openaiDataLine.trim() === "[DONE]") {
|
|
866
|
-
this.addFinalEvents(state, sseEvents);
|
|
867
|
-
return sseEvents;
|
|
891
|
+
static async convertImageContentAsync(item, downloadUrls = true) {
|
|
892
|
+
if (!item.source) {
|
|
893
|
+
return null;
|
|
868
894
|
}
|
|
869
|
-
|
|
870
|
-
const
|
|
871
|
-
|
|
872
|
-
|
|
873
|
-
|
|
874
|
-
|
|
875
|
-
|
|
876
|
-
|
|
877
|
-
|
|
878
|
-
}
|
|
895
|
+
if (item.source.type === "url" && item.source.url) {
|
|
896
|
+
const url = item.source.url;
|
|
897
|
+
if (isBase64DataUri(url)) {
|
|
898
|
+
return {
|
|
899
|
+
type: "image_url",
|
|
900
|
+
image_url: {
|
|
901
|
+
url,
|
|
902
|
+
detail: "auto"
|
|
903
|
+
}
|
|
904
|
+
};
|
|
879
905
|
}
|
|
880
|
-
|
|
881
|
-
|
|
882
|
-
|
|
883
|
-
|
|
884
|
-
|
|
885
|
-
|
|
886
|
-
|
|
887
|
-
|
|
888
|
-
|
|
889
|
-
|
|
890
|
-
|
|
891
|
-
|
|
892
|
-
|
|
893
|
-
|
|
894
|
-
|
|
895
|
-
|
|
896
|
-
|
|
897
|
-
|
|
898
|
-
|
|
899
|
-
|
|
900
|
-
|
|
901
|
-
maxAttempts: 3,
|
|
902
|
-
enableCustomRules: true
|
|
903
|
-
},
|
|
904
|
-
recovery: {
|
|
905
|
-
enabled: true,
|
|
906
|
-
maxRetries: 2,
|
|
907
|
-
backoffMs: 1e3
|
|
908
|
-
},
|
|
909
|
-
monitoring: {
|
|
910
|
-
enabled: false,
|
|
911
|
-
logLevel: "warn",
|
|
912
|
-
enableMetrics: false
|
|
913
|
-
}
|
|
914
|
-
};
|
|
915
|
-
var SUPPORTED_IMAGE_TYPES = [
|
|
916
|
-
"image/jpeg",
|
|
917
|
-
"image/png",
|
|
918
|
-
"image/gif",
|
|
919
|
-
"image/webp"
|
|
920
|
-
];
|
|
921
|
-
var TOOL_CONVERSION = {
|
|
922
|
-
/**
|
|
923
|
-
* 终极泛化:完全移除工具名称映射
|
|
924
|
-
* 基于GitHub Copilot API测试结果,100%保持原始格式
|
|
925
|
-
*/
|
|
926
|
-
PRESERVE_ORIGINAL_NAMES: true,
|
|
927
|
-
/**
|
|
928
|
-
* 默认工具描述
|
|
929
|
-
*/
|
|
930
|
-
DEFAULT_DESCRIPTION: "Tool description",
|
|
931
|
-
/**
|
|
932
|
-
* 未知工具回退名称
|
|
933
|
-
*/
|
|
934
|
-
UNKNOWN_TOOL_FALLBACK: "unknown_tool"
|
|
935
|
-
};
|
|
936
|
-
|
|
937
|
-
// src/core/a2o-request-adapter/message-converter.ts
|
|
938
|
-
var MessageConverter = class {
|
|
939
|
-
/**
|
|
940
|
-
* 转换消息格式,正确处理工具调用和工具结果
|
|
941
|
-
* 修复关键问题:将tool_use转换为tool_calls,tool_result转换为role:"tool"消息
|
|
942
|
-
* 使用tool_use_id溯回工具名称解决unknown_tool问题
|
|
943
|
-
*/
|
|
944
|
-
static convertMessages(messages, system) {
|
|
945
|
-
const context = this.createConversionContext(messages);
|
|
946
|
-
const convertedMessages = [];
|
|
947
|
-
for (const msg of messages) {
|
|
948
|
-
if (Array.isArray(msg.content)) {
|
|
949
|
-
const processedMessages = this.processComplexMessage(msg, context);
|
|
950
|
-
convertedMessages.push(...processedMessages);
|
|
951
|
-
} else {
|
|
952
|
-
const safeMsg = { ...msg };
|
|
953
|
-
if (safeMsg.content === null || safeMsg.content === void 0) {
|
|
954
|
-
safeMsg.content = "";
|
|
906
|
+
if (downloadUrls && isExternalUrl(url)) {
|
|
907
|
+
try {
|
|
908
|
+
console.log(`[MessageConverter] Downloading image from URL: ${url}`);
|
|
909
|
+
const base64DataUri = await downloadImageAsBase64(url);
|
|
910
|
+
console.log(`[MessageConverter] Successfully converted image to base64`);
|
|
911
|
+
return {
|
|
912
|
+
type: "image_url",
|
|
913
|
+
image_url: {
|
|
914
|
+
url: base64DataUri,
|
|
915
|
+
detail: "auto"
|
|
916
|
+
}
|
|
917
|
+
};
|
|
918
|
+
} catch (error) {
|
|
919
|
+
console.error(`[MessageConverter] Failed to download image: ${error.message}`);
|
|
920
|
+
return {
|
|
921
|
+
type: "image_url",
|
|
922
|
+
image_url: {
|
|
923
|
+
url,
|
|
924
|
+
detail: "auto"
|
|
925
|
+
}
|
|
926
|
+
};
|
|
955
927
|
}
|
|
956
|
-
convertedMessages.push(safeMsg);
|
|
957
|
-
}
|
|
958
|
-
}
|
|
959
|
-
if (system) {
|
|
960
|
-
const systemMessage = this.processSystemMessage(system);
|
|
961
|
-
if (systemMessage) {
|
|
962
|
-
convertedMessages.unshift(systemMessage);
|
|
963
928
|
}
|
|
964
|
-
|
|
965
|
-
|
|
966
|
-
|
|
967
|
-
|
|
968
|
-
|
|
969
|
-
*/
|
|
970
|
-
static createConversionContext(messages) {
|
|
971
|
-
const toolIdToNameMap = /* @__PURE__ */ new Map();
|
|
972
|
-
for (const msg of messages) {
|
|
973
|
-
if (Array.isArray(msg.content)) {
|
|
974
|
-
for (const item of msg.content) {
|
|
975
|
-
if (typeof item === "object" && item !== null && item.type === "tool_use") {
|
|
976
|
-
toolIdToNameMap.set(item.id, item.name);
|
|
977
|
-
}
|
|
929
|
+
return {
|
|
930
|
+
type: "image_url",
|
|
931
|
+
image_url: {
|
|
932
|
+
url,
|
|
933
|
+
detail: "auto"
|
|
978
934
|
}
|
|
979
|
-
}
|
|
935
|
+
};
|
|
980
936
|
}
|
|
981
|
-
|
|
982
|
-
|
|
983
|
-
|
|
984
|
-
|
|
985
|
-
}
|
|
986
|
-
/**
|
|
987
|
-
* 处理复杂消息(包含多种内容类型)
|
|
988
|
-
*/
|
|
989
|
-
static processComplexMessage(msg, context) {
|
|
990
|
-
const { textContent, toolUses, toolResults } = this.categorizeContent(msg.content);
|
|
991
|
-
const resultMessages = [];
|
|
992
|
-
if (msg.role === "assistant" && toolUses.length > 0) {
|
|
993
|
-
const assistantMessage = this.createAssistantMessageWithToolCalls(textContent, toolUses);
|
|
994
|
-
resultMessages.push(assistantMessage);
|
|
995
|
-
} else if (toolResults.length > 0) {
|
|
996
|
-
const toolMessages = this.createToolResultMessages(toolResults, context.toolIdToNameMap);
|
|
997
|
-
resultMessages.push(...toolMessages);
|
|
998
|
-
const textMessage = this.createTextMessage(msg.role, textContent);
|
|
999
|
-
if (textMessage) {
|
|
1000
|
-
resultMessages.push(textMessage);
|
|
1001
|
-
}
|
|
1002
|
-
} else if (textContent.length > 0) {
|
|
1003
|
-
const textMessage = this.createTextMessage(msg.role, textContent);
|
|
1004
|
-
if (textMessage) {
|
|
1005
|
-
resultMessages.push(textMessage);
|
|
937
|
+
if (item.source.type === "base64" && item.source.data && item.source.media_type) {
|
|
938
|
+
if (!SUPPORTED_IMAGE_TYPES.includes(item.source.media_type)) {
|
|
939
|
+
console.warn(`\u4E0D\u652F\u6301\u7684\u56FE\u7247\u683C\u5F0F: ${item.source.media_type}`);
|
|
940
|
+
return null;
|
|
1006
941
|
}
|
|
942
|
+
const dataUri = `data:${item.source.media_type};base64,${item.source.data}`;
|
|
943
|
+
return {
|
|
944
|
+
type: "image_url",
|
|
945
|
+
image_url: {
|
|
946
|
+
url: dataUri,
|
|
947
|
+
detail: "auto"
|
|
948
|
+
}
|
|
949
|
+
};
|
|
1007
950
|
}
|
|
1008
|
-
return
|
|
951
|
+
return null;
|
|
1009
952
|
}
|
|
1010
953
|
/**
|
|
1011
|
-
*
|
|
954
|
+
* 异步处理消息内容(支持图片URL下载)
|
|
1012
955
|
*/
|
|
1013
|
-
static
|
|
956
|
+
static async processMessageContentAsync(content, downloadUrls = true) {
|
|
1014
957
|
const textContent = [];
|
|
1015
958
|
const toolUses = [];
|
|
1016
959
|
const toolResults = [];
|
|
1017
960
|
for (const item of content) {
|
|
1018
|
-
if (
|
|
1019
|
-
textContent.push({ type: "text", text: item });
|
|
1020
|
-
} else if (typeof item === "object" && item !== null) {
|
|
961
|
+
if (item.type) {
|
|
1021
962
|
switch (item.type) {
|
|
1022
963
|
case "text":
|
|
1023
|
-
|
|
964
|
+
if (item.text) {
|
|
965
|
+
textContent.push({ type: "text", text: item.text });
|
|
966
|
+
}
|
|
1024
967
|
break;
|
|
1025
968
|
case "tool_use":
|
|
1026
969
|
toolUses.push(item);
|
|
@@ -1029,7 +972,7 @@ var MessageConverter = class {
|
|
|
1029
972
|
toolResults.push(item);
|
|
1030
973
|
break;
|
|
1031
974
|
case "image":
|
|
1032
|
-
const imageContent = this.
|
|
975
|
+
const imageContent = await this.convertImageContentAsync(item, downloadUrls);
|
|
1033
976
|
if (imageContent) {
|
|
1034
977
|
textContent.push(imageContent);
|
|
1035
978
|
}
|
|
@@ -1040,146 +983,102 @@ var MessageConverter = class {
|
|
|
1040
983
|
return { textContent, toolUses, toolResults };
|
|
1041
984
|
}
|
|
1042
985
|
/**
|
|
1043
|
-
*
|
|
986
|
+
* 异步转换消息格式(支持图片URL自动下载)
|
|
987
|
+
* @param messages Claude格式的消息数组
|
|
988
|
+
* @param system 系统消息
|
|
989
|
+
* @param downloadImageUrls 是否下载图片URL并转换为base64(默认true,解决GitHub Copilot等API不支持外部URL的问题)
|
|
1044
990
|
*/
|
|
1045
|
-
static
|
|
1046
|
-
|
|
1047
|
-
|
|
1048
|
-
|
|
1049
|
-
|
|
1050
|
-
|
|
1051
|
-
|
|
1052
|
-
|
|
1053
|
-
|
|
1054
|
-
|
|
1055
|
-
|
|
991
|
+
static async convertMessagesAsync(messages, system, downloadImageUrls = true) {
|
|
992
|
+
const debugEnabled = process.env.AI_PROTOCOL_DEBUG === "true";
|
|
993
|
+
if (debugEnabled) {
|
|
994
|
+
console.debug(
|
|
995
|
+
`[MessageConverter] convertMessagesAsync called (downloadImageUrls: ${downloadImageUrls})`
|
|
996
|
+
);
|
|
997
|
+
}
|
|
998
|
+
const context = this.createConversionContext(messages);
|
|
999
|
+
const convertedMessages = [];
|
|
1000
|
+
for (const msg of messages) {
|
|
1001
|
+
if (Array.isArray(msg.content)) {
|
|
1002
|
+
const processedMessages = await this.processComplexMessageAsync(msg, context, downloadImageUrls);
|
|
1003
|
+
convertedMessages.push(...processedMessages);
|
|
1004
|
+
} else {
|
|
1005
|
+
const safeMsg = { ...msg };
|
|
1006
|
+
if (safeMsg.content === null || safeMsg.content === void 0) {
|
|
1007
|
+
safeMsg.content = "";
|
|
1056
1008
|
}
|
|
1057
|
-
|
|
1009
|
+
convertedMessages.push(safeMsg);
|
|
1010
|
+
}
|
|
1058
1011
|
}
|
|
1059
|
-
|
|
1012
|
+
const systemMessage = this.processSystemMessage(system);
|
|
1013
|
+
if (systemMessage) {
|
|
1014
|
+
return [systemMessage, ...convertedMessages];
|
|
1015
|
+
}
|
|
1016
|
+
return convertedMessages;
|
|
1060
1017
|
}
|
|
1061
1018
|
/**
|
|
1062
|
-
*
|
|
1019
|
+
* 异步处理复杂消息(支持图片URL下载)
|
|
1063
1020
|
*/
|
|
1064
|
-
static
|
|
1065
|
-
const
|
|
1066
|
-
|
|
1067
|
-
|
|
1068
|
-
|
|
1069
|
-
|
|
1070
|
-
if (
|
|
1071
|
-
const
|
|
1072
|
-
|
|
1073
|
-
|
|
1021
|
+
static async processComplexMessageAsync(msg, context, downloadUrls) {
|
|
1022
|
+
const { textContent, toolUses, toolResults } = await this.processMessageContentAsync(
|
|
1023
|
+
msg.content,
|
|
1024
|
+
downloadUrls
|
|
1025
|
+
);
|
|
1026
|
+
const result = [];
|
|
1027
|
+
if (msg.role === "user") {
|
|
1028
|
+
const toolMessages = this.createToolResultMessages(toolResults, context.toolIdToNameMap);
|
|
1029
|
+
result.push(...toolMessages);
|
|
1030
|
+
const textMessage = this.createTextMessage("user", textContent);
|
|
1031
|
+
if (textMessage) {
|
|
1032
|
+
result.push(textMessage);
|
|
1033
|
+
}
|
|
1034
|
+
} else if (msg.role === "assistant") {
|
|
1035
|
+
if (toolUses.length > 0) {
|
|
1036
|
+
const assistantMessage = this.createAssistantMessageWithToolCalls(textContent, toolUses);
|
|
1037
|
+
result.push(assistantMessage);
|
|
1038
|
+
toolUses.forEach((toolUse) => {
|
|
1039
|
+
context.toolIdToNameMap.set(toolUse.id, toolUse.name);
|
|
1040
|
+
});
|
|
1041
|
+
} else {
|
|
1042
|
+
const textMessage = this.createTextMessage("assistant", textContent);
|
|
1043
|
+
if (textMessage) {
|
|
1044
|
+
result.push(textMessage);
|
|
1045
|
+
}
|
|
1074
1046
|
}
|
|
1075
1047
|
}
|
|
1076
|
-
|
|
1077
|
-
|
|
1048
|
+
return result;
|
|
1049
|
+
}
|
|
1050
|
+
};
|
|
1051
|
+
|
|
1052
|
+
// src/core/a2o-request-adapter/tool-converter.ts
|
|
1053
|
+
var ToolConverter = class {
|
|
1054
|
+
/**
|
|
1055
|
+
* 将Anthropic工具定义转换为OpenAI格式
|
|
1056
|
+
*/
|
|
1057
|
+
static convertAnthropicToolToOpenAI(anthropicTool) {
|
|
1058
|
+
if (!anthropicTool || !anthropicTool.name) {
|
|
1059
|
+
throw new Error("Invalid tool definition: missing name");
|
|
1060
|
+
}
|
|
1061
|
+
const openaiName = anthropicTool.name;
|
|
1062
|
+
const description = this.simplifyDescription(anthropicTool.description || TOOL_CONVERSION.DEFAULT_DESCRIPTION);
|
|
1063
|
+
if (!anthropicTool.input_schema) {
|
|
1064
|
+
throw new Error(`Invalid tool definition for ${anthropicTool.name}: missing input_schema`);
|
|
1065
|
+
}
|
|
1066
|
+
const parameters = {
|
|
1067
|
+
type: anthropicTool.input_schema.type || "object",
|
|
1068
|
+
properties: anthropicTool.input_schema.properties || {},
|
|
1069
|
+
...anthropicTool.input_schema.required && { required: anthropicTool.input_schema.required }
|
|
1070
|
+
};
|
|
1071
|
+
return {
|
|
1078
1072
|
type: "function",
|
|
1079
1073
|
function: {
|
|
1080
|
-
name:
|
|
1081
|
-
|
|
1074
|
+
name: openaiName,
|
|
1075
|
+
description,
|
|
1076
|
+
parameters
|
|
1082
1077
|
}
|
|
1083
|
-
}
|
|
1084
|
-
return assistantMessage;
|
|
1078
|
+
};
|
|
1085
1079
|
}
|
|
1086
1080
|
/**
|
|
1087
|
-
*
|
|
1088
|
-
*/
|
|
1089
|
-
static createToolResultMessages(toolResults, toolIdToNameMap) {
|
|
1090
|
-
return toolResults.map((toolResult) => {
|
|
1091
|
-
let resultContent = "No content";
|
|
1092
|
-
if (toolResult.content) {
|
|
1093
|
-
if (typeof toolResult.content === "string") {
|
|
1094
|
-
resultContent = toolResult.content;
|
|
1095
|
-
} else {
|
|
1096
|
-
resultContent = JSON.stringify(toolResult.content, null, 2);
|
|
1097
|
-
}
|
|
1098
|
-
}
|
|
1099
|
-
const toolName = toolIdToNameMap.get(toolResult.tool_use_id) || TOOL_CONVERSION.UNKNOWN_TOOL_FALLBACK;
|
|
1100
|
-
return {
|
|
1101
|
-
role: "tool",
|
|
1102
|
-
tool_call_id: toolResult.tool_use_id,
|
|
1103
|
-
name: toolName,
|
|
1104
|
-
content: resultContent
|
|
1105
|
-
};
|
|
1106
|
-
});
|
|
1107
|
-
}
|
|
1108
|
-
/**
|
|
1109
|
-
* 创建文本消息
|
|
1110
|
-
*/
|
|
1111
|
-
static createTextMessage(role, textContent) {
|
|
1112
|
-
if (textContent.length === 0) return null;
|
|
1113
|
-
const hasNonTextContent = textContent.some((item) => item.type !== "text");
|
|
1114
|
-
if (hasNonTextContent) {
|
|
1115
|
-
return {
|
|
1116
|
-
role,
|
|
1117
|
-
content: textContent
|
|
1118
|
-
};
|
|
1119
|
-
} else {
|
|
1120
|
-
const textOnly = textContent.map((item) => item.text || "").join("");
|
|
1121
|
-
return {
|
|
1122
|
-
role,
|
|
1123
|
-
content: textOnly.trim() || ""
|
|
1124
|
-
// 确保content为字符串,避免null
|
|
1125
|
-
};
|
|
1126
|
-
}
|
|
1127
|
-
}
|
|
1128
|
-
/**
|
|
1129
|
-
* 处理系统消息
|
|
1130
|
-
*/
|
|
1131
|
-
static processSystemMessage(system) {
|
|
1132
|
-
let systemContent;
|
|
1133
|
-
if (Array.isArray(system)) {
|
|
1134
|
-
systemContent = system.map((s) => {
|
|
1135
|
-
if (typeof s === "string") {
|
|
1136
|
-
return s;
|
|
1137
|
-
}
|
|
1138
|
-
return s.text || "";
|
|
1139
|
-
}).filter((text) => text.length > 0).join("\n").trim();
|
|
1140
|
-
} else {
|
|
1141
|
-
systemContent = system;
|
|
1142
|
-
}
|
|
1143
|
-
if (systemContent) {
|
|
1144
|
-
return {
|
|
1145
|
-
role: "system",
|
|
1146
|
-
content: systemContent
|
|
1147
|
-
};
|
|
1148
|
-
}
|
|
1149
|
-
return null;
|
|
1150
|
-
}
|
|
1151
|
-
};
|
|
1152
|
-
|
|
1153
|
-
// src/core/a2o-request-adapter/tool-converter.ts
|
|
1154
|
-
var ToolConverter = class {
|
|
1155
|
-
/**
|
|
1156
|
-
* 将Anthropic工具定义转换为OpenAI格式
|
|
1157
|
-
*/
|
|
1158
|
-
static convertAnthropicToolToOpenAI(anthropicTool) {
|
|
1159
|
-
if (!anthropicTool || !anthropicTool.name) {
|
|
1160
|
-
throw new Error("Invalid tool definition: missing name");
|
|
1161
|
-
}
|
|
1162
|
-
const openaiName = anthropicTool.name;
|
|
1163
|
-
const description = this.simplifyDescription(anthropicTool.description || TOOL_CONVERSION.DEFAULT_DESCRIPTION);
|
|
1164
|
-
if (!anthropicTool.input_schema) {
|
|
1165
|
-
throw new Error(`Invalid tool definition for ${anthropicTool.name}: missing input_schema`);
|
|
1166
|
-
}
|
|
1167
|
-
const parameters = {
|
|
1168
|
-
type: anthropicTool.input_schema.type || "object",
|
|
1169
|
-
properties: anthropicTool.input_schema.properties || {},
|
|
1170
|
-
...anthropicTool.input_schema.required && { required: anthropicTool.input_schema.required }
|
|
1171
|
-
};
|
|
1172
|
-
return {
|
|
1173
|
-
type: "function",
|
|
1174
|
-
function: {
|
|
1175
|
-
name: openaiName,
|
|
1176
|
-
description,
|
|
1177
|
-
parameters
|
|
1178
|
-
}
|
|
1179
|
-
};
|
|
1180
|
-
}
|
|
1181
|
-
/**
|
|
1182
|
-
* 将OpenAI工具调用转换为Claude格式
|
|
1081
|
+
* 将OpenAI工具调用转换为Claude格式
|
|
1183
1082
|
*/
|
|
1184
1083
|
static convertOpenAIToolCallsToClaude(toolCalls) {
|
|
1185
1084
|
return toolCalls.map((toolCall) => {
|
|
@@ -1204,6 +1103,24 @@ var ToolConverter = class {
|
|
|
1204
1103
|
static isOpenAIToolFormat(tool) {
|
|
1205
1104
|
return tool && tool.type === "function" && tool.function && tool.function.name;
|
|
1206
1105
|
}
|
|
1106
|
+
/**
|
|
1107
|
+
* 确保OpenAI格式工具有有效描述
|
|
1108
|
+
* 处理空字符串、undefined、null等情况
|
|
1109
|
+
*/
|
|
1110
|
+
static ensureOpenAIToolDescription(tool) {
|
|
1111
|
+
if (!tool?.function) return tool;
|
|
1112
|
+
const description = tool.function.description?.trim();
|
|
1113
|
+
if (!description) {
|
|
1114
|
+
return {
|
|
1115
|
+
...tool,
|
|
1116
|
+
function: {
|
|
1117
|
+
...tool.function,
|
|
1118
|
+
description: TOOL_CONVERSION.DEFAULT_DESCRIPTION
|
|
1119
|
+
}
|
|
1120
|
+
};
|
|
1121
|
+
}
|
|
1122
|
+
return tool;
|
|
1123
|
+
}
|
|
1207
1124
|
/**
|
|
1208
1125
|
* 简化Claude的详细描述为OpenAI兼容的简短描述
|
|
1209
1126
|
*/
|
|
@@ -3780,344 +3697,1204 @@ var A2ORequestAdapter = class {
|
|
|
3780
3697
|
} else {
|
|
3781
3698
|
result.data = openaiRequest;
|
|
3782
3699
|
}
|
|
3783
|
-
if (this.config.monitoring.enabled) {
|
|
3784
|
-
const processingTime = Date.now() - startTime;
|
|
3785
|
-
if (this.config.monitoring.logLevel !== "none") {
|
|
3786
|
-
console.log(`[A2O Adapter] Conversion completed in ${processingTime}ms`, {
|
|
3787
|
-
healingApplied: result.healingApplied,
|
|
3788
|
-
fixesCount: result.appliedFixes?.length || 0
|
|
3789
|
-
});
|
|
3790
|
-
}
|
|
3700
|
+
if (this.config.monitoring.enabled) {
|
|
3701
|
+
const processingTime = Date.now() - startTime;
|
|
3702
|
+
if (this.config.monitoring.logLevel !== "none") {
|
|
3703
|
+
console.log(`[A2O Adapter] Conversion completed in ${processingTime}ms`, {
|
|
3704
|
+
healingApplied: result.healingApplied,
|
|
3705
|
+
fixesCount: result.appliedFixes?.length || 0
|
|
3706
|
+
});
|
|
3707
|
+
}
|
|
3708
|
+
}
|
|
3709
|
+
result.success = true;
|
|
3710
|
+
return result;
|
|
3711
|
+
} catch (error) {
|
|
3712
|
+
result.errors.push(`Conversion failed: ${error.message}`);
|
|
3713
|
+
if (this.config.recovery.enabled) {
|
|
3714
|
+
result.warnings.push("Error recovery attempted but not implemented yet");
|
|
3715
|
+
}
|
|
3716
|
+
return result;
|
|
3717
|
+
}
|
|
3718
|
+
}
|
|
3719
|
+
/**
|
|
3720
|
+
* 执行核心转换逻辑(支持图片代理)
|
|
3721
|
+
*/
|
|
3722
|
+
async performCoreConversion(anthropicRequest) {
|
|
3723
|
+
if (this.config.enableFormatValidation) {
|
|
3724
|
+
FormatValidator.validateClaudeRequest(anthropicRequest);
|
|
3725
|
+
}
|
|
3726
|
+
const messages = this.config.imageProxy.enabled ? await MessageConverter.convertMessagesAsync(
|
|
3727
|
+
anthropicRequest.messages,
|
|
3728
|
+
anthropicRequest.system,
|
|
3729
|
+
true
|
|
3730
|
+
// 启用图片下载
|
|
3731
|
+
) : MessageConverter.convertMessages(anthropicRequest.messages, anthropicRequest.system);
|
|
3732
|
+
const openaiRequest = {
|
|
3733
|
+
model: anthropicRequest.model,
|
|
3734
|
+
messages,
|
|
3735
|
+
max_tokens: anthropicRequest.max_tokens,
|
|
3736
|
+
temperature: anthropicRequest.temperature,
|
|
3737
|
+
stream: anthropicRequest.stream
|
|
3738
|
+
};
|
|
3739
|
+
if (anthropicRequest.tools && anthropicRequest.tools.length > 0) {
|
|
3740
|
+
openaiRequest.tools = this.convertToolDefinitions(anthropicRequest.tools);
|
|
3741
|
+
}
|
|
3742
|
+
const specialFields = ["_anthropic_protocol", "_rovo_tool_injected", "_routeResult"];
|
|
3743
|
+
for (const field of specialFields) {
|
|
3744
|
+
if (anthropicRequest[field] !== void 0) {
|
|
3745
|
+
openaiRequest[field] = anthropicRequest[field];
|
|
3746
|
+
}
|
|
3747
|
+
}
|
|
3748
|
+
return openaiRequest;
|
|
3749
|
+
}
|
|
3750
|
+
/**
|
|
3751
|
+
* 转换Anthropic请求格式为OpenAI兼容格式 - 原有方法保持兼容
|
|
3752
|
+
*/
|
|
3753
|
+
convertAnthropicRequestToOpenAI(anthropicRequest) {
|
|
3754
|
+
if (this.config.enableFormatValidation) {
|
|
3755
|
+
FormatValidator.validateClaudeRequest(anthropicRequest);
|
|
3756
|
+
}
|
|
3757
|
+
const openaiRequest = {
|
|
3758
|
+
model: anthropicRequest.model,
|
|
3759
|
+
messages: MessageConverter.convertMessages(anthropicRequest.messages, anthropicRequest.system),
|
|
3760
|
+
max_tokens: anthropicRequest.max_tokens,
|
|
3761
|
+
temperature: anthropicRequest.temperature,
|
|
3762
|
+
stream: anthropicRequest.stream,
|
|
3763
|
+
n: 1
|
|
3764
|
+
};
|
|
3765
|
+
if (anthropicRequest.tools && anthropicRequest.tools.length > 0) {
|
|
3766
|
+
openaiRequest.tools = this.convertToolDefinitions(anthropicRequest.tools);
|
|
3767
|
+
}
|
|
3768
|
+
const specialFields = ["_anthropic_protocol", "_rovo_tool_injected", "_routeResult"];
|
|
3769
|
+
for (const field of specialFields) {
|
|
3770
|
+
if (anthropicRequest[field] !== void 0) {
|
|
3771
|
+
openaiRequest[field] = anthropicRequest[field];
|
|
3772
|
+
}
|
|
3773
|
+
}
|
|
3774
|
+
if (this.config.enableFormatValidation && !FormatValidator.validateOpenAIRequest(openaiRequest)) {
|
|
3775
|
+
throw new Error("Generated OpenAI request format is invalid");
|
|
3776
|
+
}
|
|
3777
|
+
return openaiRequest;
|
|
3778
|
+
}
|
|
3779
|
+
/**
|
|
3780
|
+
* 转换OpenAI响应格式为Claude兼容格式
|
|
3781
|
+
*/
|
|
3782
|
+
convertOpenAIResponseToClaude(openaiResponse) {
|
|
3783
|
+
const claudeContent = [];
|
|
3784
|
+
const message = openaiResponse.choices?.[0]?.message;
|
|
3785
|
+
if (message?.reasoning_content) {
|
|
3786
|
+
claudeContent.push({
|
|
3787
|
+
type: "thinking",
|
|
3788
|
+
thinking: message.reasoning_content
|
|
3789
|
+
});
|
|
3790
|
+
}
|
|
3791
|
+
if (message?.content) {
|
|
3792
|
+
claudeContent.push({
|
|
3793
|
+
type: "text",
|
|
3794
|
+
text: message.content
|
|
3795
|
+
});
|
|
3796
|
+
}
|
|
3797
|
+
if (message?.tool_calls) {
|
|
3798
|
+
const toolUseContents = ToolConverter.convertOpenAIToolCallsToClaude(message.tool_calls);
|
|
3799
|
+
claudeContent.push(...toolUseContents);
|
|
3800
|
+
}
|
|
3801
|
+
const claudeResponse = {
|
|
3802
|
+
role: "assistant",
|
|
3803
|
+
content: claudeContent
|
|
3804
|
+
};
|
|
3805
|
+
return claudeResponse;
|
|
3806
|
+
}
|
|
3807
|
+
/**
|
|
3808
|
+
* 转换工具定义列表
|
|
3809
|
+
* 确保所有工具都有有效描述,无论是Anthropic还是OpenAI格式
|
|
3810
|
+
*/
|
|
3811
|
+
convertToolDefinitions(tools) {
|
|
3812
|
+
return tools.map((tool) => {
|
|
3813
|
+
let openaiTool;
|
|
3814
|
+
if (ToolConverter.isOpenAIToolFormat(tool)) {
|
|
3815
|
+
openaiTool = tool;
|
|
3816
|
+
} else {
|
|
3817
|
+
openaiTool = ToolConverter.convertAnthropicToolToOpenAI(tool);
|
|
3818
|
+
}
|
|
3819
|
+
return ToolConverter.ensureOpenAIToolDescription(openaiTool);
|
|
3820
|
+
});
|
|
3821
|
+
}
|
|
3822
|
+
/**
|
|
3823
|
+
* 验证Claude请求格式
|
|
3824
|
+
*/
|
|
3825
|
+
validateClaudeRequest(request) {
|
|
3826
|
+
return FormatValidator.validateClaudeRequest(request);
|
|
3827
|
+
}
|
|
3828
|
+
/**
|
|
3829
|
+
* 验证OpenAI请求格式
|
|
3830
|
+
*/
|
|
3831
|
+
validateOpenAIRequest(request) {
|
|
3832
|
+
return FormatValidator.validateOpenAIRequest(request);
|
|
3833
|
+
}
|
|
3834
|
+
/**
|
|
3835
|
+
* 获取支持的工具列表
|
|
3836
|
+
*/
|
|
3837
|
+
getSupportedTools() {
|
|
3838
|
+
return [];
|
|
3839
|
+
}
|
|
3840
|
+
/**
|
|
3841
|
+
* 检查工具是否支持
|
|
3842
|
+
*/
|
|
3843
|
+
isToolSupported(_toolName) {
|
|
3844
|
+
return true;
|
|
3845
|
+
}
|
|
3846
|
+
/**
|
|
3847
|
+
* 获取工具映射(已弃用,保持兼容性)
|
|
3848
|
+
*/
|
|
3849
|
+
getToolMapping(claudeToolName) {
|
|
3850
|
+
return claudeToolName;
|
|
3851
|
+
}
|
|
3852
|
+
/**
|
|
3853
|
+
* 更新配置
|
|
3854
|
+
*/
|
|
3855
|
+
updateConfig(newConfig) {
|
|
3856
|
+
this.config = { ...this.config, ...newConfig };
|
|
3857
|
+
}
|
|
3858
|
+
/**
|
|
3859
|
+
* 获取当前配置
|
|
3860
|
+
*/
|
|
3861
|
+
getConfig() {
|
|
3862
|
+
return { ...this.config };
|
|
3863
|
+
}
|
|
3864
|
+
/**
|
|
3865
|
+
* 执行带验证的核心转换(同步版本)
|
|
3866
|
+
* 为静态方法提供增强功能,但保持同步特性
|
|
3867
|
+
*/
|
|
3868
|
+
performCoreConversionWithValidation(anthropicRequest) {
|
|
3869
|
+
if (this.config.validation.enabled) {
|
|
3870
|
+
try {
|
|
3871
|
+
validateAnthropicRequest(anthropicRequest);
|
|
3872
|
+
} catch (error) {
|
|
3873
|
+
if (this.config.validation.strict) {
|
|
3874
|
+
throw error;
|
|
3875
|
+
} else {
|
|
3876
|
+
const errorSummary = this.getValidationErrorSummary(error);
|
|
3877
|
+
console.warn(`[A2ORequestAdapter] Input validation warning: ${errorSummary}. Details saved to logs.`);
|
|
3878
|
+
}
|
|
3879
|
+
}
|
|
3880
|
+
}
|
|
3881
|
+
let processedRequest = anthropicRequest;
|
|
3882
|
+
if (this.config.healing.enabled) {
|
|
3883
|
+
try {
|
|
3884
|
+
processedRequest = this.applySyncHealing(anthropicRequest);
|
|
3885
|
+
} catch (healingError) {
|
|
3886
|
+
console.warn("[A2ORequestAdapter] Healing failed:", healingError);
|
|
3887
|
+
}
|
|
3888
|
+
}
|
|
3889
|
+
const result = this.performBasicConversion(processedRequest, true);
|
|
3890
|
+
if (this.config.validation.enabled) {
|
|
3891
|
+
try {
|
|
3892
|
+
validateOpenAIRequest(result);
|
|
3893
|
+
} catch (error) {
|
|
3894
|
+
if (this.config.validation.strict) {
|
|
3895
|
+
throw error;
|
|
3896
|
+
} else {
|
|
3897
|
+
console.warn("[A2ORequestAdapter] Output validation warning:", error);
|
|
3898
|
+
}
|
|
3899
|
+
}
|
|
3900
|
+
}
|
|
3901
|
+
return result;
|
|
3902
|
+
}
|
|
3903
|
+
/**
|
|
3904
|
+
* 执行基础转换逻辑(原有逻辑的提取)
|
|
3905
|
+
*/
|
|
3906
|
+
performBasicConversion(anthropicRequest, skipValidation = false) {
|
|
3907
|
+
if (!skipValidation && this.config.enableFormatValidation) {
|
|
3908
|
+
FormatValidator.validateClaudeRequest(anthropicRequest);
|
|
3909
|
+
}
|
|
3910
|
+
const openaiRequest = {
|
|
3911
|
+
model: anthropicRequest.model,
|
|
3912
|
+
messages: MessageConverter.convertMessages(anthropicRequest.messages, anthropicRequest.system),
|
|
3913
|
+
max_tokens: anthropicRequest.max_tokens,
|
|
3914
|
+
temperature: anthropicRequest.temperature,
|
|
3915
|
+
stream: anthropicRequest.stream,
|
|
3916
|
+
n: 1
|
|
3917
|
+
};
|
|
3918
|
+
if (anthropicRequest.tools && anthropicRequest.tools.length > 0) {
|
|
3919
|
+
openaiRequest.tools = this.convertToolDefinitions(anthropicRequest.tools);
|
|
3920
|
+
}
|
|
3921
|
+
const specialFields = ["_anthropic_protocol", "_rovo_tool_injected", "_routeResult"];
|
|
3922
|
+
for (const field of specialFields) {
|
|
3923
|
+
if (anthropicRequest[field] !== void 0) {
|
|
3924
|
+
openaiRequest[field] = anthropicRequest[field];
|
|
3925
|
+
}
|
|
3926
|
+
}
|
|
3927
|
+
if (this.config.enableFormatValidation && !FormatValidator.validateOpenAIRequest(openaiRequest)) {
|
|
3928
|
+
throw new Error("Generated OpenAI request format is invalid");
|
|
3929
|
+
}
|
|
3930
|
+
return openaiRequest;
|
|
3931
|
+
}
|
|
3932
|
+
/**
|
|
3933
|
+
* 应用同步修复逻辑
|
|
3934
|
+
* 简化版的修复,不依赖异步操作
|
|
3935
|
+
*/
|
|
3936
|
+
applySyncHealing(request) {
|
|
3937
|
+
const healedRequest = { ...request };
|
|
3938
|
+
if (!healedRequest.max_tokens || healedRequest.max_tokens <= 0) {
|
|
3939
|
+
healedRequest.max_tokens = 4096;
|
|
3940
|
+
}
|
|
3941
|
+
if (!healedRequest.messages || !Array.isArray(healedRequest.messages)) {
|
|
3942
|
+
throw new Error("Invalid messages array");
|
|
3943
|
+
}
|
|
3944
|
+
if (!healedRequest.model) {
|
|
3945
|
+
healedRequest.model = "claude-sonnet-4";
|
|
3946
|
+
}
|
|
3947
|
+
for (const message of healedRequest.messages) {
|
|
3948
|
+
if (!message.role) {
|
|
3949
|
+
message.role = "user";
|
|
3950
|
+
}
|
|
3951
|
+
if (!message.content) {
|
|
3952
|
+
message.content = "";
|
|
3953
|
+
}
|
|
3954
|
+
}
|
|
3955
|
+
return healedRequest;
|
|
3956
|
+
}
|
|
3957
|
+
/**
|
|
3958
|
+
* 获取验证错误详情
|
|
3959
|
+
*/
|
|
3960
|
+
getValidationErrors(request, type) {
|
|
3961
|
+
return FormatValidator.getValidationErrors(request, type);
|
|
3962
|
+
}
|
|
3963
|
+
/**
|
|
3964
|
+
* 生成简洁的验证错误摘要
|
|
3965
|
+
*/
|
|
3966
|
+
getValidationErrorSummary(error) {
|
|
3967
|
+
if (error?.issues?.length > 0) {
|
|
3968
|
+
const invalidEnums = error.issues.filter((i) => i.code === "invalid_enum_value");
|
|
3969
|
+
const missingFields = error.issues.filter((i) => i.code === "invalid_type");
|
|
3970
|
+
const summary = [];
|
|
3971
|
+
if (invalidEnums.length > 0) {
|
|
3972
|
+
const first = invalidEnums[0];
|
|
3973
|
+
summary.push(`invalid_${first.path?.join(".")}: '${first.received}'`);
|
|
3974
|
+
}
|
|
3975
|
+
if (missingFields.length > 0) {
|
|
3976
|
+
summary.push(`${missingFields.length} missing fields`);
|
|
3977
|
+
}
|
|
3978
|
+
return summary.slice(0, 2).join(", ") + (error.issues.length > 5 ? ` (+${error.issues.length - 5} more)` : "");
|
|
3979
|
+
}
|
|
3980
|
+
return error.message || "Validation failed";
|
|
3981
|
+
}
|
|
3982
|
+
};
|
|
3983
|
+
var A2ORequestAdapterStatic = {
|
|
3984
|
+
/**
|
|
3985
|
+
* 转换Anthropic请求格式为OpenAI兼容格式(静态方法)
|
|
3986
|
+
* 内部使用增强转换器,所有调用点自动获得增强功能
|
|
3987
|
+
*/
|
|
3988
|
+
convertAnthropicRequestToOpenAI: (anthropicRequest) => {
|
|
3989
|
+
const adapter = new A2ORequestAdapter({
|
|
3990
|
+
debugMode: false,
|
|
3991
|
+
maxDescriptionLength: 100,
|
|
3992
|
+
enableToolNameValidation: true,
|
|
3993
|
+
enableFormatValidation: true,
|
|
3994
|
+
validation: { enabled: true, strict: false },
|
|
3995
|
+
healing: { enabled: true, maxAttempts: 2, enableCustomRules: true },
|
|
3996
|
+
recovery: { enabled: false, maxRetries: 0, backoffMs: 1e3 },
|
|
3997
|
+
monitoring: { enabled: false, logLevel: "none", enableMetrics: false }
|
|
3998
|
+
});
|
|
3999
|
+
try {
|
|
4000
|
+
const result = adapter.performCoreConversionWithValidation(anthropicRequest);
|
|
4001
|
+
return result;
|
|
4002
|
+
} catch (error) {
|
|
4003
|
+
console.warn(`[A2ORequestAdapterStatic] Enhanced conversion failed, using basic conversion: ${error?.message || error}`);
|
|
4004
|
+
return adapter.performBasicConversion(anthropicRequest, true);
|
|
4005
|
+
}
|
|
4006
|
+
},
|
|
4007
|
+
/**
|
|
4008
|
+
* 转换OpenAI响应格式为Claude兼容格式(静态方法)
|
|
4009
|
+
* 内部使用增强转换器
|
|
4010
|
+
*/
|
|
4011
|
+
convertOpenAIResponseToClaude: (openaiResponse) => {
|
|
4012
|
+
const adapter = new A2ORequestAdapter({
|
|
4013
|
+
debugMode: false,
|
|
4014
|
+
maxDescriptionLength: 100,
|
|
4015
|
+
enableToolNameValidation: true,
|
|
4016
|
+
enableFormatValidation: true,
|
|
4017
|
+
validation: { enabled: true, strict: false },
|
|
4018
|
+
healing: { enabled: true, maxAttempts: 2, enableCustomRules: true },
|
|
4019
|
+
recovery: { enabled: false, maxRetries: 0, backoffMs: 1e3 },
|
|
4020
|
+
monitoring: { enabled: false, logLevel: "none", enableMetrics: false }
|
|
4021
|
+
});
|
|
4022
|
+
return adapter.convertOpenAIResponseToClaude(openaiResponse);
|
|
4023
|
+
},
|
|
4024
|
+
/**
|
|
4025
|
+
* 验证Claude请求格式(静态方法)
|
|
4026
|
+
*/
|
|
4027
|
+
validateClaudeRequest: (request) => {
|
|
4028
|
+
return FormatValidator.validateClaudeRequest(request);
|
|
4029
|
+
},
|
|
4030
|
+
/**
|
|
4031
|
+
* 验证OpenAI请求格式(静态方法)
|
|
4032
|
+
*/
|
|
4033
|
+
validateOpenAIRequest: (request) => {
|
|
4034
|
+
return FormatValidator.validateOpenAIRequest(request);
|
|
4035
|
+
},
|
|
4036
|
+
/**
|
|
4037
|
+
* 获取支持的工具列表(静态方法)
|
|
4038
|
+
*/
|
|
4039
|
+
getSupportedTools: () => {
|
|
4040
|
+
return [];
|
|
4041
|
+
},
|
|
4042
|
+
/**
|
|
4043
|
+
* 检查工具是否支持(静态方法)
|
|
4044
|
+
*/
|
|
4045
|
+
isToolSupported: (_toolName) => {
|
|
4046
|
+
return true;
|
|
4047
|
+
},
|
|
4048
|
+
/**
|
|
4049
|
+
* 获取工具映射(静态方法,已弃用)
|
|
4050
|
+
*/
|
|
4051
|
+
getToolMapping: (claudeToolName) => {
|
|
4052
|
+
return claudeToolName;
|
|
4053
|
+
},
|
|
4054
|
+
/**
|
|
4055
|
+
* 转换Anthropic请求格式为OpenAI兼容格式(异步版本,支持图片URL自动下载)
|
|
4056
|
+
* 解决GitHub Copilot等API不支持外部图片URL的问题
|
|
4057
|
+
* @param anthropicRequest Claude格式的请求
|
|
4058
|
+
* @param downloadImageUrls 是否下载图片URL并转换为base64(默认true)
|
|
4059
|
+
*/
|
|
4060
|
+
convertAnthropicRequestToOpenAIAsync: async (anthropicRequest, downloadImageUrls = true) => {
|
|
4061
|
+
const adapter = new A2ORequestAdapter({
|
|
4062
|
+
debugMode: false,
|
|
4063
|
+
maxDescriptionLength: 100,
|
|
4064
|
+
enableToolNameValidation: true,
|
|
4065
|
+
enableFormatValidation: true,
|
|
4066
|
+
validation: { enabled: true, strict: false },
|
|
4067
|
+
healing: { enabled: true, maxAttempts: 2, enableCustomRules: true },
|
|
4068
|
+
recovery: { enabled: false, maxRetries: 0, backoffMs: 1e3 },
|
|
4069
|
+
monitoring: { enabled: false, logLevel: "none", enableMetrics: false },
|
|
4070
|
+
imageProxy: {
|
|
4071
|
+
enabled: downloadImageUrls,
|
|
4072
|
+
timeout: 1e4,
|
|
4073
|
+
maxSize: 10 * 1024 * 1024
|
|
4074
|
+
}
|
|
4075
|
+
});
|
|
4076
|
+
try {
|
|
4077
|
+
const result = await adapter.performCoreConversion(anthropicRequest);
|
|
4078
|
+
return result;
|
|
4079
|
+
} catch (error) {
|
|
4080
|
+
console.warn(`[A2ORequestAdapterStatic] Async conversion failed: ${error?.message || error}`);
|
|
4081
|
+
return adapter.performBasicConversion(anthropicRequest, true);
|
|
4082
|
+
}
|
|
4083
|
+
}
|
|
4084
|
+
};

// src/core/streaming/streaming-protocol-adapter.ts
var StreamingProtocolAdapter = class {
  constructor(options = {}) {
    this.config = {
      debugMode: options.debugMode ?? false,
      validateInput: options.validateInput ?? false,
      validateOutput: options.validateOutput ?? false,
      autoHeal: options.autoHeal ?? false,
      timeout: options.timeout ?? 3e4,
      retries: options.retries ?? 3,
      bufferSize: options.bufferSize ?? 1024,
      logger: options.logger ?? getGlobalLogger()
    };
  }
  logDebug(message, meta) {
    if (this.config.debugMode) {
      this.config.logger.debug(message, meta);
    }
  }
  /**
   * Convert an Anthropic request to OpenAI format
   */
  convertAnthropicToOpenAI(anthropicRequest) {
    const logger = this.config.logger;
    if (this.config.debugMode) {
      logger.debug("Converting Anthropic request to OpenAI format", { model: anthropicRequest.model });
    }
    const openaiRequest = A2ORequestAdapterStatic.convertAnthropicRequestToOpenAI(anthropicRequest);
    openaiRequest.stream = true;
    const hasImages = this.hasImageContent(anthropicRequest);
    return {
      openaiRequest,
      metadata: {
        hasImages,
        requiresVisionHeaders: hasImages
      }
    };
  }
  /**
   * API kept consistent with StandardProtocolAdapter, used for integration tests and backward compatibility.
   */
  convertRequest(anthropicRequest) {
    return this.convertAnthropicToOpenAI(anthropicRequest);
  }
  /**
   * Convert an OpenAI streaming response to Anthropic SSE format
   */
  convertOpenAIStreamToAnthropic(openaiStream, originalRequest) {
    const logger = this.config.logger;
    try {
      if (this.config.debugMode) {
        logger.debug("Converting OpenAI stream to Anthropic SSE", {
          streamLength: openaiStream.length,
          model: originalRequest.model
        });
      }
      if (!openaiStream || openaiStream.trim() === "") {
        return {
          success: false,
          error: "Empty stream response",
          anthropicSSE: "",
          anthropicStandardResponse: null
        };
      }
      const anthropicSSE = this.convertToAnthropicSSE(openaiStream, originalRequest.model);
      const anthropicStandardResponse = this.buildStandardResponse(openaiStream);
      return {
        success: true,
        anthropicSSE,
        anthropicStandardResponse
      };
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : "Unknown conversion error";
      logger.error("Stream conversion failed", { error: errorMessage });
      return {
        success: false,
        error: errorMessage,
        anthropicSSE: "",
        anthropicStandardResponse: null
      };
    }
  }
  /**
   * Incrementally parse Anthropic SSE and convert it into OpenAI streaming chunks.
   * Reusable directly by the OpenAI Chat Completions endpoint.
   */
  convertAnthropicSSEChunkToOpenAI(params) {
    const { buffer, chunk, model, flush = false } = params;
    let localBuffer = buffer + (chunk || "");
    const emittedChunks = [];
    let finishReason;
    let streamStopped = false;
    const processEvent = (eventText) => {
      const { eventType, data } = this.parseAnthropicSSEEvent(eventText);
      if (!eventType || !data) {
        return;
      }
      if (eventType === "content_block_delta") {
        const text = this.extractTextFromAnthropicDelta(data);
        if (text) {
          emittedChunks.push(this.buildOpenAIStreamChunk(model, text));
        }
      } else if (eventType === "message_stop") {
        finishReason = this.mapAnthropicStopReasonToOpenAI(data?.stop_reason);
        streamStopped = true;
      }
    };
    while (true) {
      const separatorIndex = localBuffer.indexOf("\n\n");
      if (separatorIndex === -1) {
        break;
      }
      const rawEvent = localBuffer.slice(0, separatorIndex);
      localBuffer = localBuffer.slice(separatorIndex + 2);
      if (!rawEvent.trim()) {
        continue;
      }
      processEvent(rawEvent);
      if (streamStopped) {
        break;
      }
    }
    if (flush && localBuffer.trim()) {
      processEvent(localBuffer);
      localBuffer = "";
    }
    return {
      buffer: localBuffer,
      chunks: emittedChunks,
      finishReason,
      streamStopped
    };
  }
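A rough sketch of how this incremental direction can be driven; the event payload and model name are illustrative, not taken from this diff:

// Illustrative driver for convertAnthropicSSEChunkToOpenAI (not part of the package source)
const a2oStream = new StreamingProtocolAdapter({ debugMode: false });
let buffer = "";
const sse = 'event: content_block_delta\ndata: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"Hi"}}\n\n';
// Feed network chunks as they arrive; any unfinished event stays in `buffer`.
const step = a2oStream.convertAnthropicSSEChunkToOpenAI({ buffer, chunk: sse, model: "gpt-4o" });
buffer = step.buffer;
for (const chunk of step.chunks) {
  console.log(JSON.stringify(chunk.choices[0].delta)); // {"content":"Hi"}
}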
  /**
   * Convert an OpenAI stream to Anthropic SSE format
   */
  convertToAnthropicSSE(openaiStream, modelName) {
    const lines = openaiStream.split("\n");
    const sseLines = [];
    const state = this.createConversionState();
    sseLines.push(
      "event: message_start",
      `data: {"type":"message_start","message":{"id":"msg_${Date.now()}","type":"message","role":"assistant","model":"${modelName}","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":0,"output_tokens":0}}}`,
      ""
    );
    for (const line of lines) {
      if (line.startsWith("data:")) {
        const dataLine = line.substring(5);
        if (dataLine.trim() === "[DONE]") {
          this.addFinalEvents(state, sseLines);
          break;
        }
        try {
          const chunk = JSON.parse(dataLine);
          this.processStreamChunk(chunk, state, sseLines);
        } catch (error) {
          if (this.config.debugMode) {
            this.config.logger.warn("Failed to parse stream chunk", { line: dataLine.substring(0, 200) });
          }
        }
      }
    }
    return sseLines.join("\n");
  }
  /**
   * Process a single stream chunk - supports both thinking and content modes
   */
  processStreamChunk(chunk, state, sseLines) {
    if (this.isResponsesEvent(chunk)) {
      this.processResponsesEvent(chunk, state, sseLines);
      return;
    }
    const choice = chunk.choices?.[0];
    if (choice) {
      const hasToolCalls = choice.delta?.tool_calls;
      const hasFinishReason = choice.finish_reason;
      const isNonText = !choice.delta?.content;
      if (this.config.debugMode && (hasToolCalls || hasFinishReason || isNonText && choice.delta)) {
        this.logDebug("Streaming chunk processed", { chunk });
      }
    }
    if (!choice) {
      this.updateUsageFromChunk(chunk, state);
      return;
    }
    const delta = choice.delta ?? {};
    this.appendThinkingContent(this.coalesceContent(delta.reasoning_content), state, sseLines);
    this.appendTextContent(this.coalesceContent(delta.content), state, sseLines);
    if (delta.tool_calls) {
      this.processToolCalls(delta.tool_calls, state, sseLines);
    }
    this.updateUsageFromChunk(chunk, state);
  }
  /**
   * Process tool calls - supports accumulation of OpenAI streaming chunks.
   * The OpenAI streaming API spreads tool_calls across multiple chunks:
   * - Chunk 1: {index:0, id:"call_xxx", type:"function", function:{name:"web_search"}}
   * - Chunk 2: {index:0, function:{arguments:"{\"query\":\"xxx\"}"}}
   * - Chunk N: keeps accumulating arguments
   */
  processToolCalls(toolCalls, state, sseLines) {
    this.logDebug("processToolCalls called", { toolCalls });
    for (const toolCall of toolCalls) {
      const index = toolCall.index ?? 0;
      const toolId = toolCall.id;
      const toolName = toolCall.function?.name;
      const toolArgs = toolCall.function?.arguments;
      this.logDebug(`Processing tool chunk for index ${index}`, {
        hasId: !!toolId,
        hasName: !!toolName,
        hasArgs: !!toolArgs,
        argsLength: toolArgs?.length
      });
      const stateKey = `openai_tool_${index}`;
      const toolData = this.getOrCreateToolCallState(state, stateKey);
      if (toolId && !toolData.id) {
        toolData.id = toolId;
      }
      if (toolName) {
        toolData.name = toolName;
      }
      this.registerToolCallAlias(state, toolId ? `openai_tool_id_${toolId}` : void 0, toolData);
      this.registerToolCallAlias(state, `openai_tool_index_${index}`, toolData);
      if (toolArgs) {
        toolData.pendingChunks.push(toolArgs);
        this.logDebug(`Accumulated tool arguments for index ${index}`, {
          currentLength: toolData.pendingChunks.reduce((acc, chunk) => acc + chunk.length, 0)
        });
      }
      const started = this.maybeStartToolBlock(toolData, state, sseLines);
      if (started || toolData.blockStartSent) {
        this.flushPendingToolChunks(toolData, sseLines);
      }
    }
  }
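To make the accumulation described above concrete, here is an illustrative pair of streamed deltas and what they become (payloads are examples, not taken verbatim from this diff):

// Illustrative sketch of streamed tool_call accumulation (not part of the package source)
const spAdapter = new StreamingProtocolAdapter();
const spState = spAdapter.createConversionState();
const spSse = [];
// Chunk 1 carries the id and name; chunk 2 carries the JSON arguments.
spAdapter.processToolCalls([{ index: 0, id: "call_1", type: "function", function: { name: "web_search" } }], spState, spSse);
spAdapter.processToolCalls([{ index: 0, function: { arguments: '{"query":"weather"}' } }], spState, spSse);
// spSse now holds a content_block_start (tool_use) followed by an input_json_delta
// with the partial JSON; addFinalEvents() later closes the block.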
  getOrCreateToolCallState(state, key) {
    let existing = state.toolCallsMap.get(key);
    if (!existing) {
      existing = {
        id: "",
        name: "",
        input: "",
        blockStartSent: false,
        blockStopSent: false,
        pendingChunks: []
      };
      state.toolCallsMap.set(key, existing);
    }
    return existing;
  }
  registerToolCallAlias(state, alias, toolData) {
    if (!alias) return;
    const current = state.toolCallsMap.get(alias);
    if (!current || current !== toolData) {
      state.toolCallsMap.set(alias, toolData);
    }
  }
  maybeStartToolBlock(toolData, state, sseLines) {
    if (toolData.blockStartSent) return false;
    if (!toolData.name) {
      return false;
    }
    if (!toolData.id) {
      toolData.id = `call_${++state.toolCallCounter}`;
    }
    const blockIndex = toolData.blockIndex ?? state.nextToolBlockIndex++;
    toolData.blockIndex = blockIndex;
    sseLines.push(
      "event: content_block_start",
      `data: {"type":"content_block_start","index":${blockIndex},"content_block":{"type":"tool_use","id":"${this.escapeJsonString(toolData.id)}","name":"${this.escapeJsonString(toolData.name)}","input":{}}}`,
      ""
    );
    toolData.blockStartSent = true;
    this.logDebug("Sent content_block_start", { toolName: toolData.name, blockIndex });
    return true;
  }
  flushPendingToolChunks(toolData, sseLines) {
    if (!toolData.blockStartSent || toolData.blockIndex === void 0) {
      return;
    }
    while (toolData.pendingChunks.length > 0) {
      const chunk = toolData.pendingChunks.shift();
      if (chunk === void 0) continue;
      toolData.input += chunk;
      sseLines.push(
        "event: content_block_delta",
        `data: {"type":"content_block_delta","index":${toolData.blockIndex},"delta":{"type":"input_json_delta","partial_json":${JSON.stringify(chunk)}}}`,
        ""
      );
      this.logDebug("Sent input_json_delta", { blockIndex: toolData.blockIndex });
    }
  }
  coalesceContent(content) {
    if (!content) return void 0;
    if (typeof content === "string") return content;
    if (Array.isArray(content)) {
      return content.map((item) => {
        if (typeof item === "string") return item;
        if (typeof item?.text === "string") return item.text;
        if (typeof item?.content === "string") return item.content;
        return "";
      }).join("");
    }
    if (typeof content === "object" && typeof content.text === "string") {
      return content.text;
    }
    return void 0;
  }
  appendThinkingContent(content, state, sseLines) {
    if (!content) return;
    state.reasoningContent += content;
    if (!state.thinkingBlockStarted) {
      if (state.contentBlockStarted) {
        sseLines.push(
          "event: content_block_stop",
          'data: {"type":"content_block_stop","index":0}',
          ""
        );
        state.contentBlockStarted = false;
      }
      sseLines.push(
        "event: content_block_start",
        'data: {"type":"content_block_start","index":0,"content_block":{"type":"thinking","thinking":""}}',
        ""
      );
      state.thinkingBlockStarted = true;
    }
    sseLines.push(
      "event: content_block_delta",
      `data: {"type":"content_block_delta","index":0,"delta":{"type":"thinking_delta","thinking":"${this.escapeJsonString(content)}"}}`,
      ""
    );
  }
  appendTextContent(content, state, sseLines) {
    if (!content || content === "") return;
    if (!state.thinkingBlockStarted && !state.contentBlockStarted && content.trim() === "") {
      state.textContent += content;
      return;
    }
    if (state.thinkingBlockStarted && !state.contentBlockStarted) {
      sseLines.push(
        "event: content_block_stop",
        'data: {"type":"content_block_stop","index":0}',
        "",
        "event: content_block_start",
        'data: {"type":"content_block_start","index":1,"content_block":{"type":"text","text":""}}',
        ""
      );
      state.contentBlockStarted = true;
    } else if (!state.contentBlockStarted && !state.thinkingBlockStarted) {
      sseLines.push(
        "event: content_block_start",
        'data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}}',
        ""
      );
      state.contentBlockStarted = true;
    }
    state.textContent += content;
    const blockIndex = state.thinkingBlockStarted ? 1 : 0;
    sseLines.push(
      "event: content_block_delta",
      `data: {"type":"content_block_delta","index":${blockIndex},"delta":{"type":"text_delta","text":"${this.escapeJsonString(content)}"}}`,
      ""
    );
  }
  updateUsageFromChunk(chunk, state) {
    const usage = chunk?.usage || chunk?.response?.usage;
    if (!usage) return;
    if (typeof usage.prompt_tokens === "number") {
      state.usage.input_tokens = usage.prompt_tokens;
    }
    if (typeof usage.completion_tokens === "number") {
      state.usage.output_tokens = usage.completion_tokens;
    }
    if (typeof usage.input_tokens === "number") {
      state.usage.input_tokens = usage.input_tokens;
    }
    if (typeof usage.output_tokens === "number") {
      state.usage.output_tokens = usage.output_tokens;
    }
  }
  isResponsesEvent(chunk) {
    return typeof chunk?.type === "string" && chunk.type.startsWith("response.");
  }
  processResponsesEvent(event, state, sseLines) {
    this.updateUsageFromChunk(event, state);
    switch (event.type) {
      case "response.output_item.added":
        this.handleResponsesOutputItemAdded(event, state, sseLines);
        break;
      case "response.function_call_arguments.delta":
        this.handleResponsesFunctionArgumentsDelta(event, state, sseLines);
        break;
      case "response.function_call_arguments.done":
      case "response.output_item.done":
        this.handleResponsesFunctionArgumentsDone(event, state, sseLines);
        break;
      case "response.output_text.delta":
      case "response.text.delta":
        this.appendTextContent(this.extractResponsesTextDelta(event), state, sseLines);
        break;
      case "response.output_text.done":
      case "response.text.done":
        break;
      case "response.thinking.delta":
        this.appendThinkingContent(this.extractResponsesThinkingDelta(event), state, sseLines);
        break;
      default:
        break;
    }
  }
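For orientation, a sketch of how a Responses-API event flows through the dispatcher above; the event shape is illustrative:

// Illustrative: a Responses-API text delta becomes an Anthropic text_delta (not part of the package source)
const respAdapter = new StreamingProtocolAdapter();
const respState = respAdapter.createConversionState();
const respSse = [];
respAdapter.processResponsesEvent({ type: "response.output_text.delta", delta: "Hello" }, respState, respSse);
// respSse now contains a content_block_start (text, index 0) plus a text_delta carrying "Hello".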
  resolveResponsesToolData(identifiers, state) {
    const aliases = [];
    if (identifiers.call_id) aliases.push(`responses_call_${identifiers.call_id}`);
    if (identifiers.item_id) aliases.push(`responses_item_${identifiers.item_id}`);
    if (typeof identifiers.output_index === "number") aliases.push(`responses_index_${identifiers.output_index}`);
    let toolData;
    for (const alias of aliases) {
      const existing = state.toolCallsMap.get(alias);
      if (existing) {
        toolData = existing;
        break;
      }
    }
    if (!toolData) {
      const baseAlias = aliases[0] ?? `responses_auto_${++state.toolCallCounter}`;
      toolData = this.getOrCreateToolCallState(state, baseAlias);
      if (!aliases.length) {
        aliases.push(baseAlias);
      }
    }
    for (const alias of aliases) {
      this.registerToolCallAlias(state, alias, toolData);
    }
    return toolData;
  }
  handleResponsesOutputItemAdded(event, state, sseLines) {
    const item = event?.item;
    if (!item) return;
    const itemType = item.type;
    if (itemType !== "function_call" && itemType !== "tool_call") {
      return;
    }
    const toolData = this.resolveResponsesToolData(
      { call_id: item.call_id ?? item.id, item_id: item.id, output_index: event.output_index },
      state
    );
    if (!toolData.id) {
      toolData.id = item.call_id || item.id || `call_${++state.toolCallCounter}`;
    }
    const name = item.name ?? item.function?.name ?? item.function_call?.name;
    if (name) {
      toolData.name = name;
    }
    if (typeof item.arguments === "string" && item.arguments.length > 0) {
      toolData.pendingChunks.push(item.arguments);
    }
    const started = this.maybeStartToolBlock(toolData, state, sseLines);
    if (started || toolData.blockStartSent) {
      this.flushPendingToolChunks(toolData, sseLines);
    }
  }
  handleResponsesFunctionArgumentsDelta(event, state, sseLines) {
    const toolData = this.resolveResponsesToolData(
      { call_id: event.call_id, item_id: event.item_id, output_index: event.output_index },
      state
    );
    if (!toolData.id && event.call_id) {
      toolData.id = event.call_id;
    }
    const name = event.name ?? event.function_name ?? event.function?.name;
    if (name) {
      toolData.name = name;
    }
    const argsChunk = this.extractArgumentsDelta(event);
    if (argsChunk) {
      toolData.pendingChunks.push(argsChunk);
    }
    const started = this.maybeStartToolBlock(toolData, state, sseLines);
    if (started || toolData.blockStartSent) {
      this.flushPendingToolChunks(toolData, sseLines);
    }
  }
  handleResponsesFunctionArgumentsDone(event, state, sseLines) {
    const toolData = this.resolveResponsesToolData(
      { call_id: event.call_id, item_id: event.item_id, output_index: event.output_index },
      state
    );
    if (typeof event.arguments === "string" && event.arguments.length > 0) {
      toolData.pendingChunks.push(event.arguments);
    }
    const started = this.maybeStartToolBlock(toolData, state, sseLines);
    if (started || toolData.blockStartSent) {
      this.flushPendingToolChunks(toolData, sseLines);
    }
    if (toolData.blockStartSent && !toolData.blockStopSent && toolData.blockIndex !== void 0) {
      sseLines.push(
        "event: content_block_stop",
        `data: {"type":"content_block_stop","index":${toolData.blockIndex}}`,
        ""
      );
      toolData.blockStopSent = true;
      if (toolData.id && !state.completedToolCalls.includes(toolData.id)) {
        state.completedToolCalls.push(toolData.id);
      }
      this.logDebug("Sent content_block_stop", { toolName: toolData.name, blockIndex: toolData.blockIndex });
    }
  }
  extractResponsesTextDelta(event) {
    if (!event) return void 0;
    if (typeof event.delta === "string") return event.delta;
    if (event.delta && typeof event.delta.text === "string") return event.delta.text;
    if (typeof event.text === "string") return event.text;
    if (Array.isArray(event.output_text)) {
      return event.output_text.map((item) => item?.text ?? "").join("");
    }
    return void 0;
  }
  extractResponsesThinkingDelta(event) {
    if (!event) return void 0;
    if (typeof event.delta === "string") return event.delta;
    if (event.delta && typeof event.delta.thinking === "string") return event.delta.thinking;
    if (typeof event.text === "string") return event.text;
    return void 0;
  }
  extractArgumentsDelta(event) {
    if (!event) return void 0;
    if (typeof event.delta === "string") return event.delta;
    if (event.delta && typeof event.delta.arguments === "string") return event.delta.arguments;
    if (typeof event.arguments_delta === "string") return event.arguments_delta;
    if (typeof event.arguments === "string") return event.arguments;
    if (typeof event.partial_json === "string") return event.partial_json;
    return void 0;
  }
  /**
   * Close any tool-call blocks that are still open when the stream ends
   */
  closeAllToolCallBlocks(state, sseLines) {
    const processed = /* @__PURE__ */ new Set();
    for (const toolData of state.toolCallsMap.values()) {
      if (processed.has(toolData)) continue;
      processed.add(toolData);
      if (!toolData.blockStartSent && toolData.pendingChunks.length > 0) {
        if (!toolData.name) {
          toolData.name = "unknown_tool";
        }
        const started = this.maybeStartToolBlock(toolData, state, sseLines);
        if (started) {
          this.flushPendingToolChunks(toolData, sseLines);
        }
      }
      if (toolData.blockStartSent && !toolData.blockStopSent && toolData.blockIndex !== void 0) {
        this.flushPendingToolChunks(toolData, sseLines);
        sseLines.push(
          "event: content_block_stop",
          `data: {"type":"content_block_stop","index":${toolData.blockIndex}}`,
          ""
        );
        toolData.blockStopSent = true;
        if (toolData.id && !state.completedToolCalls.includes(toolData.id)) {
          state.completedToolCalls.push(toolData.id);
        }
        this.logDebug("Sent content_block_stop", { toolName: toolData.name, blockIndex: toolData.blockIndex });
      }
    }
  }
  /**
   * Add the final events - supports the thinking + content dual mode
   */
  addFinalEvents(state, sseLines) {
    this.closeAllToolCallBlocks(state, sseLines);
    if (state.contentBlockStarted) {
      const blockIndex = state.thinkingBlockStarted ? 1 : 0;
      sseLines.push(
        "event: content_block_stop",
        `data: {"type":"content_block_stop","index":${blockIndex}}`,
        ""
      );
    } else if (state.thinkingBlockStarted) {
      sseLines.push(
        "event: content_block_stop",
        'data: {"type":"content_block_stop","index":0}',
        ""
      );
    }
    const stopReason = state.completedToolCalls.length > 0 ? "tool_use" : "end_turn";
    const usagePayload = state.usage.input_tokens > 0 ? `{"input_tokens":${state.usage.input_tokens},"output_tokens":${state.usage.output_tokens}}` : `{"output_tokens":${state.usage.output_tokens}}`;
    sseLines.push(
      "event: message_delta",
      `data: {"type":"message_delta","delta":{"stop_reason":"${stopReason}","stop_sequence":null},"usage":${usagePayload}}`,
      "",
      "event: message_stop",
      'data: {"type":"message_stop"}',
      ""
    );
  }
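The tail that addFinalEvents emits therefore looks roughly like this (the stop reason and usage values depend on the accumulated state):

event: message_delta
data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"output_tokens":0}}

event: message_stop
data: {"type":"message_stop"}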
  /**
   * Build the standard (non-streaming) response format
   */
  buildStandardResponse(openaiStream) {
    const state = this.createConversionState();
    const lines = openaiStream.split("\n");
    const noopSseLines = [];
    for (const line of lines) {
      if (line.startsWith("data:")) {
        const dataLine = line.startsWith("data: ") ? line.substring(6) : line.substring(5);
        if (dataLine.trim() === "[DONE]") break;
        try {
          const chunk = JSON.parse(dataLine);
          noopSseLines.length = 0;
          this.processStreamChunk(chunk, state, noopSseLines);
        } catch (error) {
        }
      }
    }
    const stopReason = state.completedToolCalls.length > 0 ? "tool_use" : "end_turn";
    return {
      id: `msg_${Date.now()}`,
      type: "message",
      role: "assistant",
      content: state.textContent ? [
        {
          type: "text",
          text: state.textContent
        }
      ] : [],
      model: "claude-3-sonnet-20240229",
      stop_reason: stopReason,
      stop_sequence: null,
      usage: state.usage
    };
  }
  /**
   * Create the conversion state object
   */
  createConversionState() {
    return {
      processedLines: 0,
      textContent: "",
      reasoningContent: "",
      toolCallsMap: /* @__PURE__ */ new Map(),
      completedToolCalls: [],
      allSSELines: [],
      errors: [],
      usage: {
        input_tokens: 0,
        output_tokens: 0
      },
      thinkingBlockStarted: false,
      contentBlockStarted: false,
      toolCallCounter: 0,
      nextToolBlockIndex: 1
    };
  }
  parseAnthropicSSEEvent(rawEvent) {
    const lines = rawEvent.split("\n");
    let eventType = null;
    const dataLines = [];
    for (const line of lines) {
      if (line.startsWith("event:")) {
        eventType = line.slice(6).trim();
      } else if (line.startsWith("data:")) {
        dataLines.push(line.slice(5).trim());
      }
    }
    const dataString = dataLines.join("\n");
    let data = null;
    if (dataString) {
      try {
        data = JSON.parse(dataString);
      } catch (error) {
        this.logDebug("Failed to parse Anthropic SSE JSON", { error });
      }
    }
    return { eventType, data };
  }
  extractTextFromAnthropicDelta(data) {
    const delta = data?.delta;
    if (!delta) return null;
    if (typeof delta.text === "string") {
      return delta.text;
    }
    if (delta.type === "text_delta" && typeof delta.text === "string") {
      return delta.text;
    }
    return null;
  }
  mapAnthropicStopReasonToOpenAI(reason) {
    switch (reason) {
      case "max_tokens":
        return "length";
      case "tool_use":
        return "tool_calls";
      case "stop_sequence":
      case "end_turn":
      default:
        return "stop";
    }
  }
  buildOpenAIStreamChunk(model, content, finishReason = null) {
    return {
      id: `chatcmpl-${Date.now()}`,
      object: "chat.completion.chunk",
      created: Math.floor(Date.now() / 1e3),
      model,
      choices: [{
        index: 0,
        delta: content ? { content } : {},
        finish_reason: finishReason
      }]
    };
  }
  /**
   * Convert the message format
   */
  convertMessages(messages) {
    return messages.map((msg) => ({
      role: msg.role,
      content: msg.content
    }));
  }
  /**
   * Map Anthropic models to OpenAI-side models
   */
  mapAnthropicModelToOpenAI(model) {
    const supportedModels = [
      "glm-4.5",
      "kimi-k2",
      "deepseek-v3.1",
      "deepseek-r1",
      "deepseek-v3",
      "qwen3-32b",
      "qwen3-coder",
      "qwen3-235b",
      "tstars2.0"
    ];
    if (supportedModels.includes(model)) {
      return model;
    }
    const mapping = {
      "claude-3-sonnet-20240229": "glm-4.5",
      "claude-3-haiku-20240307": "kimi-k2",
      "claude-3-opus-20240229": "deepseek-v3.1"
    };
    return mapping[model] || "glm-4.5";
  }
  /**
   * Check whether a request contains image content
   */
  hasImageContent(request) {
    return request.messages.some(
      (msg) => Array.isArray(msg.content) && msg.content.some((content) => content?.type === "image")
    );
  }
  /**
   * Escape a string for embedding in JSON
   */
  escapeJsonString(str) {
    return str.replace(/\\/g, "\\\\").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t");
  }
  /**
   * Get the initial SSE events (message_start + ping)
   */
  getInitialSSEEvents(modelName = "claude-sonnet-4", messageId = `msg_${Date.now()}_${Math.random().toString(36).substring(2, 8)}`) {
    return [
      "event: message_start",
      `data: {"type":"message_start","message":{"id":"${messageId}","type":"message","role":"assistant","model":"${modelName}","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":0,"output_tokens":0}}}`,
      "",
      "event: ping",
      'data: {"type":"ping"}',
      ""
    ];
  }
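On the wire, the prelude produced by getInitialSSEEvents looks like the following (the message id and model name vary with the arguments):

event: message_start
data: {"type":"message_start","message":{"id":"msg_...","type":"message","role":"assistant","model":"claude-sonnet-4","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":0,"output_tokens":0}}}

event: ping
data: {"type":"ping"}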
  /**
   * Incrementally convert a single OpenAI data line into Anthropic SSE events.
   * Used to process streaming data fragments one by one.
   */
  convertIncrementalChunk(openaiDataLine, state) {
    const logger = this.config.logger;
    const sseEvents = [];
    state.processedLines += 1;
    if (openaiDataLine.trim() === "[DONE]") {
      this.addFinalEvents(state, sseEvents);
      state.allSSELines.push(...sseEvents);
      return sseEvents;
    }
    try {
      const chunk = JSON.parse(openaiDataLine);
      this.processStreamChunk(chunk, state, sseEvents);
      if (sseEvents.length > 0) {
        state.allSSELines.push(...sseEvents);
      }
      return sseEvents;
    } catch (error) {
      if (this.config.debugMode) {
        logger.warn("Failed to parse OpenAI stream chunk in convertIncrementalChunk", {
          line: openaiDataLine.substring(0, 200),
          error: error instanceof Error ? error.message : String(error)
        });
      }
      state.errors.push({
        error: error instanceof Error ? error.message : String(error),
        raw: openaiDataLine
      });
      return [];
    }
  }
  /**
   * Expose the internal state factory so external incremental pipelines can use it.
   */
  createIncrementalState() {
    return this.createConversionState();
  }
};
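A compact sketch of the incremental pipeline these two methods enable; the data lines are illustrative examples:

// Illustrative incremental loop (not part of the package source)
const incAdapter = new StreamingProtocolAdapter();
const incState = incAdapter.createIncrementalState();
const out = [...incAdapter.getInitialSSEEvents("claude-sonnet-4")];
for (const dataLine of ['{"choices":[{"delta":{"content":"Hel"}}]}', '{"choices":[{"delta":{"content":"lo"}}]}', "[DONE]"]) {
  out.push(...incAdapter.convertIncrementalChunk(dataLine, incState));
}
// out now holds a complete Anthropic SSE transcript: message_start, text deltas,
// content_block_stop, message_delta and message_stop.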

@@ -4258,15 +5035,36 @@ var ToolCallProcessor = class _ToolCallProcessor {
   * Process incremental tool calls
   */
  static processIncrementalToolCalls(toolCalls, state, sseLines) {
    const debugEnabled = process.env.AI_PROTOCOL_DEBUG === "true";
    if (debugEnabled) {
      console.debug("[ToolProcessor] processIncrementalToolCalls called with:", JSON.stringify(toolCalls, null, 2));
    }
    for (const toolCall of toolCalls) {
      const toolId = toolCall.id;
      const toolName = toolCall.function?.name;
      const toolArgs = toolCall.function?.arguments;
      if (debugEnabled) {
        console.debug("[ToolProcessor] Processing tool call:", {
          toolId,
          toolName,
          hasArgs: !!toolArgs
        });
      }
      if (toolName && toolId && !state.toolCallsMap.has(toolId)) {
        if (debugEnabled) {
          console.debug("[ToolProcessor] Starting new tool call:", toolName);
        }
        _ToolCallProcessor.processToolCallStart(toolId, toolName, state, sseLines);
      }
      if (toolArgs) {
        if (debugEnabled) {
          console.debug("[ToolProcessor] Processing tool args, calling processToolArgs");
        }
        _ToolCallProcessor.processToolArgs(toolId, toolArgs, state, sseLines);
      } else if (toolName && toolId) {
        _ToolCallProcessor.processToolArgs(toolId, "", state, sseLines);
      } else {
        console.warn("\u26A0\uFE0F\u26A0\uFE0F\u26A0\uFE0F [ToolProcessor] No tool args to process! This will result in empty input!");
      }
    }
  }
@@ -4274,15 +5072,36 @@ var ToolCallProcessor = class _ToolCallProcessor {
   * Process tool calls
   */
  static processBatchToolCalls(toolCalls, state, sseLines) {
    const debugEnabled = process.env.AI_PROTOCOL_DEBUG === "true";
    if (debugEnabled) {
      console.debug("[ToolProcessor] processBatchToolCalls called with:", JSON.stringify(toolCalls, null, 2));
    }
    for (const toolCall of toolCalls) {
      const toolId = toolCall.id;
      const toolName = toolCall.function?.name;
      const toolArgs = toolCall.function?.arguments;
      if (debugEnabled) {
        console.debug("[ToolProcessor] Processing batch tool call:", {
          toolId,
          toolName,
          hasArgs: !!toolArgs
        });
      }
      if (toolName && toolId && !state.toolCallsMap.has(toolId)) {
        if (debugEnabled) {
          console.debug("[ToolProcessor] Starting new batch tool call:", toolName);
        }
        _ToolCallProcessor.processToolCallStart(toolId, toolName, state, sseLines);
      }
      if (toolArgs) {
        if (debugEnabled) {
          console.debug("[ToolProcessor] Processing batch tool args, calling processToolArgs");
        }
        _ToolCallProcessor.processToolArgs(toolId, toolArgs, state, sseLines);
      } else if (toolName && toolId) {
        _ToolCallProcessor.processToolArgs(toolId, "", state, sseLines);
      } else {
        console.warn("\u26A0\uFE0F\u26A0\uFE0F\u26A0\uFE0F [ToolProcessor] No batch tool args to process! This will result in empty input!");
      }
    }
  }
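Both processors gate their tracing on an environment variable rather than a constructor flag. A quick way to see the chunk-by-chunk traces, assuming a Node.js runtime and an arbitrary entry script of your own:

// Enable the ToolCallProcessor debug traces (illustrative)
// shell: AI_PROTOCOL_DEBUG=true node your-entry.js
process.env.AI_PROTOCOL_DEBUG = "true"; // must be set before the processors run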
@@ -4369,9 +5188,10 @@ var StreamingStateManager = class {
    return {
      hasContent: false,
      hasThinking: false,
      contentBlockIndex: 1,
      // Fix: the text block comes after thinking (index 1)
      thinkingBlockIndex: 0,
      // Fix: the thinking block is emitted first (index 0)
      toolCallsMap: /* @__PURE__ */ new Map(),
      completedToolCalls: /* @__PURE__ */ new Set(),
      accumulatedUsage: {
@@ -4387,6 +5207,9 @@ var StreamingStateManager = class {
   * Process text content
   */
  static processTextContent(content, state, sseLines) {
    if (!state.hasThinking && !state.hasContent && content.trim() === "") {
      return;
    }
    if (!state.hasContent) {
      sseLines.push(...SSEEventGenerator.generateTextBlockStart(state.contentBlockIndex));
      state.hasContent = true;
@@ -4480,6 +5303,247 @@ function generateMessageId() {
  return `msg_${Date.now()}_${Math.random().toString(36).substring(2, 8)}`;
}

// src/core/o2a-sse-adapter/stream-converter.ts
var StreamConverter = class {
  constructor(adapter, options = {}) {
    this.buffer = "";
    this.adapter = adapter;
    this.options = {
      bufferTimeout: 5e3,
      errorRecovery: true,
      maxRetries: 3,
      debug: false,
      ...options
    };
    this.state = this.adapter.createIncrementalState();
    this.stats = {
      chunksProcessed: 0,
      eventsGenerated: 0,
      errors: 0,
      retries: 0,
      startTime: Date.now(),
      lastUpdateTime: Date.now(),
      bufferSize: 0
    };
    if (this.options.debug) {
      console.log("[StreamConverter] \u5DF2\u521D\u59CB\u5316\uFF0C\u914D\u7F6E:", this.options);
    }
  }
  /**
   * Get the initial events
   */
  getInitialEvents() {
    const events = this.adapter.getInitialSSEEvents(
      this.options.modelName,
      this.options.messageId
    );
    this.stats.eventsGenerated += events.length;
    this.stats.lastUpdateTime = Date.now();
    if (this.options.debug) {
      console.log("[StreamConverter] \u751F\u6210\u521D\u59CB\u4E8B\u4EF6:", events.length, "\u4E2A");
    }
    return events;
  }
  /**
   * Process a single data chunk
   */
  processChunk(chunk) {
    this.stats.chunksProcessed++;
    this.stats.lastUpdateTime = Date.now();
    if (this.options.debug) {
      console.log("[StreamConverter] \u5904\u7406\u6570\u636E\u5757:", chunk.substring(0, 100) + "...");
    }
    try {
      const events = this.processBufferedData(chunk);
      this.stats.eventsGenerated += events.length;
      if (this.options.onChunkProcessed) {
        this.options.onChunkProcessed(chunk, events);
      }
      return events;
    } catch (error) {
      return this.handleChunkError(error, chunk);
    }
  }
  /**
   * Finish stream processing
   */
  finalize() {
    if (this.options.debug) {
      console.log("[StreamConverter] \u7ED3\u675F\u6D41\u5904\u7406\uFF0C\u7F13\u51B2\u533A\u5927\u5C0F:", this.buffer.length);
    }
    let events = [];
    if (this.buffer.trim()) {
      console.warn("[StreamConverter] \u7F13\u51B2\u533A\u4E2D\u6709\u672A\u5904\u7406\u6570\u636E\uFF0C\u5F3A\u5236\u5904\u7406:", this.buffer);
      events = this.processIncompleteBuffer();
    }
    try {
      const finalEvents = this.adapter.convertIncrementalChunk("[DONE]", this.state);
      events.push(...finalEvents);
      this.stats.eventsGenerated += finalEvents.length;
    } catch (error) {
      console.error("[StreamConverter] \u5904\u7406\u7ED3\u675F\u4E8B\u4EF6\u5931\u8D25:", error);
    }
    this.clearBufferTimeout();
    this.stats.lastUpdateTime = Date.now();
    if (this.options.debug) {
      console.log("[StreamConverter] \u6D41\u5904\u7406\u5B8C\u6210\uFF0C\u7EDF\u8BA1\u4FE1\u606F:", this.stats);
    }
    return events;
  }
  /**
   * Get the current state
   */
  getState() {
    return { ...this.state };
  }
  /**
   * Reset the converter state
   */
  reset() {
    this.state = this.adapter.createIncrementalState();
    this.buffer = "";
    this.clearBufferTimeout();
    this.stats = {
      chunksProcessed: 0,
      eventsGenerated: 0,
      errors: 0,
      retries: 0,
      startTime: Date.now(),
      lastUpdateTime: Date.now(),
      bufferSize: 0
    };
    if (this.options.debug) {
      console.log("[StreamConverter] \u72B6\u6001\u5DF2\u91CD\u7F6E");
    }
  }
  /**
   * Get statistics
   */
  getStats() {
    return {
      ...this.stats,
      bufferSize: this.buffer.length
    };
  }
  /**
   * Process buffered data
   */
  processBufferedData(newChunk) {
    this.buffer += newChunk;
    this.stats.bufferSize = this.buffer.length;
    const lines = this.buffer.split("\n");
    this.buffer = lines.pop() || "";
    const events = [];
    for (const line of lines) {
      if (line.startsWith("data:")) {
        const jsonStr = line.slice(5).trim();
        if (jsonStr && jsonStr !== "[DONE]") {
          const lineEvents = this.processDataLine(jsonStr);
          events.push(...lineEvents);
        } else if (jsonStr === "[DONE]") {
          const finalEvents = this.adapter.convertIncrementalChunk("[DONE]", this.state);
          events.push(...finalEvents);
        }
      }
    }
    this.resetBufferTimeout();
    return events;
  }
  /**
   * Process a single data line
   */
  processDataLine(jsonStr, attempt = 0) {
    try {
      const chunkEvents = this.adapter.convertIncrementalChunk(jsonStr, this.state);
      if (this.options.debug && chunkEvents.length > 0) {
        console.log("[StreamConverter] \u751F\u6210\u4E8B\u4EF6:", chunkEvents.length, "\u4E2A");
      }
      return chunkEvents;
    } catch (error) {
      if (this.options.errorRecovery && attempt < (this.options.maxRetries || 3)) {
        console.warn(`[StreamConverter] \u5904\u7406\u6570\u636E\u884C\u5931\u8D25\uFF0C\u91CD\u8BD5 ${attempt + 1}/${this.options.maxRetries}:`, error);
        this.stats.retries++;
        return this.processDataLine(jsonStr, attempt + 1);
      }
      this.stats.errors++;
      console.error("[StreamConverter] \u5904\u7406\u6570\u636E\u884C\u6700\u7EC8\u5931\u8D25:", error, "Data:", jsonStr);
      if (this.options.onError) {
        this.options.onError(error, {
          chunk: jsonStr,
          state: this.state,
          attempt,
          totalRetries: this.stats.retries
        });
      }
      return [];
    }
  }
  /**
   * Handle a chunk-level error
   */
  handleChunkError(error, chunk) {
    this.stats.errors++;
    if (this.options.debug) {
      console.error("[StreamConverter] \u5757\u5904\u7406\u9519\u8BEF:", error.message);
    }
    if (!this.options.errorRecovery) {
      throw error;
    }
    this.state.errors.push(`Chunk processing error: ${error.message}`);
    if (this.options.onError) {
      this.options.onError(error, {
        chunk,
        state: this.state,
        totalRetries: this.stats.retries
      });
    }
    return [];
  }
  /**
   * Process incomplete data left in the buffer
   */
  processIncompleteBuffer() {
    if (!this.buffer.trim()) {
      return [];
    }
    console.warn("[StreamConverter] \u5904\u7406\u4E0D\u5B8C\u6574\u7F13\u51B2\u533A\u6570\u636E:", this.buffer);
    if (this.buffer.startsWith("data:")) {
      const jsonStr = this.buffer.slice(5).trim();
      if (jsonStr) {
        return this.processDataLine(jsonStr);
      }
    }
    return [];
  }
  /**
   * Reset the buffer timeout
   */
  resetBufferTimeout() {
    this.clearBufferTimeout();
    if (this.options.bufferTimeout && this.options.bufferTimeout > 0) {
      this.bufferTimeout = setTimeout(() => {
        if (this.buffer.trim()) {
          console.warn("[StreamConverter] \u7F13\u51B2\u533A\u8D85\u65F6\uFF0C\u5F3A\u5236\u5904\u7406\u6570\u636E:", this.buffer);
          const events = this.processIncompleteBuffer();
          this.buffer = "";
          if (events.length > 0 && this.options.onChunkProcessed) {
            this.options.onChunkProcessed("TIMEOUT_FLUSH", events);
          }
        }
      }, this.options.bufferTimeout);
    }
  }
  /**
   * Clear the buffer timeout
   */
  clearBufferTimeout() {
    if (this.bufferTimeout) {
      clearTimeout(this.bufferTimeout);
      this.bufferTimeout = void 0;
    }
  }
};
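A minimal sketch of driving the converter directly. In the package it is normally obtained via O2ASSEAdapter.createStreamConverter(); pairing it with StreamingProtocolAdapter here is only to show the adapter surface it relies on (createIncrementalState, getInitialSSEEvents, convertIncrementalChunk), and the chunk contents are illustrative:

// Illustrative StreamConverter usage (not part of the package source)
const converter = new StreamConverter(new StreamingProtocolAdapter(), { debug: false });
const sseOut = [...converter.getInitialEvents()];
sseOut.push(...converter.processChunk('data: {"choices":[{"delta":{"content":"Hi"}}]}\n'));
sseOut.push(...converter.finalize()); // flushes the buffer and appends the closing events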

// src/core/o2a-sse-adapter/adapter.ts
var O2ASSEAdapter = class {
  constructor(debugMode = false, config = {}) {
@@ -4651,6 +5715,17 @@ var O2ASSEAdapter = class {
    }
    try {
      const data = JSON.parse(dataContent);
      if ((data.choices?.length === 0 || !data.choices) && data.prompt_filter_results) {
        if (this.debugMode) {
          console.warn("\u26A0\uFE0F [O2ASSEAdapter] \u68C0\u6D4B\u5230Azure\u5185\u5BB9\u8FC7\u6EE4\u5668\u54CD\u5E94:", data.prompt_filter_results);
        }
        StreamingStateManager.processTextContent(
          `\u9519\u8BEF\uFF1A\u5185\u5BB9\u8FC7\u6EE4\u5668\u62E6\u622A\u4E86\u8BF7\u6C42\u3002\u8BF7\u68C0\u67E5\u8F93\u5165\u5185\u5BB9\u662F\u5426\u7B26\u5408\u4F7F\u7528\u653F\u7B56\u3002`,
          state,
          sseLines
        );
        break;
      }
      const choice = data.choices?.[0];
      const delta = choice?.delta;
      if (!delta) {
@@ -4665,6 +5740,11 @@ var O2ASSEAdapter = class {
      if (delta.content) {
        StreamingStateManager.processTextContent(delta.content, state, sseLines);
      }
      if (delta.reasoning_content) {
        if (typeof delta.reasoning_content === "string") {
          StreamingStateManager.processReasoningContent(delta.reasoning_content, state, sseLines);
        }
      }
      if (delta.tool_calls) {
        ToolCallProcessor.processBatchToolCalls(delta.tool_calls, state, sseLines);
      }
@@ -4694,6 +5774,19 @@ var O2ASSEAdapter = class {
  processNonStreamingResponse(data, state, sseLines) {
    const choice = data.choices?.[0];
    if (!choice) {
      if (data.prompt_filter_results || data.choices?.length === 0) {
        const errorMsg = "Azure\u5185\u5BB9\u8FC7\u6EE4\u5668\u62E6\u622A\u4E86\u8BF7\u6C42";
        const filterDetails = data.prompt_filter_results ? JSON.stringify(data.prompt_filter_results).substring(0, 500) : "choices\u4E3A\u7A7A";
        if (this.debugMode) {
          console.warn(`\u26A0\uFE0F [O2ASSEAdapter] ${errorMsg}:`, filterDetails);
        }
        StreamingStateManager.processTextContent(
          `\u9519\u8BEF\uFF1A\u5185\u5BB9\u8FC7\u6EE4\u5668\u62E6\u622A\u4E86\u8BF7\u6C42\u3002\u8BF7\u68C0\u67E5\u8F93\u5165\u5185\u5BB9\u662F\u5426\u7B26\u5408\u4F7F\u7528\u653F\u7B56\u3002`,
          state,
          sseLines
        );
        return;
      }
      if (this.debugMode) {
        console.warn("\u26A0\uFE0F [O2ASSEAdapter] \u975E\u6D41\u5F0F\u54CD\u5E94\u6CA1\u6709choices\u6570\u636E");
      }
@@ -4734,6 +5827,101 @@ var O2ASSEAdapter = class {
  validateClaudeSSE(sseContent) {
    return FormatValidator2.validateClaudeSSE(sseContent);
  }
  /**
   * Convert an OpenAI Response stream directly into an Anthropic SSE stream.
   * This is the newly added core streaming method and supports real-time conversion.
   */
  convertResponseStream(openaiResponse, options = {}) {
    if (!openaiResponse.body) {
      throw new Error("Response body is null or undefined");
    }
    return this.convertReadableStream(openaiResponse.body, options);
  }
  /**
   * Convert a ReadableStream into an Anthropic SSE stream
   */
  convertReadableStream(openaiStream, options = {}) {
    const converter = this.createStreamConverter(options);
    const decoder = new TextDecoder();
    return new ReadableStream({
      async start(controller) {
        if (options.debug) {
          console.log("[O2ASSEAdapter] \u5F00\u59CB\u6D41\u5F0F\u8F6C\u6362\uFF0C\u914D\u7F6E:", options);
        }
        try {
          const initialEvents = converter.getInitialEvents();
          for (const event of initialEvents) {
            controller.enqueue(event);
          }
        } catch (error) {
          console.error("[O2ASSEAdapter] \u521D\u59CB\u5316\u5931\u8D25:", error);
          controller.error(error);
          return;
        }
        const reader = openaiStream.getReader();
        try {
          while (true) {
            const { done, value } = await reader.read();
            if (done) {
              try {
                const finalEvents = converter.finalize();
                for (const event of finalEvents) {
                  controller.enqueue(event);
                }
                if (options.debug) {
                  console.log("[O2ASSEAdapter] \u6D41\u5F0F\u8F6C\u6362\u5B8C\u6210\uFF0C\u7EDF\u8BA1:", converter.getStats());
                }
              } catch (error) {
                console.error("[O2ASSEAdapter] \u7ED3\u675F\u5904\u7406\u5931\u8D25:", error);
              }
              break;
            }
            const chunk = decoder.decode(value, { stream: true });
            try {
              const events = converter.processChunk(chunk);
              for (const event of events) {
                controller.enqueue(event);
              }
            } catch (error) {
              console.error("[O2ASSEAdapter] \u5757\u5904\u7406\u5931\u8D25:", error);
              if (options.errorRecovery === false) {
                controller.error(error);
                return;
              }
              if (options.onError) {
                options.onError(error, {
                  chunk,
                  state: converter.getState()
                });
              }
            }
          }
        } catch (error) {
          console.error("[O2ASSEAdapter] \u6D41\u5904\u7406\u5931\u8D25:", error);
          if (options.onError) {
            options.onError(error, {
              chunk: "",
              state: converter.getState()
            });
          }
          controller.error(error);
        } finally {
          controller.close();
        }
      }
    });
  }
  /**
   * Create a stream converter instance.
   * Provides finer-grained control over stream processing.
   */
  createStreamConverter(options = {}) {
    return new StreamConverter(this, {
      modelName: options.modelName || this.config.defaultModel,
      debug: options.debug || this.debugMode,
      ...options
    });
  }
|
|
4737
5925
|
/**
|
|
4738
5926
|
* 应用增强功能到SSE转换
|
|
4739
5927
|
* 包括输入验证、输出修复等
|
|
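
The methods added above wrap a `StreamConverter` in a Web `ReadableStream`, so an OpenAI-compatible SSE body can be converted to Anthropic-style SSE as it arrives. A minimal usage sketch in TypeScript (the import path, endpoint, and option values are assumptions; the method and option names come from the diff above):

    import { O2ASSEAdapter } from "ai-protocol-adapters"; // assumed export/import path

    const adapter = new O2ASSEAdapter(/* debugMode */ false);

    // Any OpenAI-compatible streaming chat completion works here; URL and body are placeholders.
    const upstream = await fetch("https://api.openai.com/v1/chat/completions", {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${process.env.OPENAI_API_KEY}`
      },
      body: JSON.stringify({ model: "gpt-4o", stream: true, messages: [{ role: "user", content: "Hi" }] })
    });

    // Converts chunk by chunk; throws if upstream.body is null, as shown in the diff above.
    const anthropicSSE = adapter.convertResponseStream(upstream, {
      debug: false,
      errorRecovery: true, // keep converting after a bad chunk instead of erroring the stream
      onError: (error, ctx) => console.warn("chunk failed", error, ctx.state)
    });

    // anthropicSSE is a ReadableStream that can be returned directly as an HTTP response body.

For finer control, `createStreamConverter()` returns the underlying converter, whose `getInitialEvents()`, `processChunk()`, `finalize()`, and `getStats()` calls are exactly what `convertReadableStream` drives internally.
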
@@ -4805,15 +5993,56 @@ var O2ASSEAdapterStatic = {
   validateClaudeSSE: (sseContent) => {
     const adapter = new O2ASSEAdapter(false);
     return adapter.validateClaudeSSE(sseContent);
+  },
+  /**
+   * Convert a Response stream to Anthropic SSE (static method)
+   * New: handles streaming conversion directly from a Response object
+   */
+  convertResponseStream: (openaiResponse, options = {}) => {
+    const adapter = new O2ASSEAdapter(options.debug || false, {
+      defaultModel: options.modelName || "claude-sonnet-4",
+      generateUniqueMessageId: !options.messageId,
+      errorDataMaxLength: 500
+    });
+    return adapter.convertResponseStream(openaiResponse, options);
+  },
+  /**
+   * Convert a ReadableStream to Anthropic SSE (static method)
+   * New: handles streaming conversion of any ReadableStream<Uint8Array>
+   */
+  convertReadableStream: (openaiStream, options = {}) => {
+    const adapter = new O2ASSEAdapter(options.debug || false, {
+      defaultModel: options.modelName || "claude-sonnet-4",
+      generateUniqueMessageId: !options.messageId,
+      errorDataMaxLength: 500
+    });
+    return adapter.convertReadableStream(openaiStream, options);
+  },
+  /**
+   * Create a stream converter (static method)
+   * New: provides finer-grained control over stream processing
+   */
+  createStreamConverter: (options = {}) => {
+    const adapter = new O2ASSEAdapter(options.debug || false, {
+      defaultModel: options.modelName || "claude-sonnet-4",
+      generateUniqueMessageId: !options.messageId,
+      errorDataMaxLength: 500
+    });
+    return adapter.createStreamConverter(options);
   }
 };

 // src/core/standard/standard-protocol-adapter.ts
 var StandardProtocolAdapter = class {
   constructor(options = {}) {
-    this.debugMode = options.debugMode
+    this.debugMode = options.debugMode ?? process.env.AI_PROTOCOL_DEBUG === "true";
     this.sseAdapter = new O2ASSEAdapter(this.debugMode);
   }
+  logDebug(message, meta) {
+    if (this.debugMode) {
+      console.debug(message, meta ?? "");
+    }
+  }
   /**
    * Convert an Anthropic request to the OpenAI request format
    * @param anthropicRequest - Request in Anthropic format
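
Two things change here for callers: `O2ASSEAdapterStatic` gains one-shot streaming helpers that construct a throwaway adapter with default settings, and `StandardProtocolAdapter` now falls back to the `AI_PROTOCOL_DEBUG` environment variable when `debugMode` is not passed. A short sketch (import names are assumptions based on the identifiers above):

    import { O2ASSEAdapterStatic, StandardProtocolAdapter } from "ai-protocol-adapters"; // assumed exports

    declare const upstreamBody: ReadableStream<Uint8Array>; // e.g. response.body from fetch()

    // One-shot conversion without managing an adapter instance yourself.
    const sse = O2ASSEAdapterStatic.convertReadableStream(upstreamBody, {
      modelName: "claude-sonnet-4", // also the wrapper's default when omitted
      messageId: "msg_abc123",      // providing one disables generateUniqueMessageId
      debug: true
    });

    // Debug logging can now be switched on without touching code.
    process.env.AI_PROTOCOL_DEBUG = "true";
    const std = new StandardProtocolAdapter(); // debugMode === true via the env fallback
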
@@ -4837,7 +6066,7 @@ var StandardProtocolAdapter = class {
    */
   convertFromStreamToStandard(openaiRawStream, modelName, messageId) {
     if (this.debugMode) {
-
+      this.logDebug("\u{1F504} [StandardProtocolAdapter] convertFromStreamToStandard \u5F00\u59CB\u5904\u7406:", {
         rawStreamLength: openaiRawStream.length,
         modelName,
         messageId,
@@ -4846,14 +6075,14 @@ var StandardProtocolAdapter = class {
     }
     const sseResult = this.sseAdapter.convertToClaudeSSE(openaiRawStream, modelName, messageId);
     if (this.debugMode) {
-
+      this.logDebug("\u{1F504} [StandardProtocolAdapter] SSE\u8F6C\u6362\u5B8C\u6210:", {
         sseResultLength: sseResult.length,
         ssePreview: sseResult.substring(0, 500)
       });
     }
     const standardResponse = this.extractStandardResponseFromSSE(sseResult, modelName, messageId);
     if (this.debugMode) {
-
+      this.logDebug("\u{1F504} [StandardProtocolAdapter] \u6807\u51C6\u54CD\u5E94\u63D0\u53D6\u5B8C\u6210:", {
         contentLength: standardResponse.content.length,
         usage: standardResponse.usage,
         stopReason: standardResponse.stop_reason
@@ -4868,7 +6097,7 @@ var StandardProtocolAdapter = class {
     const lines = sseContent.split("\n");
     const finalMessageId = messageId || generateMessageId();
     if (this.debugMode) {
-
+      this.logDebug("\u{1F50D} [StandardProtocolAdapter] extractStandardResponseFromSSE \u5F00\u59CB\u89E3\u6790:", {
         totalLines: lines.length,
         messageId: finalMessageId
       });
@@ -4887,7 +6116,10 @@ var StandardProtocolAdapter = class {
       }
     };
     let currentTextContent = "";
+    let currentThinkingContent = "";
     const toolCalls = /* @__PURE__ */ new Map();
+    const toolInputBuffers = /* @__PURE__ */ new Map();
+    const indexToToolId = /* @__PURE__ */ new Map();
     let processedDataLines = 0;
     for (const line of lines) {
       if (line.startsWith("data: ")) {
@@ -4900,24 +6132,80 @@ var StandardProtocolAdapter = class {
       if (data.type === "content_block_start") {
         const contentBlock = data.content_block;
         if (contentBlock.type === "tool_use") {
+          const toolIndex = data.index;
           toolCalls.set(contentBlock.id, {
             type: "tool_use",
             id: contentBlock.id,
             name: contentBlock.name,
             input: contentBlock.input || {}
+            // Initially an empty object; it is filled in later
+          });
+          toolInputBuffers.set(toolIndex, "");
+          indexToToolId.set(toolIndex, contentBlock.id);
+          console.log("\u{1F527}\u{1F527}\u{1F527} [StandardProtocolAdapter] \u6DFB\u52A0\u5DE5\u5177\u8C03\u7528:", {
+            index: toolIndex,
+            toolId: contentBlock.id,
+            name: contentBlock.name,
+            indexToToolIdSize: indexToToolId.size
           });
-          if (this.debugMode) {
-            console.log("\u{1F527} [StandardProtocolAdapter] \u6DFB\u52A0\u5DE5\u5177\u8C03\u7528:", contentBlock);
-          }
         }
       }
       if (data.type === "content_block_delta" && data.delta?.type === "text_delta") {
         currentTextContent += data.delta.text;
         if (this.debugMode && currentTextContent.length % 50 === 0) {
-
+          this.logDebug(`\u{1F4DD} [StandardProtocolAdapter] \u7D2F\u79EF\u6587\u672C\u5185\u5BB9 (${currentTextContent.length}\u5B57\u7B26)`, currentTextContent.substring(currentTextContent.length - 20));
+        }
+      }
+      if (data.type === "content_block_delta" && data.delta?.type === "thinking_delta") {
+        currentThinkingContent += data.delta.thinking;
+        if (this.debugMode && currentThinkingContent.length % 100 === 0) {
+          this.logDebug(`\u{1F9E0} [StandardProtocolAdapter] \u7D2F\u79EFthinking\u5185\u5BB9 (${currentThinkingContent.length}\u5B57\u7B26)`);
         }
       }
       if (data.type === "content_block_delta" && data.delta?.type === "input_json_delta") {
+        const toolIndex = data.index;
+        const toolId = indexToToolId.get(toolIndex);
+        if (this.debugMode) {
+          this.logDebug(`\u{1F527}\u{1F527}\u{1F527} [StandardProtocolAdapter] \u68C0\u6D4B\u5230input_json_delta\u4E8B\u4EF6\uFF01`, {
+            toolIndex,
+            toolId: toolId || "NOT_FOUND",
+            delta: data.delta.partial_json
+          });
+        }
+        if (toolId) {
+          const currentBuffer = toolInputBuffers.get(toolIndex) || "";
+          const newBuffer = currentBuffer + data.delta.partial_json;
+          toolInputBuffers.set(toolIndex, newBuffer);
+          if (this.debugMode) {
+            this.logDebug(`\u{1F527} [StandardProtocolAdapter] \u7D2F\u79EF\u5DE5\u5177\u53C2\u6570 (index=${toolIndex}, id=${toolId})`, {
+              bufferLength: newBuffer.length
+            });
+          }
+        } else {
+          console.warn(`\u26A0\uFE0F [StandardProtocolAdapter] \u627E\u4E0D\u5230toolId for index=${toolIndex}`);
+        }
+      }
+      if (data.type === "content_block_stop") {
+        const toolIndex = data.index;
+        const toolId = indexToToolId.get(toolIndex);
+        if (toolId) {
+          const jsonBuffer = toolInputBuffers.get(toolIndex);
+          const tool = toolCalls.get(toolId);
+          if (jsonBuffer && tool) {
+            try {
+              const parsedInput = JSON.parse(jsonBuffer);
+              tool.input = parsedInput;
+              if (this.debugMode) {
+                this.logDebug(`\u2705 [StandardProtocolAdapter] \u5DE5\u5177\u53C2\u6570\u89E3\u6790\u5B8C\u6210 (index=${toolIndex}, id=${toolId})`, parsedInput);
+              }
+            } catch (parseError) {
+              console.warn(`\u26A0\uFE0F [StandardProtocolAdapter] \u5DE5\u5177\u53C2\u6570JSON\u89E3\u6790\u5931\u8D25 (index=${toolIndex}, id=${toolId}):`, {
+                buffer: jsonBuffer,
+                error: parseError
+              });
+            }
+          }
+        }
       }
       if (data.type === "message_delta") {
         if (data.delta?.stop_reason) {
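
The streaming protocol delivers a tool call's arguments as `input_json_delta` fragments, each carrying an arbitrary slice of the JSON document, so the adapter buffers them per content-block index and only parses at `content_block_stop`. A standalone sketch of that accumulation pattern (event field names follow the handlers above; this is an illustration, not the adapter's own code):

    type ToolUse = { type: "tool_use"; id: string; name: string; input: Record<string, unknown> };

    const toolCalls = new Map<string, ToolUse>();
    const buffers = new Map<number, string>();   // content-block index -> partial JSON text
    const indexToId = new Map<number, string>(); // content-block index -> tool_use id

    function onSSEEvent(data: any): void {
      if (data.type === "content_block_start" && data.content_block?.type === "tool_use") {
        const block = data.content_block;
        toolCalls.set(block.id, { type: "tool_use", id: block.id, name: block.name, input: {} });
        buffers.set(data.index, "");
        indexToId.set(data.index, block.id);
      } else if (data.type === "content_block_delta" && data.delta?.type === "input_json_delta") {
        // Fragments are not valid JSON on their own; just concatenate them.
        buffers.set(data.index, (buffers.get(data.index) ?? "") + data.delta.partial_json);
      } else if (data.type === "content_block_stop") {
        const id = indexToId.get(data.index);
        const json = buffers.get(data.index);
        if (id && json) {
          try {
            toolCalls.get(id)!.input = JSON.parse(json);
          } catch {
            // Incomplete or malformed JSON: leave input as {} rather than throwing.
          }
        }
      }
    }

    // e.g. '{"city":"Pa' and 'ris"}' arrive as two deltas and parse only once the block stops.
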
@@ -4926,7 +6214,7 @@ var StandardProtocolAdapter = class {
         if (data.usage) {
           response.usage = data.usage;
           if (this.debugMode) {
-
+            this.logDebug("\u{1F4CA} [StandardProtocolAdapter] \u66F4\u65B0usage\u4FE1\u606F:", data.usage);
           }
         }
       }
@@ -4938,6 +6226,12 @@ var StandardProtocolAdapter = class {
         }
       }
     }
+    if (currentThinkingContent.trim()) {
+      response.content.push({
+        type: "thinking",
+        thinking: currentThinkingContent.trim()
+      });
+    }
     if (currentTextContent.trim()) {
       response.content.push({
         type: "text",
@@ -4946,8 +6240,9 @@ var StandardProtocolAdapter = class {
     }
     response.content.push(...Array.from(toolCalls.values()));
     if (this.debugMode) {
-
+      this.logDebug("\u2705 [StandardProtocolAdapter] \u6807\u51C6\u54CD\u5E94\u6784\u5EFA\u5B8C\u6210:", {
         contentCount: response.content.length,
+        thinkingLength: currentThinkingContent.length,
         textLength: currentTextContent.length,
         toolCallsCount: toolCalls.size,
         finalUsage: response.usage,
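
After these changes, `extractStandardResponseFromSSE` assembles `response.content` in a fixed order: an optional `thinking` block, then the accumulated `text` block, then every buffered `tool_use` block, with `usage` and `stop_reason` taken from `message_delta`. A sketch of the resulting shape (values are purely illustrative):

    const standardResponse = {
      content: [
        { type: "thinking", thinking: "The user asked for weather, so call the tool." },
        { type: "text", text: "Let me check the weather for you." },
        { type: "tool_use", id: "toolu_01", name: "get_weather", input: { city: "Paris" } }
      ],
      stop_reason: "tool_use",
      usage: { input_tokens: 42, output_tokens: 17 }
    };
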
@@ -5807,6 +7102,7 @@ var FEATURES = {
   createAnthropicSDK,
   createOpenAISDK,
   createValidator,
+  downloadImageAsBase64,
   errorRecovery,
   getAllHealingStrategies,
   getGlobalLogger,
@@ -5816,6 +7112,8 @@ var FEATURES = {
   healO2ARequest,
   healO2AResponse,
   healingValidate,
+  isBase64DataUri,
+  isExternalUrl,
   isRecoverable,
   protocolHealer,
   safeValidate,