@rdmind/rdmind 0.2.3-alpha.2 → 0.2.3-alpha.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cli.js +842 -6
- package/package.json +2 -2
package/cli.js
CHANGED
@@ -133331,6 +133331,8 @@ var init_tokenLimits = __esm({
 // some Sonnet 3.7/Opus variants advertise 1M beta in docs
 [/^claude-sonnet-4.*$/, LIMITS["1m"]],
 [/^claude-opus-4.*$/, LIMITS["1m"]],
+// Claude Opus 4, 4.1, 4.5 all have 200K context (using 1M as upper bound)
+[/^claude-haiku-4.*$/, LIMITS["200k"]],
 // -------------------
 // Alibaba / Qwen
 // -------------------
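The table this hunk extends pairs model-name regexes with context-window limits. A standalone sketch of that lookup pattern, for reference only (the LIMITS values and the getTokenLimit helper are illustrative assumptions, not code from the bundle):

```typescript
// Hypothetical regex-keyed token-limit table; first matching pattern wins.
const LIMITS = { "200k": 200_000, "1m": 1_000_000 } as const;

const MODEL_LIMITS: Array<[RegExp, number]> = [
  [/^claude-sonnet-4.*$/, LIMITS["1m"]],
  [/^claude-opus-4.*$/, LIMITS["1m"]],
  [/^claude-haiku-4.*$/, LIMITS["200k"]], // pattern added in this release
];

function getTokenLimit(model: string, fallback = 128_000): number {
  for (const [pattern, limit] of MODEL_LIMITS) {
    if (pattern.test(model)) return limit;
  }
  return fallback; // unknown models get a conservative default
}

console.log(getTokenLimit("claude-haiku-4-5")); // 200000
```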
@@ -146025,6 +146027,827 @@ var init_geminiContentGenerator = __esm({
 }
 });
 
+// packages/core/src/core/vertexAnthropicContentGenerator.ts
+var vertexAnthropicContentGenerator_exports = {};
+__export(vertexAnthropicContentGenerator_exports, {
+VertexAnthropicContentGenerator: () => VertexAnthropicContentGenerator
+});
+var DefaultTelemetryService2, VertexAnthropicContentGenerator;
+var init_vertexAnthropicContentGenerator = __esm({
+"packages/core/src/core/vertexAnthropicContentGenerator.ts"() {
+"use strict";
+init_esbuild_shims();
+init_node();
+init_errorHandler();
+init_loggers();
+init_types();
+init_openaiLogger();
+DefaultTelemetryService2 = class {
+constructor(config2, enableLogging = false, loggingDir) {
+this.config = config2;
+this.enableLogging = enableLogging;
+this.logger = new OpenAILogger(loggingDir);
+}
+static {
+__name(this, "DefaultTelemetryService");
+}
+logger;
+async logSuccess(context2, response, request4, rawResponse) {
+const responseEvent = new ApiResponseEvent(
+response.responseId || "unknown",
+context2.model,
+context2.duration,
+context2.userPromptId,
+context2.authType,
+response.usageMetadata
+);
+logApiResponse(this.config, responseEvent);
+if (this.enableLogging && request4 && rawResponse) {
+await this.logger.logInteraction(request4, rawResponse);
+}
+}
+async logError(context2, error2, request4) {
+const errorMessage = error2 instanceof Error ? error2.message : String(error2);
+const apiError = error2;
+const errorEvent = new ApiErrorEvent(
+apiError?.requestID || "unknown",
+context2.model,
+errorMessage,
+context2.duration,
+context2.userPromptId,
+context2.authType,
+apiError?.type,
+apiError?.code
+);
+logApiError(this.config, errorEvent);
+if (this.enableLogging && request4) {
+await this.logger.logInteraction(request4, void 0, error2);
+}
+}
+async logStreamingSuccess(context2, responses, request4, _chunks, combinedResponse) {
+const finalUsageMetadata = responses.slice().reverse().find((r5) => r5.usageMetadata)?.usageMetadata;
+const lastResponse = responses[responses.length - 1];
+const responseEvent = new ApiResponseEvent(
+lastResponse?.responseId || "unknown",
+context2.model,
+context2.duration,
+context2.userPromptId,
+context2.authType,
+finalUsageMetadata
+);
+logApiResponse(this.config, responseEvent);
+if (this.enableLogging && request4 && combinedResponse) {
+await this.logger.logInteraction(request4, combinedResponse);
+}
+}
+};
+VertexAnthropicContentGenerator = class {
+static {
+__name(this, "VertexAnthropicContentGenerator");
+}
+baseUrl;
+apiKey;
+samplingParams;
+reasoning;
+cliConfig;
+telemetryService;
+errorHandler;
+constructor(config2, cliConfig) {
+this.baseUrl = config2.baseUrl || "";
+this.apiKey = config2.apiKey || "";
+this.samplingParams = config2.samplingParams;
+this.reasoning = config2.reasoning;
+this.cliConfig = cliConfig;
+if (!this.apiKey) {
+throw new Error("API key is required for Vertex Anthropic");
+}
+if (!this.baseUrl) {
+throw new Error("Base URL is required for Vertex Anthropic");
+}
+if (cliConfig) {
+this.telemetryService = new DefaultTelemetryService2(
+cliConfig,
+config2.enableOpenAILogging,
+config2.openAILoggingDir
+);
+} else {
+this.telemetryService = {
+logSuccess: /* @__PURE__ */ __name(async () => {
+}, "logSuccess"),
+logError: /* @__PURE__ */ __name(async () => {
+}, "logError"),
+logStreamingSuccess: /* @__PURE__ */ __name(async () => {
+}, "logStreamingSuccess")
+};
+}
+this.errorHandler = new EnhancedErrorHandler(
+(error2, _request) => this.shouldSuppressErrorLogging(error2, _request)
+);
+}
+shouldSuppressErrorLogging(_error, _request) {
+return false;
+}
+getRequestUrl(action) {
+return `${this.baseUrl}:${action}`;
+}
+async fetchApi(url2, body, signal) {
+const headers = {
+"Content-Type": "application/json",
+"api-key": this.apiKey
+};
+if (this.cliConfig?.getDebugMode()) {
+console.debug(
+`[VertexAnthropicContentGenerator] Request URL: ${url2}`
+);
+console.debug(
+`[VertexAnthropicContentGenerator] Request body:`,
+JSON.stringify(body, null, 2)
+);
+}
+const response = await fetch(url2, {
+method: "POST",
+headers,
+body: JSON.stringify(body),
+signal
+});
+if (!response.ok) {
+const errorText = await response.text();
+if (this.cliConfig?.getDebugMode()) {
+console.error(
+`[VertexAnthropicContentGenerator] API Error (${response.status}):`,
+errorText
+);
+}
+throw new Error(
+`Vertex Anthropic API request failed: ${response.status} ${response.statusText} - ${errorText}`
+);
+}
+return response;
+}
+async convertGeminiRequestToVertexAnthropic(request4) {
+const messages = [];
+let systemInstruction;
+if (request4.config?.systemInstruction) {
+if (typeof request4.config.systemInstruction === "string") {
+systemInstruction = request4.config.systemInstruction;
+} else if ("parts" in request4.config.systemInstruction && Array.isArray(request4.config.systemInstruction.parts)) {
+systemInstruction = request4.config.systemInstruction.parts.filter((p2) => typeof p2 === "object" && "text" in p2).map((p2) => p2.text).join("\n");
+}
+}
+const contents = Array.isArray(request4.contents) ? request4.contents : [request4.contents];
+for (const content of contents) {
+if (typeof content === "string") {
+messages.push({ role: "user", content });
+} else if ("role" in content && "parts" in content && content.parts) {
+const role = content.role === "model" ? "assistant" : "user";
+const contentBlocks = this.convertPartsToAnthropicBlocks(content.parts);
+if (contentBlocks.length > 0) {
+if (contentBlocks.length === 1 && contentBlocks[0].type === "text") {
+messages.push({ role, content: contentBlocks[0].text });
+} else {
+messages.push({ role, content: contentBlocks });
+}
+}
+}
+}
+const temperature = this.samplingParams?.temperature ?? 1;
+const thinking = this.buildThinkingConfig(request4);
+const defaultMaxTokens = thinking ? thinking.budget_tokens + 16e3 : 1e4;
+const maxTokens = this.samplingParams?.max_tokens ?? defaultMaxTokens;
+const vertexRequest = {
+anthropic_version: "vertex-2023-10-16",
+messages,
+max_tokens: maxTokens,
+temperature
+};
+if (systemInstruction) {
+vertexRequest.system = systemInstruction;
+}
+if (this.samplingParams?.top_p !== void 0) {
+vertexRequest.top_p = this.samplingParams.top_p;
+}
+if (this.samplingParams?.top_k !== void 0) {
+vertexRequest.top_k = this.samplingParams.top_k;
+}
+if (thinking) {
+vertexRequest.thinking = thinking;
+}
+if (request4.config?.tools && request4.config.tools.length > 0) {
+const tools = await this.convertGeminiToolsToAnthropic(
+request4.config.tools
+);
+if (tools.length > 0) {
+vertexRequest.tools = tools;
+}
+}
+return vertexRequest;
+}
+buildThinkingConfig(request4) {
+if (request4.config?.thinkingConfig?.includeThoughts === false) {
+return void 0;
+}
+const reasoning = this.reasoning;
+if (reasoning === false) {
+return void 0;
+}
+if (reasoning?.budget_tokens !== void 0) {
+return {
+type: "enabled",
+budget_tokens: reasoning.budget_tokens
+};
+}
+const effort = reasoning?.effort ?? "medium";
+const budgetTokens = effort === "low" ? 16e3 : effort === "high" ? 64e3 : 32e3;
+return {
+type: "enabled",
+budget_tokens: budgetTokens
+};
+}
+/**
+ * Convert an array of Gemini Parts into an array of Anthropic content blocks
+ */
+convertPartsToAnthropicBlocks(parts) {
+const blocks = [];
+for (const part of parts) {
+const block2 = this.convertPartToAnthropicBlock(part);
+if (block2) {
+blocks.push(block2);
+}
+}
+return blocks;
+}
+/**
+ * Convert a single Gemini Part into an Anthropic content block
+ */
+convertPartToAnthropicBlock(part) {
+if ("text" in part && "thought" in part && part.thought) {
+const thinkingBlock = {
+type: "thinking",
+thinking: part.text || ""
+};
+if ("thoughtSignature" in part && typeof part.thoughtSignature === "string") {
+thinkingBlock.signature = part.thoughtSignature;
+}
+return thinkingBlock;
+}
+if ("text" in part && part.text && !("thought" in part && part.thought)) {
+return { type: "text", text: part.text };
+}
+if (part.inlineData?.mimeType && part.inlineData?.data) {
+const mimeType = part.inlineData.mimeType;
+if (this.isSupportedImageMimeType(mimeType)) {
+return {
+type: "image",
+source: {
+type: "base64",
+media_type: mimeType,
+data: part.inlineData.data
+}
+};
+}
+if (mimeType === "application/pdf") {
+return {
+type: "document",
+source: {
+type: "base64",
+media_type: "application/pdf",
+data: part.inlineData.data
+}
+};
+}
+const displayName = part.inlineData.displayName ? ` (${part.inlineData.displayName})` : "";
+return {
+type: "text",
+text: `[Unsupported media type: ${mimeType}${displayName}]`
+};
+}
+if (part.fileData?.mimeType && part.fileData?.fileUri) {
+return {
+type: "text",
+text: `[External file reference: ${part.fileData.fileUri}]`
+};
+}
+if ("functionCall" in part && part.functionCall) {
+return {
+type: "tool_use",
+id: part.functionCall.id || `tool_${Date.now()}`,
+name: part.functionCall.name || "",
+input: part.functionCall.args || {}
+};
+}
+if ("functionResponse" in part && part.functionResponse) {
+const response = part.functionResponse;
+let content;
+if (response.response) {
+content = JSON.stringify(response.response);
+} else {
+content = "";
+}
+return {
+type: "tool_result",
+tool_use_id: response.id || "",
+content
+};
+}
+return null;
+}
+/**
+ * Check whether the MIME type is an image type supported by Anthropic
+ */
+isSupportedImageMimeType(mimeType) {
+return mimeType === "image/jpeg" || mimeType === "image/png" || mimeType === "image/gif" || mimeType === "image/webp";
+}
+/**
+ * Convert Gemini tool definitions into Anthropic format
+ * See AnthropicContentConverter.convertGeminiToolsToAnthropic
+ */
+async convertGeminiToolsToAnthropic(geminiTools) {
+const tools = [];
+if (!geminiTools) {
+return tools;
+}
+for (const tool of geminiTools) {
+let actualTool;
+const toolObj = tool;
+if ("tool" in toolObj && typeof toolObj["tool"] === "function") {
+actualTool = await toolObj["tool"]();
+} else {
+actualTool = tool;
+}
+if (!actualTool.functionDeclarations) {
+continue;
+}
+for (const func of actualTool.functionDeclarations) {
+if (!func.name) continue;
+let inputSchema;
+if (func.parametersJsonSchema) {
+inputSchema = {
+...func.parametersJsonSchema
+};
+} else if (func.parameters) {
+inputSchema = func.parameters;
+}
+if (!inputSchema) {
+inputSchema = { type: "object", properties: {} };
+}
+if (typeof inputSchema["type"] !== "string") {
+inputSchema["type"] = "object";
+}
+tools.push({
+name: func.name,
+description: func.description,
+input_schema: inputSchema
+});
+}
+}
+return tools;
+}
+convertVertexAnthropicResponseToGemini(response) {
+const parts = [];
+for (const content of response.content) {
+if (content.type === "text" && content.text) {
+parts.push({ text: content.text });
+} else if (content.type === "thinking" && content.thinking) {
+const thinkingPart = { text: content.thinking, thought: true };
+if (content.signature) {
+thinkingPart.thoughtSignature = content.signature;
+}
+parts.push(thinkingPart);
+} else if (content.type === "tool_use" && content.name && content.id) {
+parts.push({
+functionCall: {
+name: content.name,
+args: content.input || {},
+id: content.id
+}
+});
+}
+}
+const result = {
+responseId: response.id,
+modelVersion: response.model,
+candidates: [
+{
+content: {
+parts,
+role: "model"
+},
+index: 0,
+finishReason: this.mapFinishReason(response.stop_reason),
+safetyRatings: []
+}
+],
+promptFeedback: { safetyRatings: [] },
+usageMetadata: {
+promptTokenCount: response.usage.input_tokens,
+candidatesTokenCount: response.usage.output_tokens,
+totalTokenCount: response.usage.input_tokens + response.usage.output_tokens
+}
+};
+return result;
+}
+mapFinishReason(stopReason) {
+if (!stopReason) {
+return void 0;
+}
+switch (stopReason) {
+case "end_turn":
+return FinishReason.STOP;
+case "max_tokens":
+return FinishReason.MAX_TOKENS;
+case "stop_sequence":
+return FinishReason.STOP;
+case "tool_use":
+return FinishReason.STOP;
+default:
+return FinishReason.OTHER;
+}
+}
+/**
+ * Safely parse JSON, returning the default value on failure
+ */
+safeJsonParse(jsonStr, defaultValue) {
+try {
+return JSON.parse(jsonStr);
+} catch {
+return defaultValue;
+}
+}
+async generateContent(request4, userPromptId) {
+const startTime = Date.now();
+const context2 = {
+userPromptId,
+model: request4.model,
+authType: "xhs-sso",
+startTime,
+duration: 0,
+isStreaming: false
+};
+try {
+const url2 = this.getRequestUrl("rawPredict");
+const body = await this.convertGeminiRequestToVertexAnthropic(request4);
+const response = await this.fetchApi(
+url2,
+body,
+request4.config?.abortSignal
+);
+const data = await response.json();
+context2.duration = Date.now() - startTime;
+const geminiResponse = this.convertVertexAnthropicResponseToGemini(data);
+await this.telemetryService.logSuccess(context2, geminiResponse, body, data);
+return geminiResponse;
+} catch (error2) {
+context2.duration = Date.now() - startTime;
+await this.telemetryService.logError(context2, error2, request4);
+return this.errorHandler.handle(error2, context2, request4);
+}
+}
+async generateContentStream(request4, userPromptId) {
+const startTime = Date.now();
+const context2 = {
+userPromptId,
+model: request4.model,
+authType: "xhs-sso",
+startTime,
+duration: 0,
+isStreaming: true
+};
+try {
+const url2 = this.getRequestUrl("streamRawPredict");
+const baseBody = await this.convertGeminiRequestToVertexAnthropic(request4);
+const body = {
+...baseBody,
+stream: true
+};
+const response = await this.fetchApi(
+url2,
+body,
+request4.config?.abortSignal
+);
+if (!response.body) {
+throw new Error("Response body is null");
+}
+const stream2 = this.handleStream(response.body);
+const collectedResponses = [];
+return async function* () {
+try {
+for await (const chunk of stream2) {
+collectedResponses.push(chunk);
+yield chunk;
+}
+context2.duration = Date.now() - startTime;
+const combinedResponse = this.combineResponses(collectedResponses);
+await this.telemetryService.logStreamingSuccess(
+context2,
+collectedResponses,
+body,
+void 0,
+combinedResponse
+);
+} catch (error2) {
+context2.duration = Date.now() - startTime;
+await this.telemetryService.logError(context2, error2, body);
+throw error2;
+}
+}.call(this);
+} catch (error2) {
+context2.duration = Date.now() - startTime;
+await this.telemetryService.logError(context2, error2, request4);
+return this.errorHandler.handle(error2, context2, request4);
+}
+}
+combineResponses(responses) {
+if (responses.length === 0) {
+return {};
+}
+const lastResponse = responses[responses.length - 1];
+let combinedText = "";
+for (const response of responses) {
+if (response.candidates && response.candidates[0]?.content?.parts) {
+for (const part of response.candidates[0].content.parts) {
+if ("text" in part && part.text) {
+combinedText += part.text;
+}
+}
+}
+}
+return {
+...lastResponse,
+candidates: lastResponse.candidates ? [
+{
+...lastResponse.candidates[0],
+content: {
+...lastResponse.candidates[0].content,
+parts: [{ text: combinedText }]
+}
+}
+] : void 0
+};
+}
+async *handleStream(body) {
+const reader = body.getReader();
+const decoder = new TextDecoder();
+let buffer = "";
+let currentEvent = "";
+let messageId;
+let model = "";
+let cachedTokens = 0;
+let promptTokens = 0;
+let completionTokens = 0;
+let finishReason;
+const blocks = /* @__PURE__ */ new Map();
+try {
+while (true) {
+const { done, value } = await reader.read();
+if (done) break;
+buffer += decoder.decode(value, { stream: true });
+const lines = buffer.split("\n");
+buffer = lines.pop() || "";
+for (const line of lines) {
+const trimmedLine = line.trim();
+if (!trimmedLine) {
+currentEvent = "";
+continue;
+}
+if (trimmedLine.startsWith("event: ")) {
+currentEvent = trimmedLine.slice(7).trim();
+continue;
+}
+if (trimmedLine.startsWith("data: ")) {
+const dataStr = trimmedLine.slice(6).trim();
+if (!dataStr || dataStr === "[DONE]") continue;
+try {
+const data = JSON.parse(dataStr);
+const eventType = data.type || currentEvent;
+switch (eventType) {
+case "message_start": {
+if (data.message) {
+messageId = data.message.id ?? messageId;
+model = data.message.model ?? model;
+if (data.message.usage) {
+cachedTokens = data.message.usage.cache_read_input_tokens ?? 0;
+promptTokens = data.message.usage.input_tokens ?? 0;
+}
+}
+break;
+}
+case "content_block_start": {
+const index = data.index ?? 0;
+const type = String(data.content_block?.type || "text");
+const initialInput = type === "tool_use" && data.content_block?.input ? JSON.stringify(data.content_block.input) : "";
+const initialSignature = type === "thinking" && data.content_block?.signature ? String(data.content_block.signature) : "";
+if (this.cliConfig?.getDebugMode() && type === "tool_use") {
+console.debug(
+`[VertexAnthropicContentGenerator] Tool use block start:`,
+JSON.stringify({
+index,
+id: data.content_block?.id,
+name: data.content_block?.name,
+initialInput
+})
+);
+}
+blocks.set(index, {
+type,
+id: type === "tool_use" ? String(data.content_block?.id || "") : void 0,
+name: type === "tool_use" ? String(data.content_block?.name || "") : void 0,
+// SDK compatibility: if the initial input is the empty object {}, store an empty string;
+// the actual arguments arrive via subsequent input_json_delta events
+inputJson: initialInput !== "{}" ? initialInput : "",
+signature: initialSignature
+});
+break;
+}
+case "content_block_delta": {
+const deltaType = data.delta?.type;
+const index = data.index ?? 0;
+if (deltaType === "text_delta" && data.delta?.text) {
+const chunk = this.buildGeminiChunk(
+{ text: data.delta.text },
+messageId,
+model
+);
+yield chunk;
+} else if (deltaType === "thinking_delta" && data.delta?.thinking) {
+const chunk = this.buildGeminiChunk(
+{ text: data.delta.thinking, thought: true },
+messageId,
+model
+);
+yield chunk;
+} else if (deltaType === "input_json_delta" && data.delta?.partial_json) {
+const blockState = blocks.get(index);
+if (blockState) {
+blockState.inputJson += data.delta.partial_json;
+if (this.cliConfig?.getDebugMode()) {
+console.debug(
+`[VertexAnthropicContentGenerator] input_json_delta:`,
+data.delta.partial_json
+);
+}
+}
+} else if (deltaType === "signature_delta" && data.delta?.signature) {
+const blockState = blocks.get(index);
+if (blockState) {
+blockState.signature += data.delta.signature;
+const chunk = this.buildGeminiChunk(
+{ thought: true, thoughtSignature: data.delta.signature },
+messageId,
+model
+);
+yield chunk;
+}
+}
+break;
+}
+case "content_block_stop": {
+const index = data.index ?? 0;
+const blockState = blocks.get(index);
+if (blockState?.type === "tool_use") {
+const args = this.safeJsonParse(blockState.inputJson || "{}", {});
+if (this.cliConfig?.getDebugMode()) {
+console.debug(
+`[VertexAnthropicContentGenerator] Tool use block stop:`,
+JSON.stringify({
+index,
+id: blockState.id,
+name: blockState.name,
+inputJson: blockState.inputJson,
+parsedArgs: args
+})
+);
+}
+const chunk = this.buildGeminiChunk(
+{
+functionCall: {
+id: blockState.id,
+name: blockState.name,
+args
+}
+},
+messageId,
+model
+);
+yield chunk;
+}
+blocks.delete(index);
+break;
+}
+case "message_delta": {
+if (data.delta?.stop_reason) {
+finishReason = data.delta.stop_reason;
+}
+if (data.usage?.output_tokens !== void 0) {
+completionTokens = data.usage.output_tokens;
+}
+if (finishReason || data.usage) {
+const chunk = this.buildGeminiChunk(
+void 0,
+messageId,
+model,
+finishReason,
+{
+cachedContentTokenCount: cachedTokens,
+promptTokenCount: cachedTokens + promptTokens,
+candidatesTokenCount: completionTokens,
+totalTokenCount: cachedTokens + promptTokens + completionTokens
+}
+);
+yield chunk;
+}
+break;
+}
+case "message_stop": {
+if (promptTokens || completionTokens) {
+const chunk = this.buildGeminiChunk(
+void 0,
+messageId,
+model,
+finishReason,
+{
+cachedContentTokenCount: cachedTokens,
+promptTokenCount: cachedTokens + promptTokens,
+candidatesTokenCount: completionTokens,
+totalTokenCount: cachedTokens + promptTokens + completionTokens
+}
+);
+yield chunk;
+}
+break;
+}
+default:
+break;
+}
+} catch (error2) {
+if (this.cliConfig?.getDebugMode()) {
+console.error(
+`[VertexAnthropicContentGenerator] Failed to parse SSE data:`,
+dataStr,
+error2
+);
+}
+}
+}
+}
+}
+} finally {
+reader.releaseLock();
+}
+}
+buildGeminiChunk(part, responseId, model, finishReason, usageMetadata) {
+const response = new GenerateContentResponse();
+response.responseId = responseId;
+response.createTime = Date.now().toString();
+response.modelVersion = model || "";
+response.promptFeedback = { safetyRatings: [] };
+let candidateParts = [];
+if (part) {
+if (part.functionCall) {
+candidateParts = [
+{
+functionCall: {
+name: part.functionCall.name || "",
+args: part.functionCall.args || {},
+id: part.functionCall.id
+}
+}
+];
+} else {
+candidateParts = [part];
+}
+}
+const mappedFinishReason = finishReason ? this.mapFinishReason(finishReason) : void 0;
+response.candidates = [
+{
+content: {
+parts: candidateParts,
+role: "model"
+},
+index: 0,
+safetyRatings: [],
+...mappedFinishReason ? { finishReason: mappedFinishReason } : {}
+}
+];
+if (usageMetadata) {
+response.usageMetadata = usageMetadata;
+}
+return response;
+}
+async countTokens(request4) {
+const content = JSON.stringify(request4.contents);
+const totalTokens = Math.ceil(content.length / 4);
+return { totalTokens };
+}
+async embedContent(_request) {
+throw new Error("Vertex Anthropic does not support embeddings.");
+}
+useSummarizedThinking() {
+return false;
+}
+};
+}
+});
+
 // node_modules/@anthropic-ai/sdk/version.mjs
 var VERSION3;
 var init_version3 = __esm({
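The generator added above reads Anthropic Messages streaming events (message_start, content_block_delta, content_block_stop, message_delta, message_stop) off an SSE body and re-emits them as Gemini-style chunks. A minimal sketch of just the text-delta part of that parsing, under simplified assumptions (the SseEvent type and extractTextDeltas helper are illustrative, not the bundle's API):

```typescript
// Pulls text_delta fragments out of already-split SSE lines; malformed JSON is
// skipped, mirroring the tolerant behaviour of the parser in the hunk above.
type SseEvent = { type?: string; delta?: { type?: string; text?: string } };

function* extractTextDeltas(sseLines: string[]): Generator<string> {
  for (const line of sseLines) {
    const trimmed = line.trim();
    if (!trimmed.startsWith("data: ")) continue;
    const payload = trimmed.slice(6).trim();
    if (!payload || payload === "[DONE]") continue;
    let event: SseEvent;
    try {
      event = JSON.parse(payload) as SseEvent;
    } catch {
      continue;
    }
    if (event.type === "content_block_delta" && event.delta?.type === "text_delta" && event.delta.text) {
      yield event.delta.text;
    }
  }
}

const demo = [
  'data: {"type":"content_block_delta","delta":{"type":"text_delta","text":"Hello "}}',
  'data: {"type":"content_block_delta","delta":{"type":"text_delta","text":"world"}}',
  "data: [DONE]",
];
console.log([...extractTextDeltas(demo)].join("")); // "Hello world"
```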
@@ -157557,7 +158380,7 @@ __export(geminiContentGenerator_exports2, {
 createGeminiContentGenerator: () => createGeminiContentGenerator
 });
 function createGeminiContentGenerator(config2, gcConfig) {
-const version2 = "0.2.3-alpha.2";
+const version2 = "0.2.3-alpha.3";
 const userAgent2 = config2.userAgent || `QwenCode/${version2} (${process.platform}; ${process.arch})`;
 const baseHeaders = {
 "User-Agent": userAgent2
@@ -157732,6 +158555,12 @@ async function createContentGenerator(generatorConfig, config2, isInitialAuth) {
 if (model.startsWith("gemini")) {
 const { GeminiContentGenerator: GeminiContentGenerator3 } = await Promise.resolve().then(() => (init_geminiContentGenerator(), geminiContentGenerator_exports));
 baseGenerator = new GeminiContentGenerator3(generatorConfig, config2);
+} else if (model.startsWith("claude")) {
+const { VertexAnthropicContentGenerator: VertexAnthropicContentGenerator2 } = await Promise.resolve().then(() => (init_vertexAnthropicContentGenerator(), vertexAnthropicContentGenerator_exports));
+baseGenerator = new VertexAnthropicContentGenerator2(
+generatorConfig,
+config2
+);
 } else {
 const { createOpenAIContentGenerator: createOpenAIContentGenerator2 } = await Promise.resolve().then(() => (init_openaiContentGenerator2(), openaiContentGenerator_exports));
 baseGenerator = createOpenAIContentGenerator2(generatorConfig, config2);
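The hunk above dispatches on the model-name prefix: gemini-prefixed models keep the existing Gemini generator, claude-prefixed models now get the new Vertex Anthropic generator, and everything else falls through to the OpenAI-compatible path. A reduced sketch of that routing decision (GeneratorKind and pickGenerator are illustrative names, not the bundle's):

```typescript
// Prefix-based generator selection, mirroring the branch order in the diff.
type GeneratorKind = "gemini" | "vertex-anthropic" | "openai-compatible";

function pickGenerator(model: string): GeneratorKind {
  if (model.startsWith("gemini")) return "gemini";
  if (model.startsWith("claude")) return "vertex-anthropic"; // new in this release
  return "openai-compatible";
}

console.log(pickGenerator("claude-opus-4-5@20251101")); // "vertex-anthropic"
console.log(pickGenerator("qwen3-coder-plus"));          // "openai-compatible"
```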
@@ -259110,8 +259939,8 @@ var init_git_commit = __esm({
 "packages/core/src/generated/git-commit.ts"() {
 "use strict";
 init_esbuild_shims();
-GIT_COMMIT_INFO = "
-CLI_VERSION = "0.2.3-alpha.2";
+GIT_COMMIT_INFO = "982b367d4";
+CLI_VERSION = "0.2.3-alpha.3";
 }
 });
 
@@ -359905,7 +360734,7 @@ __name(getPackageJson, "getPackageJson");
 // packages/cli/src/utils/version.ts
 async function getCliVersion() {
 const pkgJson = await getPackageJson();
-return "0.2.3-alpha.2";
+return "0.2.3-alpha.3";
 }
 __name(getCliVersion, "getCliVersion");
 
@@ -367635,7 +368464,7 @@ var formatDuration = /* @__PURE__ */ __name((milliseconds) => {
 
 // packages/cli/src/generated/git-commit.ts
 init_esbuild_shims();
-var GIT_COMMIT_INFO2 = "
+var GIT_COMMIT_INFO2 = "982b367d4";
 
 // packages/cli/src/utils/systemInfo.ts
 async function getNpmVersion() {
@@ -407437,6 +408266,13 @@ var XHS_SSO_MODELS = [
 baseUrl: "https://runway.devops.xiaohongshu.com/openai/moonshot/v1",
 contextWindow: "256K",
 description: "\u5728 Agent\u3001\u4EE3\u7801\u3001\u89C6\u89C9\u7406\u89E3\u53CA\u4E00\u7CFB\u5217\u901A\u7528\u667A\u80FD\u4EFB\u52A1\u4E0A\u53D6\u5F97\u5F00\u6E90 SoTA \u8868\u73B0"
+},
+{
+id: "claude-opus-4-5@20251101",
+displayName: "Claude Opus 4.5",
+baseUrl: "https://runway.devops.rednote.life/openai/google/anthropic/v1",
+contextWindow: "200K",
+description: "Anthropic \u6700\u5F3A\u5927\u7684\u6A21\u578B\uFF0C\u64C5\u957F\u590D\u6742\u63A8\u7406\u548C\u4EE3\u7801\u751F\u6210"
 }
 ];
 
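The new Claude Opus 4.5 record follows the same shape as the existing XHS_SSO_MODELS entries. A sketch of that shape as a TypeScript interface (the SsoModelEntry name is hypothetical; the field values below mirror the added entry, with its description paraphrased in English):

```typescript
// Assumed shape of one model-catalog entry, inferred from the diff above.
interface SsoModelEntry {
  id: string;
  displayName: string;
  baseUrl: string;
  contextWindow: string; // human-readable size, e.g. "200K"
  description: string;
}

const claudeOpus45: SsoModelEntry = {
  id: "claude-opus-4-5@20251101",
  displayName: "Claude Opus 4.5",
  baseUrl: "https://runway.devops.rednote.life/openai/google/anthropic/v1",
  contextWindow: "200K",
  description: "Anthropic's most capable model, strong at complex reasoning and code generation",
};

console.log(`${claudeOpus45.displayName}: ${claudeOpus45.contextWindow} context window`);
```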
@@ -426065,7 +426901,7 @@ var GeminiAgent = class {
 name: APPROVAL_MODE_INFO[mode].name,
 description: APPROVAL_MODE_INFO[mode].description
 }));
-const version2 = "0.2.3-alpha.2";
+const version2 = "0.2.3-alpha.3";
 return {
 protocolVersion: PROTOCOL_VERSION,
 agentInfo: {
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "@rdmind/rdmind",
-"version": "0.2.3-alpha.2",
+"version": "0.2.3-alpha.3",
 "description": "RDMind - AI-powered coding assistant",
 "type": "module",
 "main": "cli.js",
@@ -20,7 +20,7 @@
 "locales"
 ],
 "config": {
-"sandboxImageUri": "ghcr.io/qwenlm/qwen-code:0.2.3-alpha.2"
+"sandboxImageUri": "ghcr.io/qwenlm/qwen-code:0.2.3-alpha.3"
 },
 "publishConfig": {
 "access": "public"