opencode-codebuddy-external-auth 1.0.10 → 1.0.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/plugin.js +177 -19
- package/package.json +1 -1
package/dist/plugin.js
CHANGED
|
@@ -8,9 +8,13 @@ const PROVIDER_ID = "codebuddy-external";
|
|
|
8
8
|
// Provider configuration for the codebuddy-external plugin.
const CONFIG = {
  // The IOA build authenticates against copilot.tencent.com.
  serverUrl: "https://copilot.tencent.com",
  // Local `codebuddy --serve` proxy address.
  localServeUrl: "http://127.0.0.1:3000",
  // Platform identifier sent with requests.
  platform: "CLI",
  appVersion: "2.37.20",
  // Selects HTTP API mode (true) over CLI subprocess mode (false).
  useHttpApi: true,
};
|
|
15
19
|
// ============================================================================
|
|
16
20
|
// Utility Functions
|
|
@@ -200,30 +204,17 @@ function createProxyFetch() {
|
|
|
200
204
|
const openaiRequest = JSON.parse(typeof body === "string" ? body : await new Response(body).text());
|
|
201
205
|
// Convert to CodeBuddy format
|
|
202
206
|
const { prompt, systemPrompt } = convertMessagesToPrompt(openaiRequest.messages);
|
|
203
|
-
//
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
// Execute codebuddy CLI
|
|
207
|
-
const resultText = await executeCodeBuddyCLI(prompt, openaiRequest.model, systemPrompt);
|
|
208
|
-
// Convert response to OpenAI format
|
|
209
|
-
if (openaiRequest.stream) {
|
|
210
|
-
return new Response(createOpenAIStreamResponse(resultText, openaiRequest.model), {
|
|
211
|
-
headers: {
|
|
212
|
-
"Content-Type": "text/event-stream",
|
|
213
|
-
"Cache-Control": "no-cache",
|
|
214
|
-
"Connection": "keep-alive",
|
|
215
|
-
},
|
|
216
|
-
});
|
|
207
|
+
// 根据配置选择 HTTP API 模式或 CLI 模式
|
|
208
|
+
if (CONFIG.useHttpApi) {
|
|
209
|
+
return await executeViaHttpApi(openaiRequest, prompt, systemPrompt);
|
|
217
210
|
}
|
|
218
211
|
else {
|
|
219
|
-
return
|
|
220
|
-
headers: { "Content-Type": "application/json" },
|
|
221
|
-
});
|
|
212
|
+
return await executeViaCli(openaiRequest, prompt, systemPrompt);
|
|
222
213
|
}
|
|
223
214
|
}
|
|
224
215
|
catch (error) {
|
|
225
|
-
console.error(`[codebuddy-external]
|
|
226
|
-
return new Response(JSON.stringify({ error: `CodeBuddy
|
|
216
|
+
console.error(`[codebuddy-external] Error:`, error);
|
|
217
|
+
return new Response(JSON.stringify({ error: `CodeBuddy error: ${error}` }), {
|
|
227
218
|
status: 500,
|
|
228
219
|
headers: { "Content-Type": "application/json" },
|
|
229
220
|
});
|
|
@@ -233,6 +224,173 @@ function createProxyFetch() {
|
|
|
233
224
|
return fetch(url, init);
|
|
234
225
|
};
|
|
235
226
|
}
|
|
227
|
+
/**
 * Execute a chat request through the local `codebuddy --serve` HTTP API.
 * Supports streaming responses.
 *
 * @param {object} openaiRequest - Parsed OpenAI-style request ({ model, stream, ... }).
 * @param {string} prompt - Flattened user prompt text.
 * @param {string|undefined} systemPrompt - Optional system prompt.
 * @returns {Promise<Response>} OpenAI-compatible Response (SSE or JSON).
 * @throws {Error} When the local serve endpoint replies with a non-2xx status.
 */
async function executeViaHttpApi(openaiRequest, prompt, systemPrompt) {
  // Build the CodeBuddy /agent payload. `print: true` is the key flag:
  // it requests non-interactive mode so the server returns a result.
  const payload = {
    prompt,
    model: openaiRequest.model,
    print: true,
    outputFormat: openaiRequest.stream ? "stream-json" : "text",
    ...(systemPrompt ? { systemPrompt } : {}),
  };

  // Call the local codebuddy --serve proxy.
  const upstream = await fetch(`${CONFIG.localServeUrl}/agent`, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
    },
    body: JSON.stringify(payload),
  });

  if (!upstream.ok) {
    const errorText = await upstream.text();
    throw new Error(`HTTP API error: ${upstream.status} - ${errorText}`);
  }

  // Streaming: re-emit the CodeBuddy SSE stream in OpenAI SSE format.
  if (openaiRequest.stream && upstream.body) {
    const sse = transformCodeBuddyStreamToOpenAI(upstream.body, openaiRequest.model);
    return new Response(sse, {
      headers: {
        "Content-Type": "text/event-stream",
        "Cache-Control": "no-cache",
        "Connection": "keep-alive",
      },
    });
  }

  // Non-streaming: the body may be a JSON envelope carrying an `output`
  // field, or plain text — fall back to the raw text when it isn't JSON.
  const raw = await upstream.text();
  let resultText = raw;
  try {
    const parsed = JSON.parse(raw);
    if (parsed.output) {
      resultText = parsed.output;
    }
  }
  catch {
    // Not JSON — use the raw body as-is.
  }
  return new Response(createOpenAIResponse(resultText, openaiRequest.model), {
    headers: { "Content-Type": "application/json" },
  });
}
|
|
281
|
+
/**
 * Transform a CodeBuddy SSE stream into an OpenAI chat-completion SSE stream.
 *
 * CodeBuddy format:
 *   event: next
 *   data: {"type": "assistant", "message": {"content": [{"type": "text", "text": "..."}]}}
 *
 * OpenAI format:
 *   data: {"choices": [{"delta": {"content": "..."}}]}
 *
 * @param {ReadableStream<Uint8Array>} sourceStream - Raw CodeBuddy SSE byte stream.
 * @param {string} model - Model name echoed into every OpenAI chunk.
 * @returns {ReadableStream<Uint8Array>} OpenAI-format SSE byte stream.
 */
function transformCodeBuddyStreamToOpenAI(sourceStream, model) {
  const encoder = new TextEncoder();
  const decoder = new TextDecoder();
  const id = `chatcmpl-${Date.now()}`;
  const created = Math.floor(Date.now() / 1000);
  let buffer = "";
  // Guard so the terminating stop-chunk + [DONE] sentinel are emitted exactly
  // once, whether the upstream sends "event: done" or simply ends.
  let finished = false;

  // Build one OpenAI chat.completion.chunk envelope.
  const makeChunk = (delta, finishReason) => ({
    id,
    object: "chat.completion.chunk",
    created,
    model,
    choices: [{ index: 0, delta, finish_reason: finishReason }],
  });
  const enqueueChunk = (controller, chunk) => {
    controller.enqueue(encoder.encode(`data: ${JSON.stringify(chunk)}\n\n`));
  };
  // Emit the finish_reason chunk and the [DONE] sentinel (idempotent).
  const finish = (controller) => {
    if (finished) return;
    finished = true;
    enqueueChunk(controller, makeChunk({}, "stop"));
    controller.enqueue(encoder.encode("data: [DONE]\n\n"));
  };
  // Parse a single SSE line and forward any assistant text as a delta chunk.
  const handleLine = (controller, line) => {
    if (line.startsWith("data: ")) {
      const dataStr = line.slice(6).trim();
      if (!dataStr || dataStr === "{}") return;
      try {
        const data = JSON.parse(dataStr);
        // Extract text parts from assistant messages.
        let text = "";
        if (data.type === "assistant" && data.message?.content) {
          for (const part of data.message.content) {
            if (part.type === "text" && part.text) {
              text += part.text;
            }
          }
        }
        if (text) {
          enqueueChunk(controller, makeChunk({ content: text }, null));
        }
      } catch {
        // Ignore malformed JSON payloads.
      }
    } else if (line.startsWith("event: done")) {
      finish(controller);
    }
  };

  return new ReadableStream({
    async start(controller) {
      const reader = sourceStream.getReader();
      // Lead with the role chunk expected by OpenAI SSE consumers.
      enqueueChunk(controller, makeChunk({ role: "assistant" }, null));
      try {
        while (true) {
          const { done, value } = await reader.read();
          if (done) break;
          buffer += decoder.decode(value, { stream: true });
          const lines = buffer.split("\n");
          buffer = lines.pop() || ""; // keep the trailing partial line
          for (const line of lines) {
            handleLine(controller, line);
          }
        }
        // Bug fix: flush decoder state and process the final partial line so a
        // stream ending without a trailing newline still delivers its data
        // (previously the leftover buffer was silently discarded).
        buffer += decoder.decode();
        if (buffer) {
          handleLine(controller, buffer);
        }
        // Bug fix: if the upstream never sent "event: done", clients would
        // previously hang waiting for finish_reason/[DONE]. Always terminate.
        finish(controller);
      } catch (error) {
        // Bug fix: surface read errors to the consumer instead of silently
        // closing the stream as if it completed normally.
        controller.error(error);
        return;
      } finally {
        reader.releaseLock();
      }
      controller.close();
    },
  });
}
|
|
373
|
+
/**
 * Execute a chat request by shelling out to the codebuddy CLI (`codebuddy -p`).
 * Streaming is not supported by the CLI, so the full result is gathered first;
 * for stream requests the complete text is replayed as a synthetic SSE stream.
 *
 * @param {object} openaiRequest - Parsed OpenAI-style request ({ model, stream, ... }).
 * @param {string} prompt - Flattened user prompt text.
 * @param {string|undefined} systemPrompt - Optional system prompt.
 * @returns {Promise<Response>} OpenAI-compatible Response (SSE or JSON).
 */
async function executeViaCli(openaiRequest, prompt, systemPrompt) {
  const { model, stream } = openaiRequest;

  // Wait for the complete CLI output before responding.
  const resultText = await executeCodeBuddyCLI(prompt, model, systemPrompt);

  if (stream) {
    // Replay the finished text as an SSE stream for streaming clients.
    return new Response(createOpenAIStreamResponse(resultText, model), {
      headers: {
        "Content-Type": "text/event-stream",
        "Cache-Control": "no-cache",
        "Connection": "keep-alive",
      },
    });
  }

  return new Response(createOpenAIResponse(resultText, model), {
    headers: { "Content-Type": "application/json" },
  });
}
|
|
236
394
|
// ============================================================================
|
|
237
395
|
// OAuth Flow Implementation (IOA Login)
|
|
238
396
|
// ============================================================================
|