opencode-codebuddy-external-auth 1.0.9 → 1.0.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/plugin.js +178 -19
- package/package.json +1 -1
package/dist/plugin.js
CHANGED
|
@@ -8,9 +8,13 @@ const PROVIDER_ID = "codebuddy-external";
|
|
|
8
8
|
const CONFIG = {
  // The IOA build authenticates against copilot.tencent.com.
  serverUrl: "https://copilot.tencent.com",
  // Endpoint of a locally running `codebuddy --serve` proxy.
  localServeUrl: "http://127.0.0.1:3000",
  // Platform identifier reported to the backend.
  platform: "CLI",
  appVersion: "2.37.20",
  // true: route requests through the local HTTP API; false: shell out to the CLI.
  useHttpApi: true,
};
|
|
15
19
|
// ============================================================================
|
|
16
20
|
// Utility Functions
|
|
@@ -147,7 +151,8 @@ async function executeCodeBuddyCLI(prompt, model, systemPrompt) {
|
|
|
147
151
|
args.push("--system-prompt", systemPrompt);
|
|
148
152
|
}
|
|
149
153
|
args.push(prompt);
|
|
150
|
-
|
|
154
|
+
// Debug log (可注释掉)
|
|
155
|
+
// console.log(`[codebuddy-external] Executing: codebuddy ${args.join(" ").substring(0, 100)}...`);
|
|
151
156
|
const child = spawn("codebuddy", args, {
|
|
152
157
|
env: { ...process.env },
|
|
153
158
|
stdio: ["pipe", "pipe", "pipe"],
|
|
@@ -199,29 +204,17 @@ function createProxyFetch() {
|
|
|
199
204
|
const openaiRequest = JSON.parse(typeof body === "string" ? body : await new Response(body).text());
|
|
200
205
|
// Convert to CodeBuddy format
|
|
201
206
|
const { prompt, systemPrompt } = convertMessagesToPrompt(openaiRequest.messages);
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
const resultText = await executeCodeBuddyCLI(prompt, openaiRequest.model, systemPrompt);
|
|
206
|
-
// Convert response to OpenAI format
|
|
207
|
-
if (openaiRequest.stream) {
|
|
208
|
-
return new Response(createOpenAIStreamResponse(resultText, openaiRequest.model), {
|
|
209
|
-
headers: {
|
|
210
|
-
"Content-Type": "text/event-stream",
|
|
211
|
-
"Cache-Control": "no-cache",
|
|
212
|
-
"Connection": "keep-alive",
|
|
213
|
-
},
|
|
214
|
-
});
|
|
207
|
+
// 根据配置选择 HTTP API 模式或 CLI 模式
|
|
208
|
+
if (CONFIG.useHttpApi) {
|
|
209
|
+
return await executeViaHttpApi(openaiRequest, prompt, systemPrompt);
|
|
215
210
|
}
|
|
216
211
|
else {
|
|
217
|
-
return
|
|
218
|
-
headers: { "Content-Type": "application/json" },
|
|
219
|
-
});
|
|
212
|
+
return await executeViaCli(openaiRequest, prompt, systemPrompt);
|
|
220
213
|
}
|
|
221
214
|
}
|
|
222
215
|
catch (error) {
|
|
223
|
-
console.error(`[codebuddy-external]
|
|
224
|
-
return new Response(JSON.stringify({ error: `CodeBuddy
|
|
216
|
+
console.error(`[codebuddy-external] Error:`, error);
|
|
217
|
+
return new Response(JSON.stringify({ error: `CodeBuddy error: ${error}` }), {
|
|
225
218
|
status: 500,
|
|
226
219
|
headers: { "Content-Type": "application/json" },
|
|
227
220
|
});
|
|
@@ -231,6 +224,172 @@ function createProxyFetch() {
|
|
|
231
224
|
return fetch(url, init);
|
|
232
225
|
};
|
|
233
226
|
}
|
|
227
|
+
/**
 * Execute a chat request via the HTTP API of a local `codebuddy --serve`
 * process and convert the result into an OpenAI-compatible Response.
 * Supports streaming responses.
 *
 * @param {object} openaiRequest - Parsed OpenAI-style request (model, stream, ...).
 * @param {string} prompt - User prompt derived from the request messages.
 * @param {string|undefined} systemPrompt - Optional system prompt.
 * @returns {Promise<Response>} OpenAI-format response (SSE stream or JSON).
 * @throws {Error} When the local serve endpoint answers with a non-2xx status.
 */
async function executeViaHttpApi(openaiRequest, prompt, systemPrompt) {
  // Build the CodeBuddy /agent request payload.
  const agentRequest = {
    prompt,
    model: openaiRequest.model,
    outputFormat: openaiRequest.stream ? "stream-json" : "json",
  };
  if (systemPrompt) {
    agentRequest.systemPrompt = systemPrompt;
  }
  // Call the local `codebuddy --serve` proxy.
  const response = await fetch(`${CONFIG.localServeUrl}/agent`, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
    },
    body: JSON.stringify(agentRequest),
  });
  if (!response.ok) {
    const errorText = await response.text();
    throw new Error(`HTTP API error: ${response.status} - ${errorText}`);
  }
  // Streaming: re-emit the CodeBuddy SSE stream in OpenAI chunk format.
  if (openaiRequest.stream && response.body) {
    return new Response(transformCodeBuddyStreamToOpenAI(response.body, openaiRequest.model), {
      headers: {
        "Content-Type": "text/event-stream",
        "Cache-Control": "no-cache",
        "Connection": "keep-alive",
      },
    });
  }
  // Non-streaming: the body may be plain text or a JSON envelope with `output`.
  const responseText = await response.text();
  let resultText = responseText;
  try {
    const jsonResponse = JSON.parse(responseText);
    // Fix: accept any string `output`, including "". The previous truthiness
    // check (`if (jsonResponse.output)`) leaked the raw JSON envelope to the
    // client whenever the agent produced an empty reply, and would also pass
    // through non-string values unrendered.
    if (typeof jsonResponse.output === "string") {
      resultText = jsonResponse.output;
    }
  } catch {
    // Not JSON — use the raw text as-is.
  }
  return new Response(createOpenAIResponse(resultText, openaiRequest.model), {
    headers: { "Content-Type": "application/json" },
  });
}
|
|
280
|
+
/**
 * Transform a CodeBuddy SSE stream into an OpenAI-compatible SSE stream.
 *
 * CodeBuddy event shape:
 *   event: next
 *   data: {"type": "assistant", "message": {"content": [{"type": "text", "text": "..."}]}}
 *
 * OpenAI chunk shape:
 *   data: {"choices": [{"delta": {"content": "..."}}]}
 *
 * @param {ReadableStream<Uint8Array>} sourceStream - Raw CodeBuddy SSE body.
 * @param {string} model - Model name echoed into every emitted chunk.
 * @returns {ReadableStream<Uint8Array>} OpenAI-format SSE byte stream.
 */
function transformCodeBuddyStreamToOpenAI(sourceStream, model) {
  const encoder = new TextEncoder();
  const decoder = new TextDecoder();
  const id = `chatcmpl-${Date.now()}`;
  const created = Math.floor(Date.now() / 1000);
  let buffer = "";
  // Whether the terminal stop chunk + [DONE] sentinel have been emitted.
  // (Replaces the previous `sentRole` flag, which was written but never read.)
  let doneSent = false;
  // Build one OpenAI chat.completion.chunk envelope.
  const makeChunk = (delta, finishReason) => ({
    id,
    object: "chat.completion.chunk",
    created,
    model,
    choices: [{ index: 0, delta, finish_reason: finishReason }],
  });
  return new ReadableStream({
    async start(controller) {
      const reader = sourceStream.getReader();
      const emit = (obj) => controller.enqueue(encoder.encode(`data: ${JSON.stringify(obj)}\n\n`));
      const emitDone = () => {
        emit(makeChunk({}, "stop"));
        controller.enqueue(encoder.encode("data: [DONE]\n\n"));
        doneSent = true;
      };
      // Announce the assistant role first, as real OpenAI streams do.
      emit(makeChunk({ role: "assistant" }, null));
      try {
        while (true) {
          const { done, value } = await reader.read();
          if (done)
            break;
          buffer += decoder.decode(value, { stream: true });
          // Process complete SSE lines; keep the trailing partial line buffered.
          const lines = buffer.split("\n");
          buffer = lines.pop() || "";
          for (const line of lines) {
            if (line.startsWith("data: ")) {
              const dataStr = line.slice(6).trim();
              if (!dataStr || dataStr === "{}")
                continue;
              try {
                const data = JSON.parse(dataStr);
                // Extract text content from assistant messages.
                let text = "";
                if (data.type === "assistant" && data.message?.content) {
                  for (const part of data.message.content) {
                    if (part.type === "text" && part.text) {
                      text += part.text;
                    }
                  }
                }
                if (text) {
                  emit(makeChunk({ content: text }, null));
                }
              } catch {
                // Ignore malformed JSON payloads.
              }
            } else if (line.startsWith("event: done")) {
              emitDone();
            }
          }
        }
        // Fix: if the source ends without an `event: done` line, still emit the
        // stop chunk and [DONE] sentinel — otherwise OpenAI SSE clients never
        // see stream termination and hang.
        if (!doneSent) {
          emitDone();
        }
      } finally {
        reader.releaseLock();
        controller.close();
      }
    },
  });
}
|
|
372
|
+
/**
 * Execute a chat request by shelling out to the `codebuddy -p` CLI.
 * The CLI cannot stream, so streaming callers receive the complete reply
 * replayed as a simulated OpenAI SSE stream once the process finishes.
 *
 * @param {object} openaiRequest - Parsed OpenAI-style request.
 * @param {string} prompt - User prompt derived from the request messages.
 * @param {string|undefined} systemPrompt - Optional system prompt.
 * @returns {Promise<Response>} OpenAI-format response (SSE or JSON).
 */
async function executeViaCli(openaiRequest, prompt, systemPrompt) {
  const resultText = await executeCodeBuddyCLI(prompt, openaiRequest.model, systemPrompt);
  // Plain JSON reply for non-streaming callers.
  if (!openaiRequest.stream) {
    return new Response(createOpenAIResponse(resultText, openaiRequest.model), {
      headers: { "Content-Type": "application/json" },
    });
  }
  // Streaming callers get the buffered result wrapped as SSE.
  const sseHeaders = {
    "Content-Type": "text/event-stream",
    "Cache-Control": "no-cache",
    "Connection": "keep-alive",
  };
  return new Response(createOpenAIStreamResponse(resultText, openaiRequest.model), {
    headers: sseHeaders,
  });
}
|
|
234
393
|
// ============================================================================
|
|
235
394
|
// OAuth Flow Implementation (IOA Login)
|
|
236
395
|
// ============================================================================
|