@nextclaw/nextclaw-ncp-runtime-plugin-codex-sdk 0.1.26 → 0.1.28

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +172 -47
  2. package/package.json +3 -3
package/dist/index.js CHANGED
@@ -163,29 +163,7 @@ function nextSequenceNumber(state) {
163
163
  return nextValue;
164
164
  }
165
165
 
166
- // src/codex-openai-responses-bridge-request.ts
167
- function stripModelPrefix(model, prefixes) {
168
- const normalizedModel = model.trim();
169
- for (const prefix of prefixes) {
170
- const normalizedPrefix = prefix.trim().toLowerCase();
171
- if (!normalizedPrefix) {
172
- continue;
173
- }
174
- const candidatePrefix = `${normalizedPrefix}/`;
175
- if (normalizedModel.toLowerCase().startsWith(candidatePrefix)) {
176
- return normalizedModel.slice(candidatePrefix.length);
177
- }
178
- }
179
- return normalizedModel;
180
- }
181
- function resolveUpstreamModel(requestedModel, config) {
182
- const prefixes = (config.modelPrefixes ?? []).filter((value) => value.trim().length > 0);
183
- const model = stripModelPrefix(readString(requestedModel) ?? "", prefixes) || stripModelPrefix(config.defaultModel ?? "", prefixes);
184
- if (!model) {
185
- throw new Error("Codex bridge could not resolve an upstream model.");
186
- }
187
- return model;
188
- }
166
+ // src/codex-openai-responses-bridge-message-content.ts
189
167
  function normalizeTextPart(value) {
190
168
  const record = readRecord(value);
191
169
  if (!record) {
@@ -271,6 +249,30 @@ function buildChatContent(content) {
271
249
  }
272
250
  return chatContent;
273
251
  }
252
+ function mergeChatContent(left, right) {
253
+ if (left === null) {
254
+ return right;
255
+ }
256
+ if (right === null) {
257
+ return left;
258
+ }
259
+ if (typeof left === "string" && typeof right === "string") {
260
+ return [left, right].filter((value) => value.length > 0).join("\n\n");
261
+ }
262
+ const normalizedLeft = typeof left === "string" ? [
263
+ {
264
+ type: "text",
265
+ text: left
266
+ }
267
+ ] : left;
268
+ const normalizedRight = typeof right === "string" ? [
269
+ {
270
+ type: "text",
271
+ text: right
272
+ }
273
+ ] : right;
274
+ return [...normalizedLeft, ...normalizedRight];
275
+ }
274
276
  function readAssistantMessageText(content) {
275
277
  if (typeof content === "string") {
276
278
  return content;
@@ -280,6 +282,30 @@ function readAssistantMessageText(content) {
280
282
  }
281
283
  return content.filter((entry) => entry.type === "text").map((entry) => readString(entry.text) ?? "").join("\n");
282
284
  }
285
+
286
+ // src/codex-openai-responses-bridge-request.ts
287
+ function stripModelPrefix(model, prefixes) {
288
+ const normalizedModel = model.trim();
289
+ for (const prefix of prefixes) {
290
+ const normalizedPrefix = prefix.trim().toLowerCase();
291
+ if (!normalizedPrefix) {
292
+ continue;
293
+ }
294
+ const candidatePrefix = `${normalizedPrefix}/`;
295
+ if (normalizedModel.toLowerCase().startsWith(candidatePrefix)) {
296
+ return normalizedModel.slice(candidatePrefix.length);
297
+ }
298
+ }
299
+ return normalizedModel;
300
+ }
301
+ function resolveUpstreamModel(requestedModel, config) {
302
+ const prefixes = (config.modelPrefixes ?? []).filter((value) => value.trim().length > 0);
303
+ const model = stripModelPrefix(readString(requestedModel) ?? "", prefixes) || stripModelPrefix(config.defaultModel ?? "", prefixes);
304
+ if (!model) {
305
+ throw new Error("Codex bridge could not resolve an upstream model.");
306
+ }
307
+ return model;
308
+ }
283
309
  function appendMessageInputItem(params) {
284
310
  const role = readString(params.item.role);
285
311
  const content = buildChatContent(params.item.content);
@@ -288,16 +314,20 @@ function appendMessageInputItem(params) {
288
314
  if (text.trim()) {
289
315
  params.assistantTextParts.push(text);
290
316
  }
291
- return;
317
+ return params.systemContent;
292
318
  }
293
319
  params.flushAssistant();
294
320
  const normalizedRole = role === "developer" ? "system" : role;
295
- if ((normalizedRole === "system" || normalizedRole === "user") && content !== null) {
321
+ if (normalizedRole === "system") {
322
+ return mergeChatContent(params.systemContent, content);
323
+ }
324
+ if (normalizedRole === "user" && content !== null) {
296
325
  params.messages.push({
297
- role: normalizedRole,
326
+ role: "user",
298
327
  content
299
328
  });
300
329
  }
330
+ return params.systemContent;
301
331
  }
302
332
  function appendFunctionCallItem(params) {
303
333
  const name = readString(params.item.name);
@@ -329,20 +359,18 @@ function appendFunctionCallOutputItem(params) {
329
359
  }
330
360
  function buildOpenAiMessages(input, instructions) {
331
361
  const messages = [];
332
- const instructionText = readString(instructions);
333
- if (instructionText) {
362
+ let systemContent = readString(instructions) ?? null;
363
+ if (typeof input === "string") {
334
364
  messages.push({
335
- role: "system",
336
- content: instructionText
365
+ role: "user",
366
+ content: input
337
367
  });
338
- }
339
- if (typeof input === "string") {
340
- return [
341
- ...messages,
368
+ return systemContent === null ? messages : [
342
369
  {
343
- role: "user",
344
- content: input
345
- }
370
+ role: "system",
371
+ content: systemContent
372
+ },
373
+ ...messages
346
374
  ];
347
375
  }
348
376
  const assistantTextParts = [];
@@ -368,8 +396,9 @@ function buildOpenAiMessages(input, instructions) {
368
396
  }
369
397
  const type = readString(item.type);
370
398
  if (type === "message") {
371
- appendMessageInputItem({
399
+ systemContent = appendMessageInputItem({
372
400
  messages,
401
+ systemContent,
373
402
  assistantTextParts,
374
403
  assistantToolCalls,
375
404
  item,
@@ -393,7 +422,13 @@ function buildOpenAiMessages(input, instructions) {
393
422
  }
394
423
  }
395
424
  flushAssistant();
396
- return messages;
425
+ return systemContent === null ? messages : [
426
+ {
427
+ role: "system",
428
+ content: systemContent
429
+ },
430
+ ...messages
431
+ ];
397
432
  }
398
433
  function toOpenAiTools(value) {
399
434
  const tools = [];
@@ -488,7 +523,8 @@ async function callOpenAiCompatibleUpstream(params) {
488
523
  };
489
524
  }
490
525
 
491
- // src/codex-openai-responses-bridge-stream.ts
526
+ // src/codex-openai-responses-bridge-assistant-output.ts
527
+ import { normalizeAssistantText } from "@nextclaw/ncp";
492
528
  function extractAssistantText(content) {
493
529
  if (typeof content === "string") {
494
530
  return content;
@@ -508,17 +544,46 @@ function extractAssistantText(content) {
508
544
  return "";
509
545
  }).filter(Boolean).join("");
510
546
  }
511
- function buildOpenResponsesOutputItems(response, responseId) {
512
- const message = response.choices?.[0]?.message;
513
- if (!message) {
514
- return [];
515
- }
547
+ function extractAssistantOutput(message) {
548
+ const rawText = extractAssistantText(message?.content);
549
+ const normalized = normalizeAssistantText(rawText, "think-tags");
550
+ const explicitReasoning = readString(message?.reasoning_content);
551
+ const reasoning = explicitReasoning ?? readString(normalized.reasoning) ?? "";
552
+ const text = explicitReasoning ? readString(normalized.text) ?? readString(rawText) ?? "" : normalized.reasoning ? readString(normalized.text) ?? "" : readString(rawText) ?? "";
553
+ return {
554
+ text,
555
+ reasoning
556
+ };
557
+ }
558
+ function buildInProgressReasoningItem(item) {
559
+ return {
560
+ ...structuredClone(item),
561
+ status: "in_progress",
562
+ content: [],
563
+ summary: []
564
+ };
565
+ }
566
+ function buildAssistantOutputItems(params) {
567
+ const { text, reasoning } = extractAssistantOutput(params.message);
516
568
  const outputItems = [];
517
- const text = extractAssistantText(message.content).trim();
569
+ if (reasoning) {
570
+ outputItems.push({
571
+ type: "reasoning",
572
+ id: `${params.responseId}:reasoning:0`,
573
+ summary: [],
574
+ content: [
575
+ {
576
+ type: "reasoning_text",
577
+ text: reasoning
578
+ }
579
+ ],
580
+ status: "completed"
581
+ });
582
+ }
518
583
  if (text) {
519
584
  outputItems.push({
520
585
  type: "message",
521
- id: `${responseId}:message:0`,
586
+ id: `${params.responseId}:message:${outputItems.length}`,
522
587
  role: "assistant",
523
588
  status: "completed",
524
589
  content: [
@@ -530,6 +595,57 @@ function buildOpenResponsesOutputItems(response, responseId) {
530
595
  ]
531
596
  });
532
597
  }
598
+ return outputItems;
599
+ }
600
+ function writeReasoningOutputItemEvents(params) {
601
+ const itemId = readString(params.item.id);
602
+ const content = readArray(params.item.content);
603
+ const textPart = content.find((entry) => readString(readRecord(entry)?.type) === "reasoning_text");
604
+ const text = readString(readRecord(textPart)?.text) ?? "";
605
+ writeSseEvent(params.response, "response.output_item.added", {
606
+ type: "response.output_item.added",
607
+ sequence_number: nextSequenceNumber(params.sequenceState),
608
+ output_index: params.outputIndex,
609
+ item: buildInProgressReasoningItem(params.item)
610
+ });
611
+ if (itemId && text) {
612
+ writeSseEvent(params.response, "response.reasoning_text.delta", {
613
+ type: "response.reasoning_text.delta",
614
+ sequence_number: nextSequenceNumber(params.sequenceState),
615
+ output_index: params.outputIndex,
616
+ item_id: itemId,
617
+ content_index: 0,
618
+ delta: text
619
+ });
620
+ }
621
+ if (itemId) {
622
+ writeSseEvent(params.response, "response.reasoning_text.done", {
623
+ type: "response.reasoning_text.done",
624
+ sequence_number: nextSequenceNumber(params.sequenceState),
625
+ output_index: params.outputIndex,
626
+ item_id: itemId,
627
+ content_index: 0,
628
+ text
629
+ });
630
+ }
631
+ writeSseEvent(params.response, "response.output_item.done", {
632
+ type: "response.output_item.done",
633
+ sequence_number: nextSequenceNumber(params.sequenceState),
634
+ output_index: params.outputIndex,
635
+ item: params.item
636
+ });
637
+ }
638
+
639
+ // src/codex-openai-responses-bridge-stream.ts
640
+ function buildOpenResponsesOutputItems(response, responseId) {
641
+ const message = response.choices?.[0]?.message;
642
+ if (!message) {
643
+ return [];
644
+ }
645
+ const outputItems = buildAssistantOutputItems({
646
+ message,
647
+ responseId
648
+ });
533
649
  const toolCalls = readArray(message.tool_calls);
534
650
  toolCalls.forEach((entry, index) => {
535
651
  const toolCall = readRecord(entry);
@@ -701,6 +817,15 @@ function writeFunctionCallOutputItemEvents(params) {
701
817
  function writeResponseOutputItemEvents(params) {
702
818
  params.outputItems.forEach((item, outputIndex) => {
703
819
  const type = readString(item.type);
820
+ if (type === "reasoning") {
821
+ writeReasoningOutputItemEvents({
822
+ response: params.response,
823
+ item,
824
+ outputIndex,
825
+ sequenceState: params.sequenceState
826
+ });
827
+ return;
828
+ }
704
829
  if (type === "message") {
705
830
  writeMessageOutputItemEvents({
706
831
  response: params.response,
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@nextclaw/nextclaw-ncp-runtime-plugin-codex-sdk",
3
- "version": "0.1.26",
3
+ "version": "0.1.28",
4
4
  "private": false,
5
5
  "description": "NextClaw plugin that registers Codex SDK as an optional NCP runtime.",
6
6
  "type": "module",
@@ -22,8 +22,8 @@
22
22
  },
23
23
  "dependencies": {
24
24
  "@nextclaw/ncp-toolkit": "0.4.4",
25
- "@nextclaw/nextclaw-ncp-runtime-codex-sdk": "0.1.5",
26
- "@nextclaw/ncp": "0.4.0"
25
+ "@nextclaw/ncp": "0.4.0",
26
+ "@nextclaw/nextclaw-ncp-runtime-codex-sdk": "0.1.5"
27
27
  },
28
28
  "devDependencies": {
29
29
  "@types/node": "^20.17.6",