@nextclaw/nextclaw-ncp-runtime-plugin-codex-sdk 0.1.27 → 0.1.28

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +98 -8
  2. package/package.json +3 -3
package/dist/index.js CHANGED
@@ -523,7 +523,8 @@ async function callOpenAiCompatibleUpstream(params) {
523
523
  };
524
524
  }
525
525
 
526
- // src/codex-openai-responses-bridge-stream.ts
526
+ // src/codex-openai-responses-bridge-assistant-output.ts
527
+ import { normalizeAssistantText } from "@nextclaw/ncp";
527
528
  function extractAssistantText(content) {
528
529
  if (typeof content === "string") {
529
530
  return content;
@@ -543,17 +544,46 @@ function extractAssistantText(content) {
543
544
  return "";
544
545
  }).filter(Boolean).join("");
545
546
  }
546
- function buildOpenResponsesOutputItems(response, responseId) {
547
- const message = response.choices?.[0]?.message;
548
- if (!message) {
549
- return [];
550
- }
547
+ function extractAssistantOutput(message) {
548
+ const rawText = extractAssistantText(message?.content);
549
+ const normalized = normalizeAssistantText(rawText, "think-tags");
550
+ const explicitReasoning = readString(message?.reasoning_content);
551
+ const reasoning = explicitReasoning ?? readString(normalized.reasoning) ?? "";
552
+ const text = explicitReasoning ? readString(normalized.text) ?? readString(rawText) ?? "" : normalized.reasoning ? readString(normalized.text) ?? "" : readString(rawText) ?? "";
553
+ return {
554
+ text,
555
+ reasoning
556
+ };
557
+ }
558
+ function buildInProgressReasoningItem(item) {
559
+ return {
560
+ ...structuredClone(item),
561
+ status: "in_progress",
562
+ content: [],
563
+ summary: []
564
+ };
565
+ }
566
+ function buildAssistantOutputItems(params) {
567
+ const { text, reasoning } = extractAssistantOutput(params.message);
551
568
  const outputItems = [];
552
- const text = extractAssistantText(message.content).trim();
569
+ if (reasoning) {
570
+ outputItems.push({
571
+ type: "reasoning",
572
+ id: `${params.responseId}:reasoning:0`,
573
+ summary: [],
574
+ content: [
575
+ {
576
+ type: "reasoning_text",
577
+ text: reasoning
578
+ }
579
+ ],
580
+ status: "completed"
581
+ });
582
+ }
553
583
  if (text) {
554
584
  outputItems.push({
555
585
  type: "message",
556
- id: `${responseId}:message:0`,
586
+ id: `${params.responseId}:message:${outputItems.length}`,
557
587
  role: "assistant",
558
588
  status: "completed",
559
589
  content: [
@@ -565,6 +595,57 @@ function buildOpenResponsesOutputItems(response, responseId) {
565
595
  ]
566
596
  });
567
597
  }
598
+ return outputItems;
599
+ }
600
+ function writeReasoningOutputItemEvents(params) {
601
+ const itemId = readString(params.item.id);
602
+ const content = readArray(params.item.content);
603
+ const textPart = content.find((entry) => readString(readRecord(entry)?.type) === "reasoning_text");
604
+ const text = readString(readRecord(textPart)?.text) ?? "";
605
+ writeSseEvent(params.response, "response.output_item.added", {
606
+ type: "response.output_item.added",
607
+ sequence_number: nextSequenceNumber(params.sequenceState),
608
+ output_index: params.outputIndex,
609
+ item: buildInProgressReasoningItem(params.item)
610
+ });
611
+ if (itemId && text) {
612
+ writeSseEvent(params.response, "response.reasoning_text.delta", {
613
+ type: "response.reasoning_text.delta",
614
+ sequence_number: nextSequenceNumber(params.sequenceState),
615
+ output_index: params.outputIndex,
616
+ item_id: itemId,
617
+ content_index: 0,
618
+ delta: text
619
+ });
620
+ }
621
+ if (itemId) {
622
+ writeSseEvent(params.response, "response.reasoning_text.done", {
623
+ type: "response.reasoning_text.done",
624
+ sequence_number: nextSequenceNumber(params.sequenceState),
625
+ output_index: params.outputIndex,
626
+ item_id: itemId,
627
+ content_index: 0,
628
+ text
629
+ });
630
+ }
631
+ writeSseEvent(params.response, "response.output_item.done", {
632
+ type: "response.output_item.done",
633
+ sequence_number: nextSequenceNumber(params.sequenceState),
634
+ output_index: params.outputIndex,
635
+ item: params.item
636
+ });
637
+ }
638
+
639
+ // src/codex-openai-responses-bridge-stream.ts
640
+ function buildOpenResponsesOutputItems(response, responseId) {
641
+ const message = response.choices?.[0]?.message;
642
+ if (!message) {
643
+ return [];
644
+ }
645
+ const outputItems = buildAssistantOutputItems({
646
+ message,
647
+ responseId
648
+ });
568
649
  const toolCalls = readArray(message.tool_calls);
569
650
  toolCalls.forEach((entry, index) => {
570
651
  const toolCall = readRecord(entry);
@@ -736,6 +817,15 @@ function writeFunctionCallOutputItemEvents(params) {
736
817
  function writeResponseOutputItemEvents(params) {
737
818
  params.outputItems.forEach((item, outputIndex) => {
738
819
  const type = readString(item.type);
820
+ if (type === "reasoning") {
821
+ writeReasoningOutputItemEvents({
822
+ response: params.response,
823
+ item,
824
+ outputIndex,
825
+ sequenceState: params.sequenceState
826
+ });
827
+ return;
828
+ }
739
829
  if (type === "message") {
740
830
  writeMessageOutputItemEvents({
741
831
  response: params.response,
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@nextclaw/nextclaw-ncp-runtime-plugin-codex-sdk",
3
- "version": "0.1.27",
3
+ "version": "0.1.28",
4
4
  "private": false,
5
5
  "description": "NextClaw plugin that registers Codex SDK as an optional NCP runtime.",
6
6
  "type": "module",
@@ -22,8 +22,8 @@
22
22
  },
23
23
  "dependencies": {
24
24
  "@nextclaw/ncp-toolkit": "0.4.4",
25
- "@nextclaw/nextclaw-ncp-runtime-codex-sdk": "0.1.5",
26
- "@nextclaw/ncp": "0.4.0"
25
+ "@nextclaw/ncp": "0.4.0",
26
+ "@nextclaw/nextclaw-ncp-runtime-codex-sdk": "0.1.5"
27
27
  },
28
28
  "devDependencies": {
29
29
  "@types/node": "^20.17.6",