@nextclaw/nextclaw-ncp-runtime-plugin-codex-sdk 0.1.27 → 0.1.29

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +101 -10
  2. package/package.json +4 -4
package/dist/index.js CHANGED
@@ -20,7 +20,7 @@ function resolveExternalModelProvider(params) {
20
20
  return explicitModelProvider;
21
21
  }
22
22
  const providerName = readOptionalString(params.providerName);
23
- if (providerName && !providerName.startsWith("custom-")) {
23
+ if (providerName) {
24
24
  return providerName;
25
25
  }
26
26
  const providerDisplayName = readOptionalString(params.providerDisplayName);
@@ -108,12 +108,13 @@ function buildCodexInputBuilder(runtimeAgent, workspace) {
108
108
  return async (input) => {
109
109
  const userText = readUserText(input);
110
110
  const metadata = input.metadata && typeof input.metadata === "object" && !Array.isArray(input.metadata) ? input.metadata : {};
111
- return runtimeAgent.buildRuntimeUserPrompt({
111
+ const prompt = runtimeAgent.buildRuntimeUserPrompt({
112
112
  workspace,
113
113
  sessionKey: input.sessionId,
114
114
  metadata,
115
115
  userMessage: userText
116
116
  });
117
+ return prompt;
117
118
  };
118
119
  }
119
120
 
@@ -523,7 +524,8 @@ async function callOpenAiCompatibleUpstream(params) {
523
524
  };
524
525
  }
525
526
 
526
- // src/codex-openai-responses-bridge-stream.ts
527
+ // src/codex-openai-responses-bridge-assistant-output.ts
528
+ import { normalizeAssistantText } from "@nextclaw/ncp";
527
529
  function extractAssistantText(content) {
528
530
  if (typeof content === "string") {
529
531
  return content;
@@ -543,17 +545,46 @@ function extractAssistantText(content) {
543
545
  return "";
544
546
  }).filter(Boolean).join("");
545
547
  }
546
- function buildOpenResponsesOutputItems(response, responseId) {
547
- const message = response.choices?.[0]?.message;
548
- if (!message) {
549
- return [];
550
- }
548
+ function extractAssistantOutput(message) {
549
+ const rawText = extractAssistantText(message?.content);
550
+ const normalized = normalizeAssistantText(rawText, "think-tags");
551
+ const explicitReasoning = readString(message?.reasoning_content);
552
+ const reasoning = explicitReasoning ?? readString(normalized.reasoning) ?? "";
553
+ const text = explicitReasoning ? readString(normalized.text) ?? readString(rawText) ?? "" : normalized.reasoning ? readString(normalized.text) ?? "" : readString(rawText) ?? "";
554
+ return {
555
+ text,
556
+ reasoning
557
+ };
558
+ }
559
+ function buildInProgressReasoningItem(item) {
560
+ return {
561
+ ...structuredClone(item),
562
+ status: "in_progress",
563
+ content: [],
564
+ summary: []
565
+ };
566
+ }
567
+ function buildAssistantOutputItems(params) {
568
+ const { text, reasoning } = extractAssistantOutput(params.message);
551
569
  const outputItems = [];
552
- const text = extractAssistantText(message.content).trim();
570
+ if (reasoning) {
571
+ outputItems.push({
572
+ type: "reasoning",
573
+ id: `${params.responseId}:reasoning:0`,
574
+ summary: [],
575
+ content: [
576
+ {
577
+ type: "reasoning_text",
578
+ text: reasoning
579
+ }
580
+ ],
581
+ status: "completed"
582
+ });
583
+ }
553
584
  if (text) {
554
585
  outputItems.push({
555
586
  type: "message",
556
- id: `${responseId}:message:0`,
587
+ id: `${params.responseId}:message:${outputItems.length}`,
557
588
  role: "assistant",
558
589
  status: "completed",
559
590
  content: [
@@ -565,6 +596,57 @@ function buildOpenResponsesOutputItems(response, responseId) {
565
596
  ]
566
597
  });
567
598
  }
599
+ return outputItems;
600
+ }
601
+ function writeReasoningOutputItemEvents(params) {
602
+ const itemId = readString(params.item.id);
603
+ const content = readArray(params.item.content);
604
+ const textPart = content.find((entry) => readString(readRecord(entry)?.type) === "reasoning_text");
605
+ const text = readString(readRecord(textPart)?.text) ?? "";
606
+ writeSseEvent(params.response, "response.output_item.added", {
607
+ type: "response.output_item.added",
608
+ sequence_number: nextSequenceNumber(params.sequenceState),
609
+ output_index: params.outputIndex,
610
+ item: buildInProgressReasoningItem(params.item)
611
+ });
612
+ if (itemId && text) {
613
+ writeSseEvent(params.response, "response.reasoning_text.delta", {
614
+ type: "response.reasoning_text.delta",
615
+ sequence_number: nextSequenceNumber(params.sequenceState),
616
+ output_index: params.outputIndex,
617
+ item_id: itemId,
618
+ content_index: 0,
619
+ delta: text
620
+ });
621
+ }
622
+ if (itemId) {
623
+ writeSseEvent(params.response, "response.reasoning_text.done", {
624
+ type: "response.reasoning_text.done",
625
+ sequence_number: nextSequenceNumber(params.sequenceState),
626
+ output_index: params.outputIndex,
627
+ item_id: itemId,
628
+ content_index: 0,
629
+ text
630
+ });
631
+ }
632
+ writeSseEvent(params.response, "response.output_item.done", {
633
+ type: "response.output_item.done",
634
+ sequence_number: nextSequenceNumber(params.sequenceState),
635
+ output_index: params.outputIndex,
636
+ item: params.item
637
+ });
638
+ }
639
+
640
+ // src/codex-openai-responses-bridge-stream.ts
641
+ function buildOpenResponsesOutputItems(response, responseId) {
642
+ const message = response.choices?.[0]?.message;
643
+ if (!message) {
644
+ return [];
645
+ }
646
+ const outputItems = buildAssistantOutputItems({
647
+ message,
648
+ responseId
649
+ });
568
650
  const toolCalls = readArray(message.tool_calls);
569
651
  toolCalls.forEach((entry, index) => {
570
652
  const toolCall = readRecord(entry);
@@ -736,6 +818,15 @@ function writeFunctionCallOutputItemEvents(params) {
736
818
  function writeResponseOutputItemEvents(params) {
737
819
  params.outputItems.forEach((item, outputIndex) => {
738
820
  const type = readString(item.type);
821
+ if (type === "reasoning") {
822
+ writeReasoningOutputItemEvents({
823
+ response: params.response,
824
+ item,
825
+ outputIndex,
826
+ sequenceState: params.sequenceState
827
+ });
828
+ return;
829
+ }
739
830
  if (type === "message") {
740
831
  writeMessageOutputItemEvents({
741
832
  response: params.response,
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@nextclaw/nextclaw-ncp-runtime-plugin-codex-sdk",
3
- "version": "0.1.27",
3
+ "version": "0.1.29",
4
4
  "private": false,
5
5
  "description": "NextClaw plugin that registers Codex SDK as an optional NCP runtime.",
6
6
  "type": "module",
@@ -21,9 +21,9 @@
21
21
  ]
22
22
  },
23
23
  "dependencies": {
24
- "@nextclaw/ncp-toolkit": "0.4.4",
25
- "@nextclaw/nextclaw-ncp-runtime-codex-sdk": "0.1.5",
26
- "@nextclaw/ncp": "0.4.0"
24
+ "@nextclaw/ncp": "0.4.0",
25
+ "@nextclaw/nextclaw-ncp-runtime-codex-sdk": "0.1.6",
26
+ "@nextclaw/ncp-toolkit": "0.4.4"
27
27
  },
28
28
  "devDependencies": {
29
29
  "@types/node": "^20.17.6",