@nextclaw/nextclaw-ncp-runtime-plugin-codex-sdk 0.1.21 → 0.1.22

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,312 +0,0 @@
1
- import {
2
- nextSequenceNumber,
3
- readArray,
4
- readNumber,
5
- readRecord,
6
- readString,
7
- writeSseEvent
8
- } from "./codex-openai-responses-bridge-shared.js";
9
/**
 * Flattens assistant message `content` into a single string.
 *
 * Accepts either a raw string or an array of content parts; only parts whose
 * `type` is "text" or "output_text" contribute their `text` field. Anything
 * else (non-records, other part types) is ignored.
 */
function extractAssistantText(content) {
  if (typeof content === "string") {
    return content;
  }
  if (!Array.isArray(content)) {
    return "";
  }
  const textChunks = [];
  for (const entry of content) {
    const record = readRecord(entry);
    if (!record) {
      continue;
    }
    const partType = readString(record.type);
    if (partType !== "text" && partType !== "output_text") {
      continue;
    }
    const partText = readString(record.text);
    if (partText) {
      textChunks.push(partText);
    }
  }
  return textChunks.join("");
}
28
/**
 * Converts the first Chat Completions choice of `response` into Responses-API
 * output items: at most one assistant "message" item (when the message has
 * non-empty text) followed by one "function_call" item per named tool call.
 *
 * Item ids are derived from `responseId`; a tool call without its own id gets
 * a synthetic `${responseId}:call:${index}` call_id.
 */
function buildOpenResponsesOutputItems(response, responseId) {
  const message = response.choices?.[0]?.message;
  if (!message) {
    return [];
  }
  const items = [];
  const assistantText = extractAssistantText(message.content).trim();
  if (assistantText) {
    items.push({
      type: "message",
      id: `${responseId}:message:0`,
      role: "assistant",
      status: "completed",
      content: [
        {
          type: "output_text",
          text: assistantText,
          annotations: []
        }
      ]
    });
  }
  for (const [index, entry] of readArray(message.tool_calls).entries()) {
    const toolCall = readRecord(entry);
    const fn = readRecord(toolCall?.function);
    const name = readString(fn?.name);
    if (!name) {
      // Unnamed tool calls cannot be represented; skip them.
      continue;
    }
    const argumentsText = readString(fn?.arguments) ?? "{}";
    const callId = readString(toolCall?.id) ?? `${responseId}:call:${index}`;
    items.push({
      type: "function_call",
      id: `${responseId}:function:${index}`,
      call_id: callId,
      name,
      arguments: argumentsText,
      status: "completed"
    });
  }
  return items;
}
71
/**
 * Maps Chat Completions usage counters onto the Responses-API usage shape.
 *
 * Missing counters default to 0 (or, for the total, to the sum of input and
 * output); every value is truncated to a non-negative integer. The `*_details`
 * fields are always null — the bridge has no token breakdown to report.
 */
function buildUsage(response) {
  const clampCount = (value) => Math.max(0, Math.trunc(value));
  const inputTokens = clampCount(readNumber(response.usage?.prompt_tokens) ?? 0);
  const outputTokens = clampCount(readNumber(response.usage?.completion_tokens) ?? 0);
  const totalTokens = clampCount(
    readNumber(response.usage?.total_tokens) ?? inputTokens + outputTokens
  );
  return {
    input_tokens: inputTokens,
    input_tokens_details: null,
    output_tokens: outputTokens,
    output_tokens_details: null,
    total_tokens: totalTokens
  };
}
89
/**
 * Builds a Responses-API response resource object.
 *
 * params: { responseId, model, outputItems, usage, status? } — `status`
 * defaults to "completed"; `created_at` is the current Unix time in seconds;
 * `error` is always null (errors are reported through other channels).
 */
function buildResponseResource(params) {
  const { responseId, model, outputItems, usage, status } = params;
  return {
    id: responseId,
    object: "response",
    created_at: Math.floor(Date.now() / 1000),
    status: status ?? "completed",
    model,
    output: outputItems,
    usage,
    error: null
  };
}
101
// Deep-copies an output item so per-event mutations (e.g. blanking content or
// arguments for "in_progress" snapshots) never leak into the canonical item.
function cloneRecord(value) {
  const copy = structuredClone(value);
  return copy;
}
104
/**
 * Produces the "in_progress" snapshot of an output item for the
 * response.output_item.added event: message items get empty content,
 * function_call items get empty arguments, anything else is cloned as-is.
 */
function buildInProgressOutputItem(item) {
  switch (readString(item.type)) {
    case "message":
      return {
        ...cloneRecord(item),
        status: "in_progress",
        content: []
      };
    case "function_call":
      return {
        ...cloneRecord(item),
        status: "in_progress",
        arguments: ""
      };
    default:
      return cloneRecord(item);
  }
}
122
/**
 * Emits the Responses-API SSE event sequence for one assistant "message"
 * output item, in this fixed order: output_item.added →
 * content_part.added → output_text.delta (only when there is text) →
 * output_text.done → content_part.done → output_item.done.
 *
 * params: { response, item, outputIndex, sequenceState } — `response` is the
 * HTTP response being streamed to; `sequenceState` is the shared counter fed
 * to nextSequenceNumber so sequence_number stays monotonic across all events
 * of the stream.
 */
function writeMessageOutputItemEvents(params) {
  const itemId = readString(params.item.id);
  const content = readArray(params.item.content);
  // The bridge only ever emits a single output_text part, so content_index
  // is hard-coded to 0 below.
  const textPart = content.find((entry) => readString(readRecord(entry)?.type) === "output_text");
  const text = readString(readRecord(textPart)?.text) ?? "";
  writeSseEvent(params.response, "response.output_item.added", {
    type: "response.output_item.added",
    sequence_number: nextSequenceNumber(params.sequenceState),
    output_index: params.outputIndex,
    // Announce the item with in_progress status and empty content; the full
    // item is repeated verbatim in output_item.done at the end.
    item: buildInProgressOutputItem(params.item)
  });
  // Content-part / text events all carry item_id, so items without an id skip
  // straight to output_item.done.
  if (itemId) {
    writeSseEvent(params.response, "response.content_part.added", {
      type: "response.content_part.added",
      sequence_number: nextSequenceNumber(params.sequenceState),
      output_index: params.outputIndex,
      item_id: itemId,
      content_index: 0,
      part: {
        type: "output_text",
        text: "",
        annotations: []
      }
    });
    // The whole message text arrives as one delta — the upstream response is
    // already fully buffered by the time this runs.
    if (text) {
      writeSseEvent(params.response, "response.output_text.delta", {
        type: "response.output_text.delta",
        sequence_number: nextSequenceNumber(params.sequenceState),
        output_index: params.outputIndex,
        item_id: itemId,
        content_index: 0,
        delta: text
      });
    }
    writeSseEvent(params.response, "response.output_text.done", {
      type: "response.output_text.done",
      sequence_number: nextSequenceNumber(params.sequenceState),
      output_index: params.outputIndex,
      item_id: itemId,
      content_index: 0,
      text
    });
    writeSseEvent(params.response, "response.content_part.done", {
      type: "response.content_part.done",
      sequence_number: nextSequenceNumber(params.sequenceState),
      output_index: params.outputIndex,
      item_id: itemId,
      content_index: 0,
      part: {
        type: "output_text",
        text,
        annotations: []
      }
    });
  }
  writeSseEvent(params.response, "response.output_item.done", {
    type: "response.output_item.done",
    sequence_number: nextSequenceNumber(params.sequenceState),
    output_index: params.outputIndex,
    item: params.item
  });
}
184
/**
 * Emits the Responses-API SSE event sequence for one "function_call" output
 * item: output_item.added → function_call_arguments.delta (only when the item
 * has both an id and non-empty arguments) → function_call_arguments.done
 * (when the item has an id) → output_item.done.
 *
 * params: { response, item, outputIndex, sequenceState }.
 */
function writeFunctionCallOutputItemEvents(params) {
  const { response, item, outputIndex, sequenceState } = params;
  const itemId = readString(item.id);
  const argumentsText = readString(item.arguments) ?? "";
  // Local emitter: stamps the shared type/sequence/output_index envelope so
  // each call site only lists its event-specific fields.
  const emit = (eventName, payload) => {
    writeSseEvent(response, eventName, {
      type: eventName,
      sequence_number: nextSequenceNumber(sequenceState),
      output_index: outputIndex,
      ...payload
    });
  };
  emit("response.output_item.added", {
    item: buildInProgressOutputItem(item)
  });
  if (itemId) {
    if (argumentsText) {
      emit("response.function_call_arguments.delta", {
        item_id: itemId,
        delta: argumentsText
      });
    }
    emit("response.function_call_arguments.done", {
      item_id: itemId,
      arguments: argumentsText
    });
  }
  emit("response.output_item.done", {
    item
  });
}
218
/**
 * Dispatches each output item to its type-specific SSE writer; unknown item
 * types only get a bare response.output_item.done event.
 *
 * params: { response, outputItems, sequenceState }.
 */
function writeResponseOutputItemEvents(params) {
  params.outputItems.forEach((item, outputIndex) => {
    const itemParams = {
      response: params.response,
      item,
      outputIndex,
      sequenceState: params.sequenceState
    };
    switch (readString(item.type)) {
      case "message":
        writeMessageOutputItemEvents(itemParams);
        return;
      case "function_call":
        writeFunctionCallOutputItemEvents(itemParams);
        return;
      default:
        writeSseEvent(params.response, "response.output_item.done", {
          type: "response.output_item.done",
          sequence_number: nextSequenceNumber(params.sequenceState),
          output_index: outputIndex,
          item
        });
    }
  });
}
247
/**
 * Reports a bridge failure to an SSE client: streaming errors are delivered
 * as an `error` event on a 200 response (not an HTTP error status), matching
 * how Responses-API streams surface failures, then the stream is closed.
 */
function writeStreamError(response, message) {
  response.statusCode = 200;
  const streamHeaders = {
    "content-type": "text/event-stream; charset=utf-8",
    "cache-control": "no-cache, no-transform",
    connection: "keep-alive"
  };
  for (const [headerName, headerValue] of Object.entries(streamHeaders)) {
    response.setHeader(headerName, headerValue);
  }
  writeSseEvent(response, "error", {
    type: "error",
    error: {
      code: "invalid_request_error",
      message
    }
  });
  response.end();
}
261
/**
 * Assembles everything the bridge needs to answer one request: the output
 * items and usage derived from the upstream response, plus the final
 * response resource built from both.
 *
 * params: { response, responseId, model }.
 */
function buildBridgeResponsePayload(params) {
  const outputItems = buildOpenResponsesOutputItems(params.response, params.responseId);
  const usage = buildUsage(params.response);
  const responseResource = buildResponseResource({
    responseId: params.responseId,
    model: params.model,
    outputItems,
    usage
  });
  return { outputItems, usage, responseResource };
}
275
/**
 * Replays a fully-buffered bridge result as a Responses-API SSE stream:
 * response.created (status in_progress, zeroed usage) → per-item events via
 * writeResponseOutputItemEvents → response.completed carrying the final
 * response resource, then ends the HTTP response.
 *
 * params: { response, responseId, model, outputItems, responseResource }.
 */
function writeResponsesStream(params) {
  // Shared monotonic counter threaded through every event's sequence_number.
  const sequenceState = {
    value: 0
  };
  params.response.statusCode = 200;
  params.response.setHeader("content-type", "text/event-stream; charset=utf-8");
  params.response.setHeader("cache-control", "no-cache, no-transform");
  params.response.setHeader("connection", "keep-alive");
  writeSseEvent(params.response, "response.created", {
    type: "response.created",
    sequence_number: nextSequenceNumber(sequenceState),
    response: buildResponseResource({
      responseId: params.responseId,
      model: params.model,
      outputItems: [],
      // Placeholder zeroed usage; real counters arrive with response.completed.
      usage: buildUsage({
        usage: {}
      }),
      status: "in_progress"
    })
  });
  writeResponseOutputItemEvents({
    response: params.response,
    outputItems: params.outputItems,
    sequenceState
  });
  writeSseEvent(params.response, "response.completed", {
    type: "response.completed",
    sequence_number: nextSequenceNumber(sequenceState),
    response: params.responseResource
  });
  params.response.end();
}
308
- export {
309
- buildBridgeResponsePayload,
310
- writeResponsesStream,
311
- writeStreamError
312
- };
@@ -1,5 +0,0 @@
1
- import { CodexOpenAiResponsesBridgeConfig, CodexOpenAiResponsesBridgeResult } from './codex-openai-responses-bridge-shared.js';
2
-
3
- declare function ensureCodexOpenAiResponsesBridge(config: CodexOpenAiResponsesBridgeConfig): Promise<CodexOpenAiResponsesBridgeResult>;
4
-
5
- export { ensureCodexOpenAiResponsesBridge };
@@ -1,140 +0,0 @@
1
- import { randomUUID } from "node:crypto";
2
- import { createServer } from "node:http";
3
- import { callOpenAiCompatibleUpstream } from "./codex-openai-responses-bridge-request.js";
4
- import {
5
- buildBridgeResponsePayload,
6
- writeResponsesStream,
7
- writeStreamError
8
- } from "./codex-openai-responses-bridge-stream.js";
9
- import {
10
- readBoolean,
11
- readRecord
12
- } from "./codex-openai-responses-bridge-shared.js";
13
// Cache of bridge servers keyed by a canonical serialization of their config,
// so repeated ensure calls with an equivalent config reuse one loopback server.
const bridgeCache = /* @__PURE__ */ new Map();
/**
 * Builds a deterministic cache key for a bridge configuration.
 *
 * Fix: `upstreamExtraHeaders` entries are sorted by header name before
 * serialization, so two configs that differ only in the insertion order of
 * their header object map to the same key (plain-object key order would
 * otherwise leak into the JSON.stringify output and defeat the cache).
 */
function toBridgeCacheKey(config) {
  const sortedHeaderEntries = Object.entries(config.upstreamExtraHeaders ?? {}).sort(
    ([left], [right]) => (left < right ? -1 : left > right ? 1 : 0)
  );
  return JSON.stringify({
    upstreamApiBase: config.upstreamApiBase,
    upstreamApiKey: config.upstreamApiKey ?? "",
    upstreamExtraHeaders: Object.fromEntries(sortedHeaderEntries),
    defaultModel: config.defaultModel ?? "",
    // Prefix matching elsewhere is case-insensitive, so normalize here too.
    modelPrefixes: (config.modelPrefixes ?? []).map((prefix) => prefix.trim().toLowerCase())
  });
}
23
/**
 * Drains an incoming HTTP request body and parses it as a JSON object.
 *
 * @returns `{}` when the body is empty/whitespace, the parsed record for a
 *   valid JSON object, or `null` when the payload is unparseable or not a
 *   plain object (callers treat `null` as a 400).
 */
async function readJsonBody(request) {
  const buffers = [];
  for await (const chunk of request) {
    buffers.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
  }
  const rawText = Buffer.concat(buffers).toString("utf8").trim();
  if (rawText === "") {
    return {};
  }
  try {
    const parsed = JSON.parse(rawText);
    return readRecord(parsed) ?? null;
  } catch {
    return null;
  }
}
38
/**
 * Handles POST /responses: parses the JSON body, forwards it to the
 * OpenAI-compatible upstream, and replies either as an SSE stream or as a
 * plain JSON response resource.
 *
 * Unparseable bodies get a 400 JSON error. Upstream failures are reported as
 * an SSE `error` event (200) in streaming mode, or a 400 JSON error otherwise.
 */
async function handleResponsesRequest(request, response, config) {
  const body = await readJsonBody(request);
  // readJsonBody returns null for invalid JSON / non-object payloads.
  if (!body) {
    response.statusCode = 400;
    response.setHeader("content-type", "application/json");
    response.end(
      JSON.stringify({
        error: {
          message: "Invalid JSON payload."
        }
      })
    );
    return;
  }
  try {
    const upstream = await callOpenAiCompatibleUpstream({
      config,
      body
    });
    const responseId = randomUUID();
    const { outputItems, responseResource } = buildBridgeResponsePayload({
      responseId,
      model: upstream.model,
      response: upstream.response
    });
    // Streaming is the default: anything other than an explicit `stream:
    // false` (including a missing or non-boolean field) selects SSE output.
    const wantsStream = readBoolean(body.stream) !== false;
    if (wantsStream) {
      writeResponsesStream({
        response,
        responseId,
        model: upstream.model,
        outputItems,
        responseResource
      });
      return;
    }
    response.statusCode = 200;
    response.setHeader("content-type", "application/json");
    response.end(JSON.stringify(responseResource));
  } catch (error) {
    const message = error instanceof Error ? error.message : "Codex OpenAI bridge request failed.";
    // Same default-to-streaming rule, re-evaluated for the error path.
    if (readBoolean(body.stream) !== false) {
      writeStreamError(response, message);
      return;
    }
    response.statusCode = 400;
    response.setHeader("content-type", "application/json");
    response.end(
      JSON.stringify({
        error: {
          message
        }
      })
    );
  }
}
94
/**
 * Starts a loopback HTTP server that bridges Responses-API requests to an
 * OpenAI-compatible upstream. Only POST /responses and POST /v1/responses are
 * served; every other route gets a 404 JSON error.
 *
 * @returns `{ baseUrl }` — the bound http://127.0.0.1:<port> address.
 * @throws when the server cannot bind a loopback port.
 */
async function createCodexOpenAiResponsesBridge(config) {
  const server = createServer((request, response) => {
    const pathname = request.url ? new URL(request.url, "http://127.0.0.1").pathname : "/";
    const isResponsesPath = pathname === "/responses" || pathname === "/v1/responses";
    if (request.method === "POST" && isResponsesPath) {
      // Fire-and-forget: the handler writes/ends the response itself.
      void handleResponsesRequest(request, response, config);
      return;
    }
    response.statusCode = 404;
    response.setHeader("content-type", "application/json");
    const notFoundPayload = {
      error: {
        message: `Unsupported Codex bridge path: ${pathname}`
      }
    };
    response.end(JSON.stringify(notFoundPayload));
  });
  // Port 0 lets the OS pick a free ephemeral port on the loopback interface.
  await new Promise((resolve, reject) => {
    server.once("error", reject);
    server.listen(0, "127.0.0.1", () => resolve());
  });
  const address = server.address();
  if (!address || typeof address === "string") {
    throw new Error("Codex bridge failed to bind a loopback port.");
  }
  return {
    baseUrl: `http://127.0.0.1:${address.port}`
  };
}
123
/**
 * Returns a bridge server for `config`, creating one only if an equivalent
 * configuration has not already produced one. The pending creation promise is
 * cached immediately so concurrent callers share a single startup.
 */
async function ensureCodexOpenAiResponsesBridge(config) {
  const cacheKey = toBridgeCacheKey(config);
  const cached = bridgeCache.get(cacheKey);
  if (cached) {
    return await cached.promise;
  }
  const startupPromise = createCodexOpenAiResponsesBridge(config);
  bridgeCache.set(cacheKey, { promise: startupPromise });
  try {
    return await startupPromise;
  } catch (error) {
    // A failed startup must not poison the cache for future attempts.
    bridgeCache.delete(cacheKey);
    throw error;
  }
}
138
- export {
139
- ensureCodexOpenAiResponsesBridge
140
- };
@@ -1,12 +0,0 @@
1
- declare function resolveCodexResponsesApiSupport(params: {
2
- capabilitySpec?: {
3
- supportsResponsesApi?: boolean;
4
- } | null;
5
- wireApi?: string | null;
6
- apiBase: string;
7
- apiKey: string;
8
- extraHeaders?: Record<string, string> | null;
9
- model: string;
10
- }): Promise<boolean>;
11
-
12
- export { resolveCodexResponsesApiSupport };
@@ -1,109 +0,0 @@
1
- import { readString } from "./codex-openai-responses-bridge-shared.js";
2
// One probe per distinct upstream/model combination; the in-flight promise is
// cached so concurrent resolutions share a single network request.
const codexResponsesProbeCache = /* @__PURE__ */ new Map();
// Guarantees a trailing slash so `new URL("responses", base)` appends to the
// path instead of replacing its final segment.
function normalizeApiBase(value) {
  if (value.endsWith("/")) {
    return value;
  }
  return `${value}/`;
}
6
/**
 * Extracts a human-readable error message from an upstream error payload:
 * plain strings are trimmed; plain objects yield their `message` (or,
 * failing that, `error`) string; everything else yields "".
 */
function readErrorMessage(value) {
  if (typeof value === "string") {
    return value.trim();
  }
  const isRecord = Boolean(value) && typeof value === "object" && !Array.isArray(value);
  if (!isRecord) {
    return "";
  }
  return readString(value.message) ?? readString(value.error) ?? "";
}
16
/**
 * Decides whether a failed /responses probe means the endpoint is genuinely
 * unsupported (→ true) rather than transiently failing (→ false).
 *
 * 404/405/501 always mean unsupported; other 5xx statuses are treated as
 * transient; anything else is unsupported only when the error message
 * contains a known "no such endpoint/model" marker (case-insensitive).
 */
function shouldTreatResponsesProbeFailureAsUnsupported(params) {
  const { status, message } = params;
  if (status === 404 || status === 405 || status === 501) {
    return true;
  }
  if (status >= 500) {
    return false;
  }
  const haystack = message.trim().toLowerCase();
  const unsupportedMarkers = [
    "unsupported model",
    "not support",
    "not supported",
    "unsupported endpoint",
    "unknown url",
    "no route matched",
    "responses api"
  ];
  return unsupportedMarkers.some((marker) => haystack.includes(marker));
}
34
/**
 * Issues a one-token probe request against `{apiBase}/responses` to discover
 * whether the upstream implements the Responses API. Probe promises are
 * cached per apiBase/apiKey/extraHeaders/model combination; a probe that
 * throws (network failure) is evicted so it can be retried.
 *
 * A 2xx response with no error payload means supported; any failure is
 * classified by shouldTreatResponsesProbeFailureAsUnsupported, and ambiguous
 * failures default to "supported" (return true) so transient errors do not
 * permanently disable the Responses wire.
 */
async function probeCodexResponsesApiSupport(params) {
  // NOTE(review): extraHeaders key order affects this cache key, so two
  // equivalent configs with reordered headers probe twice — harmless, but
  // worth confirming against how callers build extraHeaders.
  const cacheKey = JSON.stringify({
    apiBase: params.apiBase,
    apiKey: params.apiKey,
    extraHeaders: params.extraHeaders ?? {},
    model: params.model
  });
  const existing = codexResponsesProbeCache.get(cacheKey);
  if (existing) {
    return await existing;
  }
  const probePromise = (async () => {
    // Minimal probe payload: one token, no streaming.
    const response = await fetch(new URL("responses", normalizeApiBase(params.apiBase)), {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${params.apiKey}`,
        ...params.extraHeaders ?? {}
      },
      body: JSON.stringify({
        model: params.model,
        input: "ping",
        max_output_tokens: 1,
        stream: false
      })
    });
    const rawText = await response.text();
    let parsed = null;
    try {
      parsed = JSON.parse(rawText);
    } catch {
      parsed = null;
    }
    const parsedRecord = parsed && typeof parsed === "object" && !Array.isArray(parsed) ? parsed : null;
    // Prefer the structured error message; fall back to the raw body (capped).
    const message = readErrorMessage(parsedRecord?.error) || readErrorMessage(parsed) || rawText.slice(0, 240);
    // Some upstreams report failure inside a 2xx body via status/error fields.
    const responseFailed = readString(parsedRecord?.status)?.toLowerCase() === "failed" || Boolean(parsedRecord?.error);
    if (response.ok && !responseFailed) {
      return true;
    }
    return !shouldTreatResponsesProbeFailureAsUnsupported({
      status: response.status,
      message
    });
  })();
  codexResponsesProbeCache.set(cacheKey, probePromise);
  try {
    return await probePromise;
  } catch (error) {
    // Only thrown probes are evicted; resolved false results stay cached.
    codexResponsesProbeCache.delete(cacheKey);
    throw error;
  }
}
86
/**
 * Resolves whether the Responses API should be used for a model, in priority
 * order: an explicit boolean in capabilitySpec.supportsResponsesApi wins,
 * then an explicit wireApi of "chat"/"responses" (case-insensitive), and
 * finally a live probe against the upstream.
 */
async function resolveCodexResponsesApiSupport(params) {
  const declaredSupport = params.capabilitySpec?.supportsResponsesApi;
  if (typeof declaredSupport === "boolean") {
    return declaredSupport;
  }
  switch (readString(params.wireApi)?.toLowerCase()) {
    case "chat":
      return false;
    case "responses":
      return true;
    default:
      break;
  }
  return await probeCodexResponsesApiSupport({
    apiBase: params.apiBase,
    apiKey: params.apiKey,
    extraHeaders: params.extraHeaders,
    model: params.model
  });
}
107
- export {
108
- resolveCodexResponsesApiSupport
109
- };
@@ -1,20 +0,0 @@
1
- import { Config } from '@nextclaw/core';
2
-
3
- type SessionTypeDescriptor = {
4
- ready?: boolean;
5
- reason?: string | null;
6
- reasonMessage?: string | null;
7
- supportedModels?: string[];
8
- recommendedModel?: string | null;
9
- cta?: {
10
- kind: string;
11
- label?: string;
12
- href?: string;
13
- } | null;
14
- };
15
- declare function createDescribeCodexSessionType(params: {
16
- config: Config;
17
- pluginConfig: Record<string, unknown>;
18
- }): () => SessionTypeDescriptor;
19
-
20
- export { type SessionTypeDescriptor, createDescribeCodexSessionType };
@@ -1,57 +0,0 @@
1
/**
 * Returns the trimmed string value, or undefined when the input is not a
 * string or trims to empty.
 */
function readString(value) {
  if (typeof value !== "string") {
    return undefined;
  }
  const trimmed = value.trim();
  return trimmed === "" ? undefined : trimmed;
}
8
/**
 * Returns the array's non-empty trimmed strings, or undefined when the input
 * is not an array or yields no usable strings.
 */
function readStringArray(value) {
  if (!Array.isArray(value)) {
    return undefined;
  }
  const cleaned = [];
  for (const entry of value) {
    const text = readString(entry);
    if (text) {
      cleaned.push(text);
    }
  }
  return cleaned.length > 0 ? cleaned : undefined;
}
15
// Removes duplicate strings while preserving first-seen order.
function dedupeStrings(values) {
  return [...new Set(values)];
}
18
/**
 * Determines the Codex session's supported model list, in priority order:
 * an explicit plugin `supportedModels` list, then every configured
 * provider/model pair (as "provider/model"), then a single fallback model
 * from the plugin config or the agent defaults. The result is deduplicated.
 */
function resolveConfiguredCodexModels(params) {
  const explicitModels = readStringArray(params.pluginConfig.supportedModels);
  if (explicitModels) {
    return dedupeStrings(explicitModels);
  }
  const providers = params.config.providers;
  const providerMap =
    providers && typeof providers === "object" && !Array.isArray(providers) ? providers : {};
  const providerModels = [];
  for (const [providerName, provider] of Object.entries(providerMap)) {
    for (const rawModel of provider.models ?? []) {
      const modelName = readString(rawModel);
      if (modelName) {
        providerModels.push(`${providerName}/${modelName}`);
      }
    }
  }
  if (providerModels.length > 0) {
    return dedupeStrings(providerModels);
  }
  const fallbackModel = readString(params.pluginConfig.model) ?? params.config.agents.defaults.model;
  return dedupeStrings(fallbackModel ? [fallbackModel] : []);
}
31
/**
 * Picks the model to recommend: the configured model when it is in the
 * supported list, otherwise the first supported model, otherwise the
 * configured model itself (or null when nothing is configured).
 */
function resolveRecommendedCodexModel(params) {
  const configuredModel =
    readString(params.pluginConfig.model) ?? params.config.agents.defaults.model;
  if (params.supportedModels.includes(configuredModel)) {
    return configuredModel;
  }
  const [firstSupported] = params.supportedModels;
  return firstSupported ?? configuredModel ?? null;
}
38
/**
 * Builds the session-type descriptor factory for the Codex plugin. The
 * returned function recomputes the supported/recommended models on every
 * call and always reports the session as ready with no call-to-action.
 *
 * params: { config, pluginConfig }.
 */
function createDescribeCodexSessionType(params) {
  return () => {
    const supportedModels = resolveConfiguredCodexModels(params);
    const recommendedModel = resolveRecommendedCodexModel({
      config: params.config,
      pluginConfig: params.pluginConfig,
      supportedModels
    });
    return {
      ready: true,
      reason: null,
      reasonMessage: null,
      supportedModels,
      recommendedModel,
      cta: null
    };
  };
}
55
- export {
56
- createDescribeCodexSessionType
57
- };