@nextclaw/nextclaw-ncp-runtime-plugin-codex-sdk 0.1.26 → 0.1.27

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +74 -39
  2. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -163,29 +163,7 @@ function nextSequenceNumber(state) {
163
163
  return nextValue;
164
164
  }
165
165
 
166
- // src/codex-openai-responses-bridge-request.ts
167
- function stripModelPrefix(model, prefixes) {
168
- const normalizedModel = model.trim();
169
- for (const prefix of prefixes) {
170
- const normalizedPrefix = prefix.trim().toLowerCase();
171
- if (!normalizedPrefix) {
172
- continue;
173
- }
174
- const candidatePrefix = `${normalizedPrefix}/`;
175
- if (normalizedModel.toLowerCase().startsWith(candidatePrefix)) {
176
- return normalizedModel.slice(candidatePrefix.length);
177
- }
178
- }
179
- return normalizedModel;
180
- }
181
- function resolveUpstreamModel(requestedModel, config) {
182
- const prefixes = (config.modelPrefixes ?? []).filter((value) => value.trim().length > 0);
183
- const model = stripModelPrefix(readString(requestedModel) ?? "", prefixes) || stripModelPrefix(config.defaultModel ?? "", prefixes);
184
- if (!model) {
185
- throw new Error("Codex bridge could not resolve an upstream model.");
186
- }
187
- return model;
188
- }
166
+ // src/codex-openai-responses-bridge-message-content.ts
189
167
  function normalizeTextPart(value) {
190
168
  const record = readRecord(value);
191
169
  if (!record) {
@@ -271,6 +249,30 @@ function buildChatContent(content) {
271
249
  }
272
250
  return chatContent;
273
251
  }
252
+ function mergeChatContent(left, right) {
253
+ if (left === null) {
254
+ return right;
255
+ }
256
+ if (right === null) {
257
+ return left;
258
+ }
259
+ if (typeof left === "string" && typeof right === "string") {
260
+ return [left, right].filter((value) => value.length > 0).join("\n\n");
261
+ }
262
+ const normalizedLeft = typeof left === "string" ? [
263
+ {
264
+ type: "text",
265
+ text: left
266
+ }
267
+ ] : left;
268
+ const normalizedRight = typeof right === "string" ? [
269
+ {
270
+ type: "text",
271
+ text: right
272
+ }
273
+ ] : right;
274
+ return [...normalizedLeft, ...normalizedRight];
275
+ }
274
276
  function readAssistantMessageText(content) {
275
277
  if (typeof content === "string") {
276
278
  return content;
@@ -280,6 +282,30 @@ function readAssistantMessageText(content) {
280
282
  }
281
283
  return content.filter((entry) => entry.type === "text").map((entry) => readString(entry.text) ?? "").join("\n");
282
284
  }
285
+
286
+ // src/codex-openai-responses-bridge-request.ts
287
+ function stripModelPrefix(model, prefixes) {
288
+ const normalizedModel = model.trim();
289
+ for (const prefix of prefixes) {
290
+ const normalizedPrefix = prefix.trim().toLowerCase();
291
+ if (!normalizedPrefix) {
292
+ continue;
293
+ }
294
+ const candidatePrefix = `${normalizedPrefix}/`;
295
+ if (normalizedModel.toLowerCase().startsWith(candidatePrefix)) {
296
+ return normalizedModel.slice(candidatePrefix.length);
297
+ }
298
+ }
299
+ return normalizedModel;
300
+ }
301
+ function resolveUpstreamModel(requestedModel, config) {
302
+ const prefixes = (config.modelPrefixes ?? []).filter((value) => value.trim().length > 0);
303
+ const model = stripModelPrefix(readString(requestedModel) ?? "", prefixes) || stripModelPrefix(config.defaultModel ?? "", prefixes);
304
+ if (!model) {
305
+ throw new Error("Codex bridge could not resolve an upstream model.");
306
+ }
307
+ return model;
308
+ }
283
309
  function appendMessageInputItem(params) {
284
310
  const role = readString(params.item.role);
285
311
  const content = buildChatContent(params.item.content);
@@ -288,16 +314,20 @@ function appendMessageInputItem(params) {
288
314
  if (text.trim()) {
289
315
  params.assistantTextParts.push(text);
290
316
  }
291
- return;
317
+ return params.systemContent;
292
318
  }
293
319
  params.flushAssistant();
294
320
  const normalizedRole = role === "developer" ? "system" : role;
295
- if ((normalizedRole === "system" || normalizedRole === "user") && content !== null) {
321
+ if (normalizedRole === "system") {
322
+ return mergeChatContent(params.systemContent, content);
323
+ }
324
+ if (normalizedRole === "user" && content !== null) {
296
325
  params.messages.push({
297
- role: normalizedRole,
326
+ role: "user",
298
327
  content
299
328
  });
300
329
  }
330
+ return params.systemContent;
301
331
  }
302
332
  function appendFunctionCallItem(params) {
303
333
  const name = readString(params.item.name);
@@ -329,20 +359,18 @@ function appendFunctionCallOutputItem(params) {
329
359
  }
330
360
  function buildOpenAiMessages(input, instructions) {
331
361
  const messages = [];
332
- const instructionText = readString(instructions);
333
- if (instructionText) {
362
+ let systemContent = readString(instructions) ?? null;
363
+ if (typeof input === "string") {
334
364
  messages.push({
335
- role: "system",
336
- content: instructionText
365
+ role: "user",
366
+ content: input
337
367
  });
338
- }
339
- if (typeof input === "string") {
340
- return [
341
- ...messages,
368
+ return systemContent === null ? messages : [
342
369
  {
343
- role: "user",
344
- content: input
345
- }
370
+ role: "system",
371
+ content: systemContent
372
+ },
373
+ ...messages
346
374
  ];
347
375
  }
348
376
  const assistantTextParts = [];
@@ -368,8 +396,9 @@ function buildOpenAiMessages(input, instructions) {
368
396
  }
369
397
  const type = readString(item.type);
370
398
  if (type === "message") {
371
- appendMessageInputItem({
399
+ systemContent = appendMessageInputItem({
372
400
  messages,
401
+ systemContent,
373
402
  assistantTextParts,
374
403
  assistantToolCalls,
375
404
  item,
@@ -393,7 +422,13 @@ function buildOpenAiMessages(input, instructions) {
393
422
  }
394
423
  }
395
424
  flushAssistant();
396
- return messages;
425
+ return systemContent === null ? messages : [
426
+ {
427
+ role: "system",
428
+ content: systemContent
429
+ },
430
+ ...messages
431
+ ];
397
432
  }
398
433
  function toOpenAiTools(value) {
399
434
  const tools = [];
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@nextclaw/nextclaw-ncp-runtime-plugin-codex-sdk",
3
- "version": "0.1.26",
3
+ "version": "0.1.27",
4
4
  "private": false,
5
5
  "description": "NextClaw plugin that registers Codex SDK as an optional NCP runtime.",
6
6
  "type": "module",