claudish 3.3.2 → 3.3.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +229 -3
  2. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -56975,6 +56975,9 @@ class OpenAIHandler {
  return getModelPricing("openai", this.modelName);
  }
  getApiEndpoint() {
+ if (this.isCodexModel()) {
+ return `${this.provider.baseUrl}/v1/responses`;
+ }
  return `${this.provider.baseUrl}${this.provider.apiPath}`;
  }
  writeTokenFile(input, output) {
@@ -57015,6 +57018,10 @@ class OpenAIHandler {
  const model = this.modelName.toLowerCase();
  return model.includes("o1") || model.includes("o3");
  }
+ isCodexModel() {
+ const model = this.modelName.toLowerCase();
+ return model.includes("codex");
+ }
  usesMaxCompletionTokens() {
  const model = this.modelName.toLowerCase();
  return model.includes("gpt-5") || model.includes("o1") || model.includes("o3") || model.includes("o4");
@@ -57057,6 +57064,220 @@ class OpenAIHandler {
  }
  return payload;
  }
+ convertMessagesToResponsesAPI(messages) {
+ return messages.filter((msg) => msg.role !== "system").map((msg) => {
+ if (msg.role === "tool") {
+ return {
+ role: "user",
+ content: [
+ {
+ type: "input_text",
+ text: `[Tool Result for ${msg.tool_call_id}]: ${typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content)}`
+ }
+ ]
+ };
+ }
+ if (msg.role === "assistant" && msg.tool_calls) {
+ const content = [];
+ if (msg.content) {
+ const textContent = typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content);
+ if (textContent) {
+ content.push({ type: "output_text", text: textContent });
+ }
+ }
+ for (const toolCall of msg.tool_calls) {
+ if (toolCall.type === "function") {
+ content.push({
+ type: "function_call",
+ id: toolCall.id,
+ name: toolCall.function.name,
+ arguments: toolCall.function.arguments
+ });
+ }
+ }
+ return { role: "assistant", content };
+ }
+ if (typeof msg.content === "string") {
+ return {
+ role: msg.role,
+ content: [
+ {
+ type: msg.role === "user" ? "input_text" : "output_text",
+ text: msg.content
+ }
+ ]
+ };
+ }
+ if (Array.isArray(msg.content)) {
+ const convertedContent = msg.content.map((block) => {
+ if (block.type === "text") {
+ return {
+ type: msg.role === "user" ? "input_text" : "output_text",
+ text: block.text
+ };
+ }
+ if (block.type === "image_url") {
+ return {
+ type: "input_image",
+ image_url: block.image_url
+ };
+ }
+ return block;
+ });
+ return {
+ role: msg.role,
+ content: convertedContent
+ };
+ }
+ return msg;
+ });
+ }
+ buildResponsesPayload(claudeRequest, messages, tools) {
+ const convertedMessages = this.convertMessagesToResponsesAPI(messages);
+ const payload = {
+ model: this.modelName,
+ input: convertedMessages,
+ stream: true
+ };
+ if (claudeRequest.system) {
+ payload.instructions = claudeRequest.system;
+ }
+ if (claudeRequest.max_tokens) {
+ payload.max_output_tokens = claudeRequest.max_tokens;
+ }
+ if (tools.length > 0) {
+ payload.tools = tools.map((tool) => {
+ if (tool.type === "function" && tool.function) {
+ return {
+ type: "function",
+ name: tool.function.name,
+ description: tool.function.description,
+ parameters: tool.function.parameters
+ };
+ }
+ return tool;
+ });
+ }
+ return payload;
+ }
+ async handleResponsesStreaming(c, response, _adapter, _claudeRequest) {
+ const reader = response.body?.getReader();
+ if (!reader) {
+ return c.json({ error: "No response body" }, 500);
+ }
+ const encoder = new TextEncoder;
+ const decoder = new TextDecoder;
+ let buffer = "";
+ let contentIndex = 0;
+ let inputTokens = 0;
+ let outputTokens = 0;
+ const stream = new ReadableStream({
+ start: async (controller) => {
+ const messageStart = {
+ type: "message_start",
+ message: {
+ id: `msg_${Date.now()}`,
+ type: "message",
+ role: "assistant",
+ content: [],
+ model: this.modelName,
+ usage: { input_tokens: 0, output_tokens: 0 }
+ }
+ };
+ controller.enqueue(encoder.encode(`event: message_start
+ data: ${JSON.stringify(messageStart)}
+
+ `));
+ try {
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done)
+ break;
+ buffer += decoder.decode(value, { stream: true });
+ const lines = buffer.split(`
+ `);
+ buffer = lines.pop() || "";
+ for (const line of lines) {
+ if (line.startsWith("event: ")) {
+ continue;
+ }
+ if (!line.startsWith("data: "))
+ continue;
+ const data = line.slice(6);
+ if (data === "[DONE]")
+ continue;
+ try {
+ const event = JSON.parse(data);
+ if (event.type === "response.output_text.delta") {
+ if (contentIndex === 0) {
+ const blockStart = {
+ type: "content_block_start",
+ index: 0,
+ content_block: { type: "text", text: "" }
+ };
+ controller.enqueue(encoder.encode(`event: content_block_start
+ data: ${JSON.stringify(blockStart)}
+
+ `));
+ contentIndex = 1;
+ }
+ const delta = {
+ type: "content_block_delta",
+ index: 0,
+ delta: { type: "text_delta", text: event.delta || "" }
+ };
+ controller.enqueue(encoder.encode(`event: content_block_delta
+ data: ${JSON.stringify(delta)}
+
+ `));
+ } else if (event.type === "response.completed") {
+ if (event.response?.usage) {
+ inputTokens = event.response.usage.input_tokens || 0;
+ outputTokens = event.response.usage.output_tokens || 0;
+ }
+ } else if (event.type === "response.function_call_arguments.delta") {}
+ } catch (parseError) {
+ log(`[OpenAIHandler] Error parsing Responses event: ${parseError}`);
+ }
+ }
+ }
+ if (contentIndex > 0) {
+ const blockStop = { type: "content_block_stop", index: 0 };
+ controller.enqueue(encoder.encode(`event: content_block_stop
+ data: ${JSON.stringify(blockStop)}
+
+ `));
+ }
+ const messageDelta = {
+ type: "message_delta",
+ delta: { stop_reason: "end_turn", stop_sequence: null },
+ usage: { output_tokens: outputTokens }
+ };
+ controller.enqueue(encoder.encode(`event: message_delta
+ data: ${JSON.stringify(messageDelta)}
+
+ `));
+ const messageStop = { type: "message_stop" };
+ controller.enqueue(encoder.encode(`event: message_stop
+ data: ${JSON.stringify(messageStop)}
+
+ `));
+ this.updateTokenTracking(inputTokens, outputTokens);
+ controller.close();
+ } catch (error46) {
+ log(`[OpenAIHandler] Responses streaming error: ${error46}`);
+ controller.error(error46);
+ }
+ }
+ });
+ return new Response(stream, {
+ headers: {
+ "Content-Type": "text/event-stream",
+ "Cache-Control": "no-cache",
+ Connection: "keep-alive"
+ }
+ });
+ }
  async handle(c, payload) {
  const { claudeRequest, droppedParams } = transformOpenAIToClaude(payload);
  const messages = this.convertMessages(claudeRequest);
@@ -57081,11 +57302,12 @@ class OpenAIHandler {
  log(`[OpenAI] Tools: ${toolNames}`);
  }
  }
- const openAIPayload = this.buildOpenAIPayload(claudeRequest, messages, tools);
+ const isCodex = this.isCodexModel();
+ const apiPayload = isCodex ? this.buildResponsesPayload(claudeRequest, messages, tools) : this.buildOpenAIPayload(claudeRequest, messages, tools);
  const adapter = this.adapterManager.getAdapter();
  if (typeof adapter.reset === "function")
  adapter.reset();
- adapter.prepareRequest(openAIPayload, claudeRequest);
+ adapter.prepareRequest(apiPayload, claudeRequest);
  await this.middlewareManager.beforeRequest({
  modelId: `openai/${this.modelName}`,
  messages,
@@ -57104,7 +57326,7 @@ class OpenAIHandler {
  "Content-Type": "application/json",
  Authorization: `Bearer ${this.apiKey}`
  },
- body: JSON.stringify(openAIPayload),
+ body: JSON.stringify(apiPayload),
  signal: controller.signal
  });
  } catch (fetchError) {
@@ -57146,6 +57368,10 @@ class OpenAIHandler {
  if (droppedParams.length > 0) {
  c.header("X-Dropped-Params", droppedParams.join(", "));
  }
+ if (isCodex) {
+ log(`[OpenAIHandler] Using Responses API streaming handler for Codex model`);
+ return this.handleResponsesStreaming(c, response, adapter, claudeRequest);
+ }
  return createStreamingResponseHandler(c, response, adapter, `openai/${this.modelName}`, this.middlewareManager, (input, output) => this.updateTokenTracking(input, output), claudeRequest.tools);
  }
  async shutdown() {}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "claudish",
- "version": "3.3.2",
+ "version": "3.3.4",
  "description": "Run Claude Code with any model - OpenRouter, Ollama, LM Studio & local models",
  "type": "module",
  "main": "./dist/index.js",