claudish 3.3.2 → 3.3.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +160 -3
  2. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -56975,6 +56975,9 @@ class OpenAIHandler {
56975
56975
  return getModelPricing("openai", this.modelName);
56976
56976
  }
56977
56977
  getApiEndpoint() {
56978
+ if (this.isCodexModel()) {
56979
+ return `${this.provider.baseUrl}/v1/responses`;
56980
+ }
56978
56981
  return `${this.provider.baseUrl}${this.provider.apiPath}`;
56979
56982
  }
56980
56983
  writeTokenFile(input, output) {
@@ -57015,6 +57018,10 @@ class OpenAIHandler {
57015
57018
  const model = this.modelName.toLowerCase();
57016
57019
  return model.includes("o1") || model.includes("o3");
57017
57020
  }
57021
+ isCodexModel() {
57022
+ const model = this.modelName.toLowerCase();
57023
+ return model.includes("codex");
57024
+ }
57018
57025
  usesMaxCompletionTokens() {
57019
57026
  const model = this.modelName.toLowerCase();
57020
57027
  return model.includes("gpt-5") || model.includes("o1") || model.includes("o3") || model.includes("o4");
@@ -57057,6 +57064,151 @@ class OpenAIHandler {
57057
57064
  }
57058
57065
  return payload;
57059
57066
  }
57067
+ buildResponsesPayload(claudeRequest, messages, tools) {
57068
+ const payload = {
57069
+ model: this.modelName,
57070
+ input: messages,
57071
+ stream: true
57072
+ };
57073
+ if (claudeRequest.system) {
57074
+ payload.instructions = claudeRequest.system;
57075
+ }
57076
+ if (claudeRequest.max_tokens) {
57077
+ payload.max_output_tokens = claudeRequest.max_tokens;
57078
+ }
57079
+ if (tools.length > 0) {
57080
+ payload.tools = tools.map((tool) => {
57081
+ if (tool.type === "function" && tool.function) {
57082
+ return {
57083
+ type: "function",
57084
+ name: tool.function.name,
57085
+ description: tool.function.description,
57086
+ parameters: tool.function.parameters
57087
+ };
57088
+ }
57089
+ return tool;
57090
+ });
57091
+ }
57092
+ return payload;
57093
+ }
57094
+ async handleResponsesStreaming(c, response, _adapter, _claudeRequest) {
57095
+ const reader = response.body?.getReader();
57096
+ if (!reader) {
57097
+ return c.json({ error: "No response body" }, 500);
57098
+ }
57099
+ const encoder = new TextEncoder;
57100
+ const decoder = new TextDecoder;
57101
+ let buffer = "";
57102
+ let contentIndex = 0;
57103
+ let inputTokens = 0;
57104
+ let outputTokens = 0;
57105
+ const stream = new ReadableStream({
57106
+ start: async (controller) => {
57107
+ const messageStart = {
57108
+ type: "message_start",
57109
+ message: {
57110
+ id: `msg_${Date.now()}`,
57111
+ type: "message",
57112
+ role: "assistant",
57113
+ content: [],
57114
+ model: this.modelName,
57115
+ usage: { input_tokens: 0, output_tokens: 0 }
57116
+ }
57117
+ };
57118
+ controller.enqueue(encoder.encode(`event: message_start
57119
+ data: ${JSON.stringify(messageStart)}
57120
+
57121
+ `));
57122
+ try {
57123
+ while (true) {
57124
+ const { done, value } = await reader.read();
57125
+ if (done)
57126
+ break;
57127
+ buffer += decoder.decode(value, { stream: true });
57128
+ const lines = buffer.split(`
57129
+ `);
57130
+ buffer = lines.pop() || "";
57131
+ for (const line of lines) {
57132
+ if (line.startsWith("event: ")) {
57133
+ continue;
57134
+ }
57135
+ if (!line.startsWith("data: "))
57136
+ continue;
57137
+ const data = line.slice(6);
57138
+ if (data === "[DONE]")
57139
+ continue;
57140
+ try {
57141
+ const event = JSON.parse(data);
57142
+ if (event.type === "response.output_text.delta") {
57143
+ if (contentIndex === 0) {
57144
+ const blockStart = {
57145
+ type: "content_block_start",
57146
+ index: 0,
57147
+ content_block: { type: "text", text: "" }
57148
+ };
57149
+ controller.enqueue(encoder.encode(`event: content_block_start
57150
+ data: ${JSON.stringify(blockStart)}
57151
+
57152
+ `));
57153
+ contentIndex = 1;
57154
+ }
57155
+ const delta = {
57156
+ type: "content_block_delta",
57157
+ index: 0,
57158
+ delta: { type: "text_delta", text: event.delta || "" }
57159
+ };
57160
+ controller.enqueue(encoder.encode(`event: content_block_delta
57161
+ data: ${JSON.stringify(delta)}
57162
+
57163
+ `));
57164
+ } else if (event.type === "response.completed") {
57165
+ if (event.response?.usage) {
57166
+ inputTokens = event.response.usage.input_tokens || 0;
57167
+ outputTokens = event.response.usage.output_tokens || 0;
57168
+ }
57169
+ } else if (event.type === "response.function_call_arguments.delta") {}
57170
+ } catch (parseError) {
57171
+ log(`[OpenAIHandler] Error parsing Responses event: ${parseError}`);
57172
+ }
57173
+ }
57174
+ }
57175
+ if (contentIndex > 0) {
57176
+ const blockStop = { type: "content_block_stop", index: 0 };
57177
+ controller.enqueue(encoder.encode(`event: content_block_stop
57178
+ data: ${JSON.stringify(blockStop)}
57179
+
57180
+ `));
57181
+ }
57182
+ const messageDelta = {
57183
+ type: "message_delta",
57184
+ delta: { stop_reason: "end_turn", stop_sequence: null },
57185
+ usage: { output_tokens: outputTokens }
57186
+ };
57187
+ controller.enqueue(encoder.encode(`event: message_delta
57188
+ data: ${JSON.stringify(messageDelta)}
57189
+
57190
+ `));
57191
+ const messageStop = { type: "message_stop" };
57192
+ controller.enqueue(encoder.encode(`event: message_stop
57193
+ data: ${JSON.stringify(messageStop)}
57194
+
57195
+ `));
57196
+ this.updateTokenTracking(inputTokens, outputTokens);
57197
+ controller.close();
57198
+ } catch (error46) {
57199
+ log(`[OpenAIHandler] Responses streaming error: ${error46}`);
57200
+ controller.error(error46);
57201
+ }
57202
+ }
57203
+ });
57204
+ return new Response(stream, {
57205
+ headers: {
57206
+ "Content-Type": "text/event-stream",
57207
+ "Cache-Control": "no-cache",
57208
+ Connection: "keep-alive"
57209
+ }
57210
+ });
57211
+ }
57060
57212
  async handle(c, payload) {
57061
57213
  const { claudeRequest, droppedParams } = transformOpenAIToClaude(payload);
57062
57214
  const messages = this.convertMessages(claudeRequest);
@@ -57081,11 +57233,12 @@ class OpenAIHandler {
57081
57233
  log(`[OpenAI] Tools: ${toolNames}`);
57082
57234
  }
57083
57235
  }
57084
- const openAIPayload = this.buildOpenAIPayload(claudeRequest, messages, tools);
57236
+ const isCodex = this.isCodexModel();
57237
+ const apiPayload = isCodex ? this.buildResponsesPayload(claudeRequest, messages, tools) : this.buildOpenAIPayload(claudeRequest, messages, tools);
57085
57238
  const adapter = this.adapterManager.getAdapter();
57086
57239
  if (typeof adapter.reset === "function")
57087
57240
  adapter.reset();
57088
- adapter.prepareRequest(openAIPayload, claudeRequest);
57241
+ adapter.prepareRequest(apiPayload, claudeRequest);
57089
57242
  await this.middlewareManager.beforeRequest({
57090
57243
  modelId: `openai/${this.modelName}`,
57091
57244
  messages,
@@ -57104,7 +57257,7 @@ class OpenAIHandler {
57104
57257
  "Content-Type": "application/json",
57105
57258
  Authorization: `Bearer ${this.apiKey}`
57106
57259
  },
57107
- body: JSON.stringify(openAIPayload),
57260
+ body: JSON.stringify(apiPayload),
57108
57261
  signal: controller.signal
57109
57262
  });
57110
57263
  } catch (fetchError) {
@@ -57146,6 +57299,10 @@ class OpenAIHandler {
57146
57299
  if (droppedParams.length > 0) {
57147
57300
  c.header("X-Dropped-Params", droppedParams.join(", "));
57148
57301
  }
57302
+ if (isCodex) {
57303
+ log(`[OpenAIHandler] Using Responses API streaming handler for Codex model`);
57304
+ return this.handleResponsesStreaming(c, response, adapter, claudeRequest);
57305
+ }
57149
57306
  return createStreamingResponseHandler(c, response, adapter, `openai/${this.modelName}`, this.middlewareManager, (input, output) => this.updateTokenTracking(input, output), claudeRequest.tools);
57150
57307
  }
57151
57308
  async shutdown() {}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "claudish",
3
- "version": "3.3.2",
3
+ "version": "3.3.3",
4
4
  "description": "Run Claude Code with any model - OpenRouter, Ollama, LM Studio & local models",
5
5
  "type": "module",
6
6
  "main": "./dist/index.js",