@mariozechner/pi-ai 0.34.2 → 0.36.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.d.ts.map +1 -1
- package/dist/cli.js +17 -1
- package/dist/cli.js.map +1 -1
- package/dist/models.generated.d.ts +640 -43
- package/dist/models.generated.d.ts.map +1 -1
- package/dist/models.generated.js +673 -76
- package/dist/models.generated.js.map +1 -1
- package/dist/providers/openai-codex/constants.d.ts +21 -0
- package/dist/providers/openai-codex/constants.d.ts.map +1 -0
- package/dist/providers/openai-codex/constants.js +21 -0
- package/dist/providers/openai-codex/constants.js.map +1 -0
- package/dist/providers/openai-codex/prompts/codex-instructions.md +105 -0
- package/dist/providers/openai-codex/prompts/codex.d.ts +11 -0
- package/dist/providers/openai-codex/prompts/codex.d.ts.map +1 -0
- package/dist/providers/openai-codex/prompts/codex.js +184 -0
- package/dist/providers/openai-codex/prompts/codex.js.map +1 -0
- package/dist/providers/openai-codex/prompts/pi-codex-bridge.d.ts +6 -0
- package/dist/providers/openai-codex/prompts/pi-codex-bridge.d.ts.map +1 -0
- package/dist/providers/openai-codex/prompts/pi-codex-bridge.js +48 -0
- package/dist/providers/openai-codex/prompts/pi-codex-bridge.js.map +1 -0
- package/dist/providers/openai-codex/request-transformer.d.ts +41 -0
- package/dist/providers/openai-codex/request-transformer.d.ts.map +1 -0
- package/dist/providers/openai-codex/request-transformer.js +242 -0
- package/dist/providers/openai-codex/request-transformer.js.map +1 -0
- package/dist/providers/openai-codex/response-handler.d.ts +19 -0
- package/dist/providers/openai-codex/response-handler.d.ts.map +1 -0
- package/dist/providers/openai-codex/response-handler.js +107 -0
- package/dist/providers/openai-codex/response-handler.js.map +1 -0
- package/dist/providers/openai-codex-responses.d.ts +10 -0
- package/dist/providers/openai-codex-responses.d.ts.map +1 -0
- package/dist/providers/openai-codex-responses.js +528 -0
- package/dist/providers/openai-codex-responses.js.map +1 -0
- package/dist/stream.d.ts.map +1 -1
- package/dist/stream.js +27 -1
- package/dist/stream.js.map +1 -1
- package/dist/types.d.ts +4 -2
- package/dist/types.d.ts.map +1 -1
- package/dist/types.js.map +1 -1
- package/dist/utils/oauth/index.d.ts +1 -0
- package/dist/utils/oauth/index.d.ts.map +1 -1
- package/dist/utils/oauth/index.js +11 -0
- package/dist/utils/oauth/index.js.map +1 -1
- package/dist/utils/oauth/openai-codex.d.ts +20 -0
- package/dist/utils/oauth/openai-codex.d.ts.map +1 -0
- package/dist/utils/oauth/openai-codex.js +278 -0
- package/dist/utils/oauth/openai-codex.js.map +1 -0
- package/dist/utils/oauth/types.d.ts +2 -1
- package/dist/utils/oauth/types.d.ts.map +1 -1
- package/dist/utils/oauth/types.js.map +1 -1
- package/package.json +2 -2
|
@@ -0,0 +1,242 @@
|
|
|
1
|
+
import { TOOL_REMAP_MESSAGE } from "./prompts/codex.js";
|
|
2
|
+
import { CODEX_PI_BRIDGE } from "./prompts/pi-codex-bridge.js";
|
|
3
|
+
// Maps user-facing / legacy model identifiers to the canonical model names
// sent to the Codex backend. Effort-suffixed aliases ("-low", "-medium",
// "-high", "-xhigh", "-none") collapse onto their base model, and older
// gpt-5-era names are upgraded to their gpt-5.1 equivalents.
const MODEL_MAP = {
    "gpt-5.1-codex": "gpt-5.1-codex",
    "gpt-5.1-codex-low": "gpt-5.1-codex",
    "gpt-5.1-codex-medium": "gpt-5.1-codex",
    "gpt-5.1-codex-high": "gpt-5.1-codex",
    "gpt-5.1-codex-max": "gpt-5.1-codex-max",
    "gpt-5.1-codex-max-low": "gpt-5.1-codex-max",
    "gpt-5.1-codex-max-medium": "gpt-5.1-codex-max",
    "gpt-5.1-codex-max-high": "gpt-5.1-codex-max",
    "gpt-5.1-codex-max-xhigh": "gpt-5.1-codex-max",
    "gpt-5.2": "gpt-5.2",
    "gpt-5.2-none": "gpt-5.2",
    "gpt-5.2-low": "gpt-5.2",
    "gpt-5.2-medium": "gpt-5.2",
    "gpt-5.2-high": "gpt-5.2",
    "gpt-5.2-xhigh": "gpt-5.2",
    "gpt-5.2-codex": "gpt-5.2-codex",
    "gpt-5.2-codex-low": "gpt-5.2-codex",
    "gpt-5.2-codex-medium": "gpt-5.2-codex",
    "gpt-5.2-codex-high": "gpt-5.2-codex",
    "gpt-5.2-codex-xhigh": "gpt-5.2-codex",
    "gpt-5.1-codex-mini": "gpt-5.1-codex-mini",
    "gpt-5.1-codex-mini-medium": "gpt-5.1-codex-mini",
    "gpt-5.1-codex-mini-high": "gpt-5.1-codex-mini",
    "gpt-5.1": "gpt-5.1",
    "gpt-5.1-none": "gpt-5.1",
    "gpt-5.1-low": "gpt-5.1",
    "gpt-5.1-medium": "gpt-5.1",
    "gpt-5.1-high": "gpt-5.1",
    "gpt-5.1-chat-latest": "gpt-5.1",
    "gpt-5-codex": "gpt-5.1-codex",
    "codex-mini-latest": "gpt-5.1-codex-mini",
    "gpt-5-codex-mini": "gpt-5.1-codex-mini",
    "gpt-5-codex-mini-medium": "gpt-5.1-codex-mini",
    "gpt-5-codex-mini-high": "gpt-5.1-codex-mini",
    "gpt-5": "gpt-5.1",
    "gpt-5-mini": "gpt-5.1",
    "gpt-5-nano": "gpt-5.1",
};
|
|
42
|
+
/**
 * Resolves a model id to its canonical name via MODEL_MAP.
 * Tries an exact key lookup first, then falls back to a case-insensitive
 * scan of the alias table. Returns undefined when nothing matches.
 */
function getNormalizedModel(modelId) {
    const direct = MODEL_MAP[modelId];
    if (direct) {
        return direct;
    }
    const wanted = modelId.toLowerCase();
    for (const alias of Object.keys(MODEL_MAP)) {
        if (alias.toLowerCase() === wanted) {
            return MODEL_MAP[alias];
        }
    }
    return undefined;
}
|
|
49
|
+
/**
 * Normalizes any incoming model string (optionally "provider/model" scoped)
 * into a model name the Codex endpoint accepts. Exact/case-insensitive
 * aliases resolve through MODEL_MAP; unknown ids fall through ordered
 * substring heuristics, defaulting to "gpt-5.1".
 */
export function normalizeModel(model) {
    if (!model) {
        return "gpt-5.1";
    }
    // Strip a "provider/" prefix when present.
    const modelId = model.includes("/") ? model.split("/").pop() : model;
    const mapped = getNormalizedModel(modelId);
    if (mapped) {
        return mapped;
    }
    const lower = modelId.toLowerCase();
    // Ordered most-specific first; both hyphenated and spaced spellings match.
    const heuristics = [
        [["gpt-5.2-codex", "gpt 5.2 codex"], "gpt-5.2-codex"],
        [["gpt-5.2", "gpt 5.2"], "gpt-5.2"],
        [["gpt-5.1-codex-max", "gpt 5.1 codex max"], "gpt-5.1-codex-max"],
        [["gpt-5.1-codex-mini", "gpt 5.1 codex mini"], "gpt-5.1-codex-mini"],
        [["codex-mini-latest", "gpt-5-codex-mini", "gpt 5 codex mini"], "codex-mini-latest"],
        [["gpt-5.1-codex", "gpt 5.1 codex"], "gpt-5.1-codex"],
        [["gpt-5.1", "gpt 5.1"], "gpt-5.1"],
        [["codex"], "gpt-5.1-codex"],
        [["gpt-5", "gpt 5"], "gpt-5.1"],
    ];
    for (const [needles, result] of heuristics) {
        if (needles.some((needle) => lower.includes(needle))) {
            return result;
        }
    }
    return "gpt-5.1";
}
|
|
88
|
+
/**
 * Derives the { effort, summary } reasoning settings for a model.
 * Each model family accepts a different effort range, so a requested
 * effort is clamped to what that family supports:
 *  - codex-mini: only "medium"/"high";
 *  - xhigh: only gpt-5.2, gpt-5.2-codex, and codex-max;
 *  - "none": only general gpt-5.1/gpt-5.2;
 *  - "minimal" on codex models becomes "low".
 */
function getReasoningConfig(modelName, options = {}) {
    const name = modelName?.toLowerCase() ?? "";
    const has = (needle) => name.includes(needle);

    const isGpt52Codex = has("gpt-5.2-codex") || has("gpt 5.2 codex");
    const isGpt52General = (has("gpt-5.2") || has("gpt 5.2")) && !isGpt52Codex;
    const isCodexMax = has("codex-max") || has("codex max");
    const isCodexMini = has("codex-mini") || has("codex mini") || has("codex_mini") || has("codex-mini-latest");
    const isCodex = has("codex") && !isCodexMini;
    const isLightweight = !isCodexMini && (has("nano") || has("mini"));
    const isGpt51General = (has("gpt-5.1") || has("gpt 5.1")) && !isCodex && !isCodexMax && !isCodexMini;

    const supportsXhigh = isGpt52General || isGpt52Codex || isCodexMax;
    const supportsNone = isGpt52General || isGpt51General;

    let defaultEffort;
    if (isCodexMini) {
        defaultEffort = "medium";
    } else if (supportsXhigh) {
        defaultEffort = "high";
    } else if (isLightweight) {
        defaultEffort = "minimal";
    } else {
        defaultEffort = "medium";
    }

    let effort = options.reasoningEffort || defaultEffort;

    if (isCodexMini) {
        // codex-mini only understands "medium" and "high".
        if (effort === "minimal" || effort === "low" || effort === "none") {
            effort = "medium";
        }
        if (effort === "xhigh") {
            effort = "high";
        }
        if (effort !== "high" && effort !== "medium") {
            effort = "medium";
        }
    }
    if (!supportsXhigh && effort === "xhigh") {
        effort = "high";
    }
    if (!supportsNone && effort === "none") {
        effort = "low";
    }
    if (isCodex && effort === "minimal") {
        effort = "low";
    }

    return {
        effort,
        summary: options.reasoningSummary ?? "auto",
    };
}
|
|
138
|
+
/**
 * Prepares conversation input for the Codex API: drops "item_reference"
 * entries entirely and strips server-assigned ids from the remaining items.
 * Non-array input is passed through untouched.
 */
function filterInput(input) {
    if (!Array.isArray(input)) {
        return input;
    }
    const kept = [];
    for (const entry of input) {
        if (entry.type === "item_reference") {
            continue;
        }
        if (entry.id != null) {
            const { id: _dropped, ...withoutId } = entry;
            kept.push(withoutId);
        } else {
            kept.push(entry);
        }
    }
    return kept;
}
|
|
151
|
+
/**
 * When tools are in play, prepends a developer message that combines the
 * Codex↔pi bridge prompt with the caller's system prompt (if provided).
 * Without tools, or for non-array input, the input is returned unchanged.
 */
function addCodexBridgeMessage(input, hasTools, systemPrompt) {
    if (!hasTools || !Array.isArray(input)) {
        return input;
    }
    const text = systemPrompt ? `${CODEX_PI_BRIDGE}\n\n${systemPrompt}` : CODEX_PI_BRIDGE;
    return [
        {
            type: "message",
            role: "developer",
            content: [{ type: "input_text", text }],
        },
        ...input,
    ];
}
|
|
167
|
+
/**
 * When tools are in play, prepends a developer message carrying the
 * tool-remapping instructions. Without tools, or for non-array input,
 * the input is returned unchanged.
 */
function addToolRemapMessage(input, hasTools) {
    if (!hasTools || !Array.isArray(input)) {
        return input;
    }
    return [
        {
            type: "message",
            role: "developer",
            content: [{ type: "input_text", text: TOOL_REMAP_MESSAGE }],
        },
        ...input,
    ];
}
|
|
182
|
+
/**
 * Rewrites an OpenAI Responses request body in place so the Codex backend
 * accepts it: normalizes the model name, forces streaming/non-stored mode,
 * installs the Codex instructions, injects the bridge (codexMode) or
 * tool-remap developer message, downgrades orphaned tool outputs to plain
 * messages, applies reasoning/verbosity settings, and strips client-side
 * token caps. Returns the same (mutated) body object.
 */
export async function transformRequestBody(body, codexInstructions, options = {}, codexMode = true, systemPrompt) {
    const targetModel = normalizeModel(body.model);
    body.model = targetModel;
    body.store = false;
    body.stream = true;
    body.instructions = codexInstructions;

    // A function_call_output whose matching function_call is no longer in
    // the input would be rejected; rewrite such orphans as assistant text.
    const repairOrphanedOutputs = (items) => {
        const knownCallIds = new Set();
        for (const entry of items) {
            if (entry.type === "function_call" && typeof entry.call_id === "string") {
                knownCallIds.add(entry.call_id);
            }
        }
        return items.map((entry) => {
            if (entry.type !== "function_call_output" || typeof entry.call_id !== "string") {
                return entry;
            }
            if (knownCallIds.has(entry.call_id)) {
                return entry;
            }
            const record = entry;
            const toolName = typeof record.name === "string" ? record.name : "tool";
            let rendered = "";
            try {
                const output = record.output;
                rendered = typeof output === "string" ? output : JSON.stringify(output);
            }
            catch {
                rendered = String(record.output ?? "");
            }
            if (rendered.length > 16000) {
                rendered = `${rendered.slice(0, 16000)}\n...[truncated]`;
            }
            return {
                type: "message",
                role: "assistant",
                content: `[Previous ${toolName} result; call_id=${entry.call_id}]: ${rendered}`,
            };
        });
    };

    if (body.input && Array.isArray(body.input)) {
        body.input = filterInput(body.input);
        body.input = codexMode
            ? addCodexBridgeMessage(body.input, !!body.tools, systemPrompt)
            : addToolRemapMessage(body.input, !!body.tools);
        if (body.input) {
            body.input = repairOrphanedOutputs(body.input);
        }
    }

    body.reasoning = {
        ...body.reasoning,
        ...getReasoningConfig(targetModel, options),
    };
    body.text = {
        ...body.text,
        verbosity: options.textVerbosity || "medium",
    };
    body.include = options.include || ["reasoning.encrypted_content"];

    // Output-token caps are not forwarded to the Codex endpoint.
    delete body.max_output_tokens;
    delete body.max_completion_tokens;

    return body;
}
|
|
242
|
+
//# sourceMappingURL=request-transformer.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"request-transformer.js","sourceRoot":"","sources":["../../../src/providers/openai-codex/request-transformer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,kBAAkB,EAAE,MAAM,oBAAoB,CAAC;AACxD,OAAO,EAAE,eAAe,EAAE,MAAM,8BAA8B,CAAC;AA4C/D,MAAM,SAAS,GAA2B;IACzC,eAAe,EAAE,eAAe;IAChC,mBAAmB,EAAE,eAAe;IACpC,sBAAsB,EAAE,eAAe;IACvC,oBAAoB,EAAE,eAAe;IACrC,mBAAmB,EAAE,mBAAmB;IACxC,uBAAuB,EAAE,mBAAmB;IAC5C,0BAA0B,EAAE,mBAAmB;IAC/C,wBAAwB,EAAE,mBAAmB;IAC7C,yBAAyB,EAAE,mBAAmB;IAC9C,SAAS,EAAE,SAAS;IACpB,cAAc,EAAE,SAAS;IACzB,aAAa,EAAE,SAAS;IACxB,gBAAgB,EAAE,SAAS;IAC3B,cAAc,EAAE,SAAS;IACzB,eAAe,EAAE,SAAS;IAC1B,eAAe,EAAE,eAAe;IAChC,mBAAmB,EAAE,eAAe;IACpC,sBAAsB,EAAE,eAAe;IACvC,oBAAoB,EAAE,eAAe;IACrC,qBAAqB,EAAE,eAAe;IACtC,oBAAoB,EAAE,oBAAoB;IAC1C,2BAA2B,EAAE,oBAAoB;IACjD,yBAAyB,EAAE,oBAAoB;IAC/C,SAAS,EAAE,SAAS;IACpB,cAAc,EAAE,SAAS;IACzB,aAAa,EAAE,SAAS;IACxB,gBAAgB,EAAE,SAAS;IAC3B,cAAc,EAAE,SAAS;IACzB,qBAAqB,EAAE,SAAS;IAChC,aAAa,EAAE,eAAe;IAC9B,mBAAmB,EAAE,oBAAoB;IACzC,kBAAkB,EAAE,oBAAoB;IACxC,yBAAyB,EAAE,oBAAoB;IAC/C,uBAAuB,EAAE,oBAAoB;IAC7C,OAAO,EAAE,SAAS;IAClB,YAAY,EAAE,SAAS;IACvB,YAAY,EAAE,SAAS;CACvB,CAAC;AAEF,SAAS,kBAAkB,CAAC,OAAe,EAAsB;IAChE,IAAI,SAAS,CAAC,OAAO,CAAC;QAAE,OAAO,SAAS,CAAC,OAAO,CAAC,CAAC;IAClD,MAAM,YAAY,GAAG,OAAO,CAAC,WAAW,EAAE,CAAC;IAC3C,MAAM,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,GAAG,CAAC,WAAW,EAAE,KAAK,YAAY,CAAC,CAAC;IACvF,OAAO,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;AAAA,CAC5C;AAED,MAAM,UAAU,cAAc,CAAC,KAAyB,EAAU;IACjE,IAAI,CAAC,KAAK;QAAE,OAAO,SAAS,CAAC;IAE7B,MAAM,OAAO,GAAG,KAAK,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,EAAG,CAAC,CAAC,CAAC,KAAK,CAAC;IACtE,MAAM,WAAW,GAAG,kBAAkB,CAAC,OAAO,CAAC,CAAC;IAChD,IAAI,WAAW;QAAE,OAAO,WAAW,CAAC;IAEpC,MAAM,UAAU,GAAG,OAAO,CAAC,WAAW,EAAE,CAAC;IAEzC,IAAI,UAAU,CAAC,QAAQ,CAAC,eAAe,CAAC,IAAI,UAAU,CAAC,QAAQ,CAAC,eAAe,CAAC,EAAE,CAAC;QAClF,OAAO,eAAe,CAAC;IACxB,CAAC;IACD,IAAI,UAAU,CAAC,QAAQ,CAAC,SAAS,CAAC,IAAI,UAAU,CAA
C,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC;QACtE,OAAO,SAAS,CAAC;IAClB,CAAC;IACD,IAAI,UAAU,CAAC,QAAQ,CAAC,mBAAmB,CAAC,IAAI,UAAU,CAAC,QAAQ,CAAC,mBAAmB,CAAC,EAAE,CAAC;QAC1F,OAAO,mBAAmB,CAAC;IAC5B,CAAC;IACD,IAAI,UAAU,CAAC,QAAQ,CAAC,oBAAoB,CAAC,IAAI,UAAU,CAAC,QAAQ,CAAC,oBAAoB,CAAC,EAAE,CAAC;QAC5F,OAAO,oBAAoB,CAAC;IAC7B,CAAC;IACD,IACC,UAAU,CAAC,QAAQ,CAAC,mBAAmB,CAAC;QACxC,UAAU,CAAC,QAAQ,CAAC,kBAAkB,CAAC;QACvC,UAAU,CAAC,QAAQ,CAAC,kBAAkB,CAAC,EACtC,CAAC;QACF,OAAO,mBAAmB,CAAC;IAC5B,CAAC;IACD,IAAI,UAAU,CAAC,QAAQ,CAAC,eAAe,CAAC,IAAI,UAAU,CAAC,QAAQ,CAAC,eAAe,CAAC,EAAE,CAAC;QAClF,OAAO,eAAe,CAAC;IACxB,CAAC;IACD,IAAI,UAAU,CAAC,QAAQ,CAAC,SAAS,CAAC,IAAI,UAAU,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC;QACtE,OAAO,SAAS,CAAC;IAClB,CAAC;IACD,IAAI,UAAU,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE,CAAC;QAClC,OAAO,eAAe,CAAC;IACxB,CAAC;IACD,IAAI,UAAU,CAAC,QAAQ,CAAC,OAAO,CAAC,IAAI,UAAU,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE,CAAC;QAClE,OAAO,SAAS,CAAC;IAClB,CAAC;IAED,OAAO,SAAS,CAAC;AAAA,CACjB;AAED,SAAS,kBAAkB,CAAC,SAA6B,EAAE,OAAO,GAAwB,EAAE,EAAmB;IAC9G,MAAM,cAAc,GAAG,SAAS,EAAE,WAAW,EAAE,IAAI,EAAE,CAAC;IAEtD,MAAM,YAAY,GAAG,cAAc,CAAC,QAAQ,CAAC,eAAe,CAAC,IAAI,cAAc,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC;IAC1G,MAAM,cAAc,GAAG,CAAC,cAAc,CAAC,QAAQ,CAAC,SAAS,CAAC,IAAI,cAAc,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,IAAI,CAAC,YAAY,CAAC;IACnH,MAAM,UAAU,GAAG,cAAc,CAAC,QAAQ,CAAC,WAAW,CAAC,IAAI,cAAc,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC;IAChG,MAAM,WAAW,GAChB,cAAc,CAAC,QAAQ,CAAC,YAAY,CAAC;QACrC,cAAc,CAAC,QAAQ,CAAC,YAAY,CAAC;QACrC,cAAc,CAAC,QAAQ,CAAC,YAAY,CAAC;QACrC,cAAc,CAAC,QAAQ,CAAC,mBAAmB,CAAC,CAAC;IAC9C,MAAM,OAAO,GAAG,cAAc,CAAC,QAAQ,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC;IACjE,MAAM,aAAa,GAAG,CAAC,WAAW,IAAI,CAAC,cAAc,CAAC,QAAQ,CAAC,MAAM,CAAC,IAAI,cAAc,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;IAC3G,MAAM,cAAc,GACnB,CAAC,cAAc,CAAC,QAAQ,CAAC,SAAS,CAAC,IAAI,cAAc,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC;QAC1E,CAAC,OAAO;QACR,CAAC,UAAU;QACX,CAAC,WAAW,CAAC;IAEd,MAAM,aAAa,GAAG,cAAc,IAAI,YAAY,IAAI,UAAU,CAAC;IACnE,MAAM,YAAY,GAAG,cAAc,IAAI,cAAc,CAAC;IAEtD,MAAM,aAAa,GAA8B,WAAW;QAC3D,CAAC,CAAC,QAAQ;QACV,CAAC,CAAC
,aAAa;YACd,CAAC,CAAC,MAAM;YACR,CAAC,CAAC,aAAa;gBACd,CAAC,CAAC,SAAS;gBACX,CAAC,CAAC,QAAQ,CAAC;IAEd,IAAI,MAAM,GAAG,OAAO,CAAC,eAAe,IAAI,aAAa,CAAC;IAEtD,IAAI,WAAW,EAAE,CAAC;QACjB,IAAI,MAAM,KAAK,SAAS,IAAI,MAAM,KAAK,KAAK,IAAI,MAAM,KAAK,MAAM,EAAE,CAAC;YACnE,MAAM,GAAG,QAAQ,CAAC;QACnB,CAAC;QACD,IAAI,MAAM,KAAK,OAAO,EAAE,CAAC;YACxB,MAAM,GAAG,MAAM,CAAC;QACjB,CAAC;QACD,IAAI,MAAM,KAAK,MAAM,IAAI,MAAM,KAAK,QAAQ,EAAE,CAAC;YAC9C,MAAM,GAAG,QAAQ,CAAC;QACnB,CAAC;IACF,CAAC;IAED,IAAI,CAAC,aAAa,IAAI,MAAM,KAAK,OAAO,EAAE,CAAC;QAC1C,MAAM,GAAG,MAAM,CAAC;IACjB,CAAC;IAED,IAAI,CAAC,YAAY,IAAI,MAAM,KAAK,MAAM,EAAE,CAAC;QACxC,MAAM,GAAG,KAAK,CAAC;IAChB,CAAC;IAED,IAAI,OAAO,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;QACrC,MAAM,GAAG,KAAK,CAAC;IAChB,CAAC;IAED,OAAO;QACN,MAAM;QACN,OAAO,EAAE,OAAO,CAAC,gBAAgB,IAAI,MAAM;KAC3C,CAAC;AAAA,CACF;AAED,SAAS,WAAW,CAAC,KAA8B,EAA2B;IAC7E,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC;QAAE,OAAO,KAAK,CAAC;IAExC,OAAO,KAAK;SACV,MAAM,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,KAAK,gBAAgB,CAAC;SAChD,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC;QACd,IAAI,IAAI,CAAC,EAAE,IAAI,IAAI,EAAE,CAAC;YACrB,MAAM,EAAE,EAAE,EAAE,GAAG,EAAE,GAAG,IAAI,EAAE,GAAG,IAAI,CAAC;YAClC,OAAO,IAAiB,CAAC;QAC1B,CAAC;QACD,OAAO,IAAI,CAAC;IAAA,CACZ,CAAC,CAAC;AAAA,CACJ;AAED,SAAS,qBAAqB,CAC7B,KAA8B,EAC9B,QAAiB,EACjB,YAAqB,EACK;IAC1B,IAAI,CAAC,QAAQ,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC;QAAE,OAAO,KAAK,CAAC;IAErD,MAAM,UAAU,GAAG,YAAY,CAAC,CAAC,CAAC,GAAG,eAAe,OAAO,YAAY,EAAE,CAAC,CAAC,CAAC,eAAe,CAAC;IAE5F,MAAM,aAAa,GAAc;QAChC,IAAI,EAAE,SAAS;QACf,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE;YACR;gBACC,IAAI,EAAE,YAAY;gBAClB,IAAI,EAAE,UAAU;aAChB;SACD;KACD,CAAC;IAEF,OAAO,CAAC,aAAa,EAAE,GAAG,KAAK,CAAC,CAAC;AAAA,CACjC;AAED,SAAS,mBAAmB,CAAC,KAA8B,EAAE,QAAiB,EAA2B;IACxG,IAAI,CAAC,QAAQ,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC;QAAE,OAAO,KAAK,CAAC;IAErD,MAAM,gBAAgB,GAAc;QACnC,IAAI,EAAE,SAAS;QACf,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE;YACR;gBACC,IAAI,EAAE,YAAY;gBAClB,IAAI,EAAE,kBAAkB;aACxB;SACD;KACD,CAAC;IAEF,OAAO,CAAC,gBAAgB,EAAE,GAAG,KAAK,CAAC,CAAC;AAAA,CACpC;AAED,MAAM,CAAC,K
AAK,UAAU,oBAAoB,CACzC,IAAiB,EACjB,iBAAyB,EACzB,OAAO,GAAwB,EAAE,EACjC,SAAS,GAAG,IAAI,EAChB,YAAqB,EACE;IACvB,MAAM,eAAe,GAAG,cAAc,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IAEnD,IAAI,CAAC,KAAK,GAAG,eAAe,CAAC;IAC7B,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;IACnB,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC;IACnB,IAAI,CAAC,YAAY,GAAG,iBAAiB,CAAC;IAEtC,IAAI,IAAI,CAAC,KAAK,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC;QAC7C,IAAI,CAAC,KAAK,GAAG,WAAW,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAErC,IAAI,SAAS,EAAE,CAAC;YACf,IAAI,CAAC,KAAK,GAAG,qBAAqB,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,KAAK,EAAE,YAAY,CAAC,CAAC;QAC5E,CAAC;aAAM,CAAC;YACP,IAAI,CAAC,KAAK,GAAG,mBAAmB,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAC5D,CAAC;QAED,IAAI,IAAI,CAAC,KAAK,EAAE,CAAC;YAChB,MAAM,eAAe,GAAG,IAAI,GAAG,CAC9B,IAAI,CAAC,KAAK;iBACR,MAAM,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,KAAK,eAAe,IAAI,OAAO,IAAI,CAAC,OAAO,KAAK,QAAQ,CAAC;iBACnF,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,OAAiB,CAAC,CACvC,CAAC;YAEF,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC;gBACrC,IAAI,IAAI,CAAC,IAAI,KAAK,sBAAsB,IAAI,OAAO,IAAI,CAAC,OAAO,KAAK,QAAQ,EAAE,CAAC;oBAC9E,MAAM,MAAM,GAAG,IAAI,CAAC,OAAiB,CAAC;oBACtC,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC;wBAClC,MAAM,UAAU,GAAG,IAA0C,CAAC;wBAC9D,MAAM,QAAQ,GAAG,OAAO,UAAU,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC;wBAChF,IAAI,IAAI,GAAG,EAAE,CAAC;wBACd,IAAI,CAAC;4BACJ,MAAM,MAAM,GAAG,UAAU,CAAC,MAAM,CAAC;4BACjC,IAAI,GAAG,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;wBACrE,CAAC;wBAAC,MAAM,CAAC;4BACR,IAAI,GAAG,MAAM,CAAC,UAAU,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;wBACxC,CAAC;wBACD,IAAI,IAAI,CAAC,MAAM,GAAG,KAAK,EAAE,CAAC;4BACzB,IAAI,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,CAAC,kBAAkB,CAAC;wBAClD,CAAC;wBACD,OAAO;4BACN,IAAI,EAAE,SAAS;4BACf,IAAI,EAAE,WAAW;4BACjB,OAAO,EAAE,aAAa,QAAQ,oBAAoB,MAAM,MAAM,IAAI,EAAE;yBACvD,CAAC;oBAChB,CAAC;gBACF,CAAC;gBACD,OAAO,IAAI,CAAC;YAAA,CACZ,CAAC,CAAC;QACJ,CAAC;IACF,CAAC;
IAED,MAAM,eAAe,GAAG,kBAAkB,CAAC,eAAe,EAAE,OAAO,CAAC,CAAC;IACrE,IAAI,CAAC,SAAS,GAAG;QAChB,GAAG,IAAI,CAAC,SAAS;QACjB,GAAG,eAAe;KAClB,CAAC;IAEF,IAAI,CAAC,IAAI,GAAG;QACX,GAAG,IAAI,CAAC,IAAI;QACZ,SAAS,EAAE,OAAO,CAAC,aAAa,IAAI,QAAQ;KAC5C,CAAC;IAEF,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,IAAI,CAAC,6BAA6B,CAAC,CAAC;IAElE,OAAO,IAAI,CAAC,iBAAiB,CAAC;IAC9B,OAAO,IAAI,CAAC,qBAAqB,CAAC;IAElC,OAAO,IAAI,CAAC;AAAA,CACZ","sourcesContent":["import { TOOL_REMAP_MESSAGE } from \"./prompts/codex.js\";\nimport { CODEX_PI_BRIDGE } from \"./prompts/pi-codex-bridge.js\";\n\nexport interface ReasoningConfig {\n\teffort: \"none\" | \"minimal\" | \"low\" | \"medium\" | \"high\" | \"xhigh\";\n\tsummary: \"auto\" | \"concise\" | \"detailed\" | \"off\" | \"on\";\n}\n\nexport interface CodexRequestOptions {\n\treasoningEffort?: ReasoningConfig[\"effort\"];\n\treasoningSummary?: ReasoningConfig[\"summary\"] | null;\n\ttextVerbosity?: \"low\" | \"medium\" | \"high\";\n\tinclude?: string[];\n}\n\nexport interface InputItem {\n\tid?: string | null;\n\ttype?: string | null;\n\trole?: string;\n\tcontent?: unknown;\n\tcall_id?: string | null;\n\tname?: string;\n\toutput?: unknown;\n\targuments?: string;\n}\n\nexport interface RequestBody {\n\tmodel: string;\n\tstore?: boolean;\n\tstream?: boolean;\n\tinstructions?: string;\n\tinput?: InputItem[];\n\ttools?: unknown;\n\ttemperature?: number;\n\treasoning?: Partial<ReasoningConfig>;\n\ttext?: {\n\t\tverbosity?: \"low\" | \"medium\" | \"high\";\n\t};\n\tinclude?: string[];\n\tprompt_cache_key?: string;\n\tmax_output_tokens?: number;\n\tmax_completion_tokens?: number;\n\t[key: string]: unknown;\n}\n\nconst MODEL_MAP: Record<string, string> = {\n\t\"gpt-5.1-codex\": \"gpt-5.1-codex\",\n\t\"gpt-5.1-codex-low\": \"gpt-5.1-codex\",\n\t\"gpt-5.1-codex-medium\": \"gpt-5.1-codex\",\n\t\"gpt-5.1-codex-high\": \"gpt-5.1-codex\",\n\t\"gpt-5.1-codex-max\": \"gpt-5.1-codex-max\",\n\t\"gpt-5.1-codex-max-low\": \"gpt-5.1-codex-max\",\n\t\"gpt-5.1-codex-max-medium\": 
\"gpt-5.1-codex-max\",\n\t\"gpt-5.1-codex-max-high\": \"gpt-5.1-codex-max\",\n\t\"gpt-5.1-codex-max-xhigh\": \"gpt-5.1-codex-max\",\n\t\"gpt-5.2\": \"gpt-5.2\",\n\t\"gpt-5.2-none\": \"gpt-5.2\",\n\t\"gpt-5.2-low\": \"gpt-5.2\",\n\t\"gpt-5.2-medium\": \"gpt-5.2\",\n\t\"gpt-5.2-high\": \"gpt-5.2\",\n\t\"gpt-5.2-xhigh\": \"gpt-5.2\",\n\t\"gpt-5.2-codex\": \"gpt-5.2-codex\",\n\t\"gpt-5.2-codex-low\": \"gpt-5.2-codex\",\n\t\"gpt-5.2-codex-medium\": \"gpt-5.2-codex\",\n\t\"gpt-5.2-codex-high\": \"gpt-5.2-codex\",\n\t\"gpt-5.2-codex-xhigh\": \"gpt-5.2-codex\",\n\t\"gpt-5.1-codex-mini\": \"gpt-5.1-codex-mini\",\n\t\"gpt-5.1-codex-mini-medium\": \"gpt-5.1-codex-mini\",\n\t\"gpt-5.1-codex-mini-high\": \"gpt-5.1-codex-mini\",\n\t\"gpt-5.1\": \"gpt-5.1\",\n\t\"gpt-5.1-none\": \"gpt-5.1\",\n\t\"gpt-5.1-low\": \"gpt-5.1\",\n\t\"gpt-5.1-medium\": \"gpt-5.1\",\n\t\"gpt-5.1-high\": \"gpt-5.1\",\n\t\"gpt-5.1-chat-latest\": \"gpt-5.1\",\n\t\"gpt-5-codex\": \"gpt-5.1-codex\",\n\t\"codex-mini-latest\": \"gpt-5.1-codex-mini\",\n\t\"gpt-5-codex-mini\": \"gpt-5.1-codex-mini\",\n\t\"gpt-5-codex-mini-medium\": \"gpt-5.1-codex-mini\",\n\t\"gpt-5-codex-mini-high\": \"gpt-5.1-codex-mini\",\n\t\"gpt-5\": \"gpt-5.1\",\n\t\"gpt-5-mini\": \"gpt-5.1\",\n\t\"gpt-5-nano\": \"gpt-5.1\",\n};\n\nfunction getNormalizedModel(modelId: string): string | undefined {\n\tif (MODEL_MAP[modelId]) return MODEL_MAP[modelId];\n\tconst lowerModelId = modelId.toLowerCase();\n\tconst match = Object.keys(MODEL_MAP).find((key) => key.toLowerCase() === lowerModelId);\n\treturn match ? MODEL_MAP[match] : undefined;\n}\n\nexport function normalizeModel(model: string | undefined): string {\n\tif (!model) return \"gpt-5.1\";\n\n\tconst modelId = model.includes(\"/\") ? model.split(\"/\").pop()! 
: model;\n\tconst mappedModel = getNormalizedModel(modelId);\n\tif (mappedModel) return mappedModel;\n\n\tconst normalized = modelId.toLowerCase();\n\n\tif (normalized.includes(\"gpt-5.2-codex\") || normalized.includes(\"gpt 5.2 codex\")) {\n\t\treturn \"gpt-5.2-codex\";\n\t}\n\tif (normalized.includes(\"gpt-5.2\") || normalized.includes(\"gpt 5.2\")) {\n\t\treturn \"gpt-5.2\";\n\t}\n\tif (normalized.includes(\"gpt-5.1-codex-max\") || normalized.includes(\"gpt 5.1 codex max\")) {\n\t\treturn \"gpt-5.1-codex-max\";\n\t}\n\tif (normalized.includes(\"gpt-5.1-codex-mini\") || normalized.includes(\"gpt 5.1 codex mini\")) {\n\t\treturn \"gpt-5.1-codex-mini\";\n\t}\n\tif (\n\t\tnormalized.includes(\"codex-mini-latest\") ||\n\t\tnormalized.includes(\"gpt-5-codex-mini\") ||\n\t\tnormalized.includes(\"gpt 5 codex mini\")\n\t) {\n\t\treturn \"codex-mini-latest\";\n\t}\n\tif (normalized.includes(\"gpt-5.1-codex\") || normalized.includes(\"gpt 5.1 codex\")) {\n\t\treturn \"gpt-5.1-codex\";\n\t}\n\tif (normalized.includes(\"gpt-5.1\") || normalized.includes(\"gpt 5.1\")) {\n\t\treturn \"gpt-5.1\";\n\t}\n\tif (normalized.includes(\"codex\")) {\n\t\treturn \"gpt-5.1-codex\";\n\t}\n\tif (normalized.includes(\"gpt-5\") || normalized.includes(\"gpt 5\")) {\n\t\treturn \"gpt-5.1\";\n\t}\n\n\treturn \"gpt-5.1\";\n}\n\nfunction getReasoningConfig(modelName: string | undefined, options: CodexRequestOptions = {}): ReasoningConfig {\n\tconst normalizedName = modelName?.toLowerCase() ?? 
\"\";\n\n\tconst isGpt52Codex = normalizedName.includes(\"gpt-5.2-codex\") || normalizedName.includes(\"gpt 5.2 codex\");\n\tconst isGpt52General = (normalizedName.includes(\"gpt-5.2\") || normalizedName.includes(\"gpt 5.2\")) && !isGpt52Codex;\n\tconst isCodexMax = normalizedName.includes(\"codex-max\") || normalizedName.includes(\"codex max\");\n\tconst isCodexMini =\n\t\tnormalizedName.includes(\"codex-mini\") ||\n\t\tnormalizedName.includes(\"codex mini\") ||\n\t\tnormalizedName.includes(\"codex_mini\") ||\n\t\tnormalizedName.includes(\"codex-mini-latest\");\n\tconst isCodex = normalizedName.includes(\"codex\") && !isCodexMini;\n\tconst isLightweight = !isCodexMini && (normalizedName.includes(\"nano\") || normalizedName.includes(\"mini\"));\n\tconst isGpt51General =\n\t\t(normalizedName.includes(\"gpt-5.1\") || normalizedName.includes(\"gpt 5.1\")) &&\n\t\t!isCodex &&\n\t\t!isCodexMax &&\n\t\t!isCodexMini;\n\n\tconst supportsXhigh = isGpt52General || isGpt52Codex || isCodexMax;\n\tconst supportsNone = isGpt52General || isGpt51General;\n\n\tconst defaultEffort: ReasoningConfig[\"effort\"] = isCodexMini\n\t\t? \"medium\"\n\t\t: supportsXhigh\n\t\t\t? \"high\"\n\t\t\t: isLightweight\n\t\t\t\t? \"minimal\"\n\t\t\t\t: \"medium\";\n\n\tlet effort = options.reasoningEffort || defaultEffort;\n\n\tif (isCodexMini) {\n\t\tif (effort === \"minimal\" || effort === \"low\" || effort === \"none\") {\n\t\t\teffort = \"medium\";\n\t\t}\n\t\tif (effort === \"xhigh\") {\n\t\t\teffort = \"high\";\n\t\t}\n\t\tif (effort !== \"high\" && effort !== \"medium\") {\n\t\t\teffort = \"medium\";\n\t\t}\n\t}\n\n\tif (!supportsXhigh && effort === \"xhigh\") {\n\t\teffort = \"high\";\n\t}\n\n\tif (!supportsNone && effort === \"none\") {\n\t\teffort = \"low\";\n\t}\n\n\tif (isCodex && effort === \"minimal\") {\n\t\teffort = \"low\";\n\t}\n\n\treturn {\n\t\teffort,\n\t\tsummary: options.reasoningSummary ?? 
\"auto\",\n\t};\n}\n\nfunction filterInput(input: InputItem[] | undefined): InputItem[] | undefined {\n\tif (!Array.isArray(input)) return input;\n\n\treturn input\n\t\t.filter((item) => item.type !== \"item_reference\")\n\t\t.map((item) => {\n\t\t\tif (item.id != null) {\n\t\t\t\tconst { id: _id, ...rest } = item;\n\t\t\t\treturn rest as InputItem;\n\t\t\t}\n\t\t\treturn item;\n\t\t});\n}\n\nfunction addCodexBridgeMessage(\n\tinput: InputItem[] | undefined,\n\thasTools: boolean,\n\tsystemPrompt?: string,\n): InputItem[] | undefined {\n\tif (!hasTools || !Array.isArray(input)) return input;\n\n\tconst bridgeText = systemPrompt ? `${CODEX_PI_BRIDGE}\\n\\n${systemPrompt}` : CODEX_PI_BRIDGE;\n\n\tconst bridgeMessage: InputItem = {\n\t\ttype: \"message\",\n\t\trole: \"developer\",\n\t\tcontent: [\n\t\t\t{\n\t\t\t\ttype: \"input_text\",\n\t\t\t\ttext: bridgeText,\n\t\t\t},\n\t\t],\n\t};\n\n\treturn [bridgeMessage, ...input];\n}\n\nfunction addToolRemapMessage(input: InputItem[] | undefined, hasTools: boolean): InputItem[] | undefined {\n\tif (!hasTools || !Array.isArray(input)) return input;\n\n\tconst toolRemapMessage: InputItem = {\n\t\ttype: \"message\",\n\t\trole: \"developer\",\n\t\tcontent: [\n\t\t\t{\n\t\t\t\ttype: \"input_text\",\n\t\t\t\ttext: TOOL_REMAP_MESSAGE,\n\t\t\t},\n\t\t],\n\t};\n\n\treturn [toolRemapMessage, ...input];\n}\n\nexport async function transformRequestBody(\n\tbody: RequestBody,\n\tcodexInstructions: string,\n\toptions: CodexRequestOptions = {},\n\tcodexMode = true,\n\tsystemPrompt?: string,\n): Promise<RequestBody> {\n\tconst normalizedModel = normalizeModel(body.model);\n\n\tbody.model = normalizedModel;\n\tbody.store = false;\n\tbody.stream = true;\n\tbody.instructions = codexInstructions;\n\n\tif (body.input && Array.isArray(body.input)) {\n\t\tbody.input = filterInput(body.input);\n\n\t\tif (codexMode) {\n\t\t\tbody.input = addCodexBridgeMessage(body.input, !!body.tools, systemPrompt);\n\t\t} else {\n\t\t\tbody.input = 
addToolRemapMessage(body.input, !!body.tools);\n\t\t}\n\n\t\tif (body.input) {\n\t\t\tconst functionCallIds = new Set(\n\t\t\t\tbody.input\n\t\t\t\t\t.filter((item) => item.type === \"function_call\" && typeof item.call_id === \"string\")\n\t\t\t\t\t.map((item) => item.call_id as string),\n\t\t\t);\n\n\t\t\tbody.input = body.input.map((item) => {\n\t\t\t\tif (item.type === \"function_call_output\" && typeof item.call_id === \"string\") {\n\t\t\t\t\tconst callId = item.call_id as string;\n\t\t\t\t\tif (!functionCallIds.has(callId)) {\n\t\t\t\t\t\tconst itemRecord = item as unknown as Record<string, unknown>;\n\t\t\t\t\t\tconst toolName = typeof itemRecord.name === \"string\" ? itemRecord.name : \"tool\";\n\t\t\t\t\t\tlet text = \"\";\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tconst output = itemRecord.output;\n\t\t\t\t\t\t\ttext = typeof output === \"string\" ? output : JSON.stringify(output);\n\t\t\t\t\t\t} catch {\n\t\t\t\t\t\t\ttext = String(itemRecord.output ?? \"\");\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (text.length > 16000) {\n\t\t\t\t\t\t\ttext = `${text.slice(0, 16000)}\\n...[truncated]`;\n\t\t\t\t\t\t}\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\ttype: \"message\",\n\t\t\t\t\t\t\trole: \"assistant\",\n\t\t\t\t\t\t\tcontent: `[Previous ${toolName} result; call_id=${callId}]: ${text}`,\n\t\t\t\t\t\t} as InputItem;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\treturn item;\n\t\t\t});\n\t\t}\n\t}\n\n\tconst reasoningConfig = getReasoningConfig(normalizedModel, options);\n\tbody.reasoning = {\n\t\t...body.reasoning,\n\t\t...reasoningConfig,\n\t};\n\n\tbody.text = {\n\t\t...body.text,\n\t\tverbosity: options.textVerbosity || \"medium\",\n\t};\n\n\tbody.include = options.include || [\"reasoning.encrypted_content\"];\n\n\tdelete body.max_output_tokens;\n\tdelete body.max_completion_tokens;\n\n\treturn body;\n}\n"]}
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
export type CodexRateLimit = {
|
|
2
|
+
used_percent?: number;
|
|
3
|
+
window_minutes?: number;
|
|
4
|
+
resets_at?: number;
|
|
5
|
+
};
|
|
6
|
+
export type CodexRateLimits = {
|
|
7
|
+
primary?: CodexRateLimit;
|
|
8
|
+
secondary?: CodexRateLimit;
|
|
9
|
+
};
|
|
10
|
+
export type CodexErrorInfo = {
|
|
11
|
+
message: string;
|
|
12
|
+
status: number;
|
|
13
|
+
friendlyMessage?: string;
|
|
14
|
+
rateLimits?: CodexRateLimits;
|
|
15
|
+
raw?: string;
|
|
16
|
+
};
|
|
17
|
+
export declare function parseCodexError(response: Response): Promise<CodexErrorInfo>;
|
|
18
|
+
export declare function parseCodexSseStream(response: Response): AsyncGenerator<Record<string, unknown>>;
|
|
19
|
+
//# sourceMappingURL=response-handler.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"response-handler.d.ts","sourceRoot":"","sources":["../../../src/providers/openai-codex/response-handler.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,cAAc,GAAG;IAC5B,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,SAAS,CAAC,EAAE,MAAM,CAAC;CACnB,CAAC;AAEF,MAAM,MAAM,eAAe,GAAG;IAC7B,OAAO,CAAC,EAAE,cAAc,CAAC;IACzB,SAAS,CAAC,EAAE,cAAc,CAAC;CAC3B,CAAC;AAEF,MAAM,MAAM,cAAc,GAAG;IAC5B,OAAO,EAAE,MAAM,CAAC;IAChB,MAAM,EAAE,MAAM,CAAC;IACf,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,UAAU,CAAC,EAAE,eAAe,CAAC;IAC7B,GAAG,CAAC,EAAE,MAAM,CAAC;CACb,CAAC;AAEF,wBAAsB,eAAe,CAAC,QAAQ,EAAE,QAAQ,GAAG,OAAO,CAAC,cAAc,CAAC,CAkDjF;AAED,wBAAuB,mBAAmB,CAAC,QAAQ,EAAE,QAAQ,GAAG,cAAc,CAAC,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CA4BtG","sourcesContent":["export type CodexRateLimit = {\n\tused_percent?: number;\n\twindow_minutes?: number;\n\tresets_at?: number;\n};\n\nexport type CodexRateLimits = {\n\tprimary?: CodexRateLimit;\n\tsecondary?: CodexRateLimit;\n};\n\nexport type CodexErrorInfo = {\n\tmessage: string;\n\tstatus: number;\n\tfriendlyMessage?: string;\n\trateLimits?: CodexRateLimits;\n\traw?: string;\n};\n\nexport async function parseCodexError(response: Response): Promise<CodexErrorInfo> {\n\tconst raw = await response.text();\n\tlet message = raw || response.statusText || \"Request failed\";\n\tlet friendlyMessage: string | undefined;\n\tlet rateLimits: CodexRateLimits | undefined;\n\n\ttry {\n\t\tconst parsed = JSON.parse(raw) as { error?: Record<string, unknown> };\n\t\tconst err = parsed?.error ?? 
{};\n\n\t\tconst headers = response.headers;\n\t\tconst primary = {\n\t\t\tused_percent: toNumber(headers.get(\"x-codex-primary-used-percent\")),\n\t\t\twindow_minutes: toInt(headers.get(\"x-codex-primary-window-minutes\")),\n\t\t\tresets_at: toInt(headers.get(\"x-codex-primary-reset-at\")),\n\t\t};\n\t\tconst secondary = {\n\t\t\tused_percent: toNumber(headers.get(\"x-codex-secondary-used-percent\")),\n\t\t\twindow_minutes: toInt(headers.get(\"x-codex-secondary-window-minutes\")),\n\t\t\tresets_at: toInt(headers.get(\"x-codex-secondary-reset-at\")),\n\t\t};\n\t\trateLimits =\n\t\t\tprimary.used_percent !== undefined || secondary.used_percent !== undefined\n\t\t\t\t? { primary, secondary }\n\t\t\t\t: undefined;\n\n\t\tconst code = String((err as { code?: string; type?: string }).code ?? (err as { type?: string }).type ?? \"\");\n\t\tconst resetsAt = (err as { resets_at?: number }).resets_at ?? primary.resets_at ?? secondary.resets_at;\n\t\tconst mins = resetsAt ? Math.max(0, Math.round((resetsAt * 1000 - Date.now()) / 60000)) : undefined;\n\n\t\tif (/usage_limit_reached|usage_not_included|rate_limit_exceeded/i.test(code) || response.status === 429) {\n\t\t\tconst planType = (err as { plan_type?: string }).plan_type;\n\t\t\tconst plan = planType ? ` (${String(planType).toLowerCase()} plan)` : \"\";\n\t\t\tconst when = mins !== undefined ? 
` Try again in ~${mins} min.` : \"\";\n\t\t\tfriendlyMessage = `You have hit your ChatGPT usage limit${plan}.${when}`.trim();\n\t\t}\n\n\t\tconst errMessage = (err as { message?: string }).message;\n\t\tmessage = errMessage || friendlyMessage || message;\n\t} catch {\n\t\t// raw body not JSON\n\t}\n\n\treturn {\n\t\tmessage,\n\t\tstatus: response.status,\n\t\tfriendlyMessage,\n\t\trateLimits,\n\t\traw: raw,\n\t};\n}\n\nexport async function* parseCodexSseStream(response: Response): AsyncGenerator<Record<string, unknown>> {\n\tif (!response.body) {\n\t\treturn;\n\t}\n\n\tconst reader = response.body.getReader();\n\tconst decoder = new TextDecoder();\n\tlet buffer = \"\";\n\n\twhile (true) {\n\t\tconst { done, value } = await reader.read();\n\t\tif (done) break;\n\t\tbuffer += decoder.decode(value, { stream: true });\n\n\t\tlet index = buffer.indexOf(\"\\n\\n\");\n\t\twhile (index !== -1) {\n\t\t\tconst chunk = buffer.slice(0, index);\n\t\t\tbuffer = buffer.slice(index + 2);\n\t\t\tconst event = parseSseChunk(chunk);\n\t\t\tif (event) yield event;\n\t\t\tindex = buffer.indexOf(\"\\n\\n\");\n\t\t}\n\t}\n\n\tif (buffer.trim()) {\n\t\tconst event = parseSseChunk(buffer);\n\t\tif (event) yield event;\n\t}\n}\n\nfunction parseSseChunk(chunk: string): Record<string, unknown> | null {\n\tconst lines = chunk.split(\"\\n\");\n\tconst dataLines: string[] = [];\n\n\tfor (const line of lines) {\n\t\tif (line.startsWith(\"data:\")) {\n\t\t\tdataLines.push(line.slice(5).trim());\n\t\t}\n\t}\n\n\tif (dataLines.length === 0) return null;\n\tconst data = dataLines.join(\"\\n\").trim();\n\tif (!data || data === \"[DONE]\") return null;\n\n\ttry {\n\t\treturn JSON.parse(data) as Record<string, unknown>;\n\t} catch {\n\t\treturn null;\n\t}\n}\n\nfunction toNumber(v: string | null): number | undefined {\n\tif (v == null) return undefined;\n\tconst n = Number(v);\n\treturn Number.isFinite(n) ? 
n : undefined;\n}\n\nfunction toInt(v: string | null): number | undefined {\n\tif (v == null) return undefined;\n\tconst n = parseInt(v, 10);\n\treturn Number.isFinite(n) ? n : undefined;\n}\n"]}
|
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
export async function parseCodexError(response) {
|
|
2
|
+
const raw = await response.text();
|
|
3
|
+
let message = raw || response.statusText || "Request failed";
|
|
4
|
+
let friendlyMessage;
|
|
5
|
+
let rateLimits;
|
|
6
|
+
try {
|
|
7
|
+
const parsed = JSON.parse(raw);
|
|
8
|
+
const err = parsed?.error ?? {};
|
|
9
|
+
const headers = response.headers;
|
|
10
|
+
const primary = {
|
|
11
|
+
used_percent: toNumber(headers.get("x-codex-primary-used-percent")),
|
|
12
|
+
window_minutes: toInt(headers.get("x-codex-primary-window-minutes")),
|
|
13
|
+
resets_at: toInt(headers.get("x-codex-primary-reset-at")),
|
|
14
|
+
};
|
|
15
|
+
const secondary = {
|
|
16
|
+
used_percent: toNumber(headers.get("x-codex-secondary-used-percent")),
|
|
17
|
+
window_minutes: toInt(headers.get("x-codex-secondary-window-minutes")),
|
|
18
|
+
resets_at: toInt(headers.get("x-codex-secondary-reset-at")),
|
|
19
|
+
};
|
|
20
|
+
rateLimits =
|
|
21
|
+
primary.used_percent !== undefined || secondary.used_percent !== undefined
|
|
22
|
+
? { primary, secondary }
|
|
23
|
+
: undefined;
|
|
24
|
+
const code = String(err.code ?? err.type ?? "");
|
|
25
|
+
const resetsAt = err.resets_at ?? primary.resets_at ?? secondary.resets_at;
|
|
26
|
+
const mins = resetsAt ? Math.max(0, Math.round((resetsAt * 1000 - Date.now()) / 60000)) : undefined;
|
|
27
|
+
if (/usage_limit_reached|usage_not_included|rate_limit_exceeded/i.test(code) || response.status === 429) {
|
|
28
|
+
const planType = err.plan_type;
|
|
29
|
+
const plan = planType ? ` (${String(planType).toLowerCase()} plan)` : "";
|
|
30
|
+
const when = mins !== undefined ? ` Try again in ~${mins} min.` : "";
|
|
31
|
+
friendlyMessage = `You have hit your ChatGPT usage limit${plan}.${when}`.trim();
|
|
32
|
+
}
|
|
33
|
+
const errMessage = err.message;
|
|
34
|
+
message = errMessage || friendlyMessage || message;
|
|
35
|
+
}
|
|
36
|
+
catch {
|
|
37
|
+
// raw body not JSON
|
|
38
|
+
}
|
|
39
|
+
return {
|
|
40
|
+
message,
|
|
41
|
+
status: response.status,
|
|
42
|
+
friendlyMessage,
|
|
43
|
+
rateLimits,
|
|
44
|
+
raw: raw,
|
|
45
|
+
};
|
|
46
|
+
}
|
|
47
|
+
export async function* parseCodexSseStream(response) {
|
|
48
|
+
if (!response.body) {
|
|
49
|
+
return;
|
|
50
|
+
}
|
|
51
|
+
const reader = response.body.getReader();
|
|
52
|
+
const decoder = new TextDecoder();
|
|
53
|
+
let buffer = "";
|
|
54
|
+
while (true) {
|
|
55
|
+
const { done, value } = await reader.read();
|
|
56
|
+
if (done)
|
|
57
|
+
break;
|
|
58
|
+
buffer += decoder.decode(value, { stream: true });
|
|
59
|
+
let index = buffer.indexOf("\n\n");
|
|
60
|
+
while (index !== -1) {
|
|
61
|
+
const chunk = buffer.slice(0, index);
|
|
62
|
+
buffer = buffer.slice(index + 2);
|
|
63
|
+
const event = parseSseChunk(chunk);
|
|
64
|
+
if (event)
|
|
65
|
+
yield event;
|
|
66
|
+
index = buffer.indexOf("\n\n");
|
|
67
|
+
}
|
|
68
|
+
}
|
|
69
|
+
if (buffer.trim()) {
|
|
70
|
+
const event = parseSseChunk(buffer);
|
|
71
|
+
if (event)
|
|
72
|
+
yield event;
|
|
73
|
+
}
|
|
74
|
+
}
|
|
75
|
+
function parseSseChunk(chunk) {
|
|
76
|
+
const lines = chunk.split("\n");
|
|
77
|
+
const dataLines = [];
|
|
78
|
+
for (const line of lines) {
|
|
79
|
+
if (line.startsWith("data:")) {
|
|
80
|
+
dataLines.push(line.slice(5).trim());
|
|
81
|
+
}
|
|
82
|
+
}
|
|
83
|
+
if (dataLines.length === 0)
|
|
84
|
+
return null;
|
|
85
|
+
const data = dataLines.join("\n").trim();
|
|
86
|
+
if (!data || data === "[DONE]")
|
|
87
|
+
return null;
|
|
88
|
+
try {
|
|
89
|
+
return JSON.parse(data);
|
|
90
|
+
}
|
|
91
|
+
catch {
|
|
92
|
+
return null;
|
|
93
|
+
}
|
|
94
|
+
}
|
|
95
|
+
function toNumber(v) {
|
|
96
|
+
if (v == null)
|
|
97
|
+
return undefined;
|
|
98
|
+
const n = Number(v);
|
|
99
|
+
return Number.isFinite(n) ? n : undefined;
|
|
100
|
+
}
|
|
101
|
+
function toInt(v) {
|
|
102
|
+
if (v == null)
|
|
103
|
+
return undefined;
|
|
104
|
+
const n = parseInt(v, 10);
|
|
105
|
+
return Number.isFinite(n) ? n : undefined;
|
|
106
|
+
}
|
|
107
|
+
//# sourceMappingURL=response-handler.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"response-handler.js","sourceRoot":"","sources":["../../../src/providers/openai-codex/response-handler.ts"],"names":[],"mappings":"AAmBA,MAAM,CAAC,KAAK,UAAU,eAAe,CAAC,QAAkB,EAA2B;IAClF,MAAM,GAAG,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC;IAClC,IAAI,OAAO,GAAG,GAAG,IAAI,QAAQ,CAAC,UAAU,IAAI,gBAAgB,CAAC;IAC7D,IAAI,eAAmC,CAAC;IACxC,IAAI,UAAuC,CAAC;IAE5C,IAAI,CAAC;QACJ,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAwC,CAAC;QACtE,MAAM,GAAG,GAAG,MAAM,EAAE,KAAK,IAAI,EAAE,CAAC;QAEhC,MAAM,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC;QACjC,MAAM,OAAO,GAAG;YACf,YAAY,EAAE,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,8BAA8B,CAAC,CAAC;YACnE,cAAc,EAAE,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,gCAAgC,CAAC,CAAC;YACpE,SAAS,EAAE,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,0BAA0B,CAAC,CAAC;SACzD,CAAC;QACF,MAAM,SAAS,GAAG;YACjB,YAAY,EAAE,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,gCAAgC,CAAC,CAAC;YACrE,cAAc,EAAE,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,kCAAkC,CAAC,CAAC;YACtE,SAAS,EAAE,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,4BAA4B,CAAC,CAAC;SAC3D,CAAC;QACF,UAAU;YACT,OAAO,CAAC,YAAY,KAAK,SAAS,IAAI,SAAS,CAAC,YAAY,KAAK,SAAS;gBACzE,CAAC,CAAC,EAAE,OAAO,EAAE,SAAS,EAAE;gBACxB,CAAC,CAAC,SAAS,CAAC;QAEd,MAAM,IAAI,GAAG,MAAM,CAAE,GAAwC,CAAC,IAAI,IAAK,GAAyB,CAAC,IAAI,IAAI,EAAE,CAAC,CAAC;QAC7G,MAAM,QAAQ,GAAI,GAA8B,CAAC,SAAS,IAAI,OAAO,CAAC,SAAS,IAAI,SAAS,CAAC,SAAS,CAAC;QACvG,MAAM,IAAI,GAAG,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,KAAK,CAAC,CAAC,QAAQ,GAAG,IAAI,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;QAEpG,IAAI,6DAA6D,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE,CAAC;YACzG,MAAM,QAAQ,GAAI,GAA8B,CAAC,SAAS,CAAC;YAC3D,MAAM,IAAI,GAAG,QAAQ,CAAC,CAAC,CAAC,KAAK,MAAM,CAAC,QAAQ,CAAC,CAAC,WAAW,EAAE,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC;YACzE,MAAM,IAAI,GAAG,IAAI,KAAK,SAAS,CAAC,CAAC,CAAC,kBAAkB,IAAI,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC;YACrE,eAAe,GAAG,wCAAwC,IAAI,IAAI,IAAI,EAAE,CAAC,IAAI,EAAE,CAAC;QACjF,CAAC;QAED,MAAM,UAAU,GAAI,GAA4B,CAAC,OAAO,CAAC;QACzD,OAAO,GAAG,UAAU,IAAI,eAAe,IAAI,OAAO,CAAC;IACpD,CAAC;IAAC,MAAM,CAAC;QACR,oBAAoB;IACrB,CAAC;IAED,O
AAO;QACN,OAAO;QACP,MAAM,EAAE,QAAQ,CAAC,MAAM;QACvB,eAAe;QACf,UAAU;QACV,GAAG,EAAE,GAAG;KACR,CAAC;AAAA,CACF;AAED,MAAM,CAAC,KAAK,SAAS,CAAC,CAAC,mBAAmB,CAAC,QAAkB,EAA2C;IACvG,IAAI,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC;QACpB,OAAO;IACR,CAAC;IAED,MAAM,MAAM,GAAG,QAAQ,CAAC,IAAI,CAAC,SAAS,EAAE,CAAC;IACzC,MAAM,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;IAClC,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,OAAO,IAAI,EAAE,CAAC;QACb,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,MAAM,MAAM,CAAC,IAAI,EAAE,CAAC;QAC5C,IAAI,IAAI;YAAE,MAAM;QAChB,MAAM,IAAI,OAAO,CAAC,MAAM,CAAC,KAAK,EAAE,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC,CAAC;QAElD,IAAI,KAAK,GAAG,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACnC,OAAO,KAAK,KAAK,CAAC,CAAC,EAAE,CAAC;YACrB,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC;YACrC,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC;YACjC,MAAM,KAAK,GAAG,aAAa,CAAC,KAAK,CAAC,CAAC;YACnC,IAAI,KAAK;gBAAE,MAAM,KAAK,CAAC;YACvB,KAAK,GAAG,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAChC,CAAC;IACF,CAAC;IAED,IAAI,MAAM,CAAC,IAAI,EAAE,EAAE,CAAC;QACnB,MAAM,KAAK,GAAG,aAAa,CAAC,MAAM,CAAC,CAAC;QACpC,IAAI,KAAK;YAAE,MAAM,KAAK,CAAC;IACxB,CAAC;AAAA,CACD;AAED,SAAS,aAAa,CAAC,KAAa,EAAkC;IACrE,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;IAChC,MAAM,SAAS,GAAa,EAAE,CAAC;IAE/B,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;QAC1B,IAAI,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE,CAAC;YAC9B,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC;QACtC,CAAC;IACF,CAAC;IAED,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC;QAAE,OAAO,IAAI,CAAC;IACxC,MAAM,IAAI,GAAG,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;IACzC,IAAI,CAAC,IAAI,IAAI,IAAI,KAAK,QAAQ;QAAE,OAAO,IAAI,CAAC;IAE5C,IAAI,CAAC;QACJ,OAAO,IAAI,CAAC,KAAK,CAAC,IAAI,CAA4B,CAAC;IACpD,CAAC;IAAC,MAAM,CAAC;QACR,OAAO,IAAI,CAAC;IACb,CAAC;AAAA,CACD;AAED,SAAS,QAAQ,CAAC,CAAgB,EAAsB;IACvD,IAAI,CAAC,IAAI,IAAI;QAAE,OAAO,SAAS,CAAC;IAChC,MAAM,CAAC,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;IACpB,OAAO,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;AAAA,CAC1C;AAED,SAAS,KAAK,CAAC,CAAgB,EAAsB;IACpD,IAAI,CAAC,IAAI,IAAI;QAAE,OAAO,SAAS,C
AAC;IAChC,MAAM,CAAC,GAAG,QAAQ,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;IAC1B,OAAO,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;AAAA,CAC1C","sourcesContent":["export type CodexRateLimit = {\n\tused_percent?: number;\n\twindow_minutes?: number;\n\tresets_at?: number;\n};\n\nexport type CodexRateLimits = {\n\tprimary?: CodexRateLimit;\n\tsecondary?: CodexRateLimit;\n};\n\nexport type CodexErrorInfo = {\n\tmessage: string;\n\tstatus: number;\n\tfriendlyMessage?: string;\n\trateLimits?: CodexRateLimits;\n\traw?: string;\n};\n\nexport async function parseCodexError(response: Response): Promise<CodexErrorInfo> {\n\tconst raw = await response.text();\n\tlet message = raw || response.statusText || \"Request failed\";\n\tlet friendlyMessage: string | undefined;\n\tlet rateLimits: CodexRateLimits | undefined;\n\n\ttry {\n\t\tconst parsed = JSON.parse(raw) as { error?: Record<string, unknown> };\n\t\tconst err = parsed?.error ?? {};\n\n\t\tconst headers = response.headers;\n\t\tconst primary = {\n\t\t\tused_percent: toNumber(headers.get(\"x-codex-primary-used-percent\")),\n\t\t\twindow_minutes: toInt(headers.get(\"x-codex-primary-window-minutes\")),\n\t\t\tresets_at: toInt(headers.get(\"x-codex-primary-reset-at\")),\n\t\t};\n\t\tconst secondary = {\n\t\t\tused_percent: toNumber(headers.get(\"x-codex-secondary-used-percent\")),\n\t\t\twindow_minutes: toInt(headers.get(\"x-codex-secondary-window-minutes\")),\n\t\t\tresets_at: toInt(headers.get(\"x-codex-secondary-reset-at\")),\n\t\t};\n\t\trateLimits =\n\t\t\tprimary.used_percent !== undefined || secondary.used_percent !== undefined\n\t\t\t\t? { primary, secondary }\n\t\t\t\t: undefined;\n\n\t\tconst code = String((err as { code?: string; type?: string }).code ?? (err as { type?: string }).type ?? \"\");\n\t\tconst resetsAt = (err as { resets_at?: number }).resets_at ?? primary.resets_at ?? secondary.resets_at;\n\t\tconst mins = resetsAt ? 
Math.max(0, Math.round((resetsAt * 1000 - Date.now()) / 60000)) : undefined;\n\n\t\tif (/usage_limit_reached|usage_not_included|rate_limit_exceeded/i.test(code) || response.status === 429) {\n\t\t\tconst planType = (err as { plan_type?: string }).plan_type;\n\t\t\tconst plan = planType ? ` (${String(planType).toLowerCase()} plan)` : \"\";\n\t\t\tconst when = mins !== undefined ? ` Try again in ~${mins} min.` : \"\";\n\t\t\tfriendlyMessage = `You have hit your ChatGPT usage limit${plan}.${when}`.trim();\n\t\t}\n\n\t\tconst errMessage = (err as { message?: string }).message;\n\t\tmessage = errMessage || friendlyMessage || message;\n\t} catch {\n\t\t// raw body not JSON\n\t}\n\n\treturn {\n\t\tmessage,\n\t\tstatus: response.status,\n\t\tfriendlyMessage,\n\t\trateLimits,\n\t\traw: raw,\n\t};\n}\n\nexport async function* parseCodexSseStream(response: Response): AsyncGenerator<Record<string, unknown>> {\n\tif (!response.body) {\n\t\treturn;\n\t}\n\n\tconst reader = response.body.getReader();\n\tconst decoder = new TextDecoder();\n\tlet buffer = \"\";\n\n\twhile (true) {\n\t\tconst { done, value } = await reader.read();\n\t\tif (done) break;\n\t\tbuffer += decoder.decode(value, { stream: true });\n\n\t\tlet index = buffer.indexOf(\"\\n\\n\");\n\t\twhile (index !== -1) {\n\t\t\tconst chunk = buffer.slice(0, index);\n\t\t\tbuffer = buffer.slice(index + 2);\n\t\t\tconst event = parseSseChunk(chunk);\n\t\t\tif (event) yield event;\n\t\t\tindex = buffer.indexOf(\"\\n\\n\");\n\t\t}\n\t}\n\n\tif (buffer.trim()) {\n\t\tconst event = parseSseChunk(buffer);\n\t\tif (event) yield event;\n\t}\n}\n\nfunction parseSseChunk(chunk: string): Record<string, unknown> | null {\n\tconst lines = chunk.split(\"\\n\");\n\tconst dataLines: string[] = [];\n\n\tfor (const line of lines) {\n\t\tif (line.startsWith(\"data:\")) {\n\t\t\tdataLines.push(line.slice(5).trim());\n\t\t}\n\t}\n\n\tif (dataLines.length === 0) return null;\n\tconst data = dataLines.join(\"\\n\").trim();\n\tif (!data || data 
=== \"[DONE]\") return null;\n\n\ttry {\n\t\treturn JSON.parse(data) as Record<string, unknown>;\n\t} catch {\n\t\treturn null;\n\t}\n}\n\nfunction toNumber(v: string | null): number | undefined {\n\tif (v == null) return undefined;\n\tconst n = Number(v);\n\treturn Number.isFinite(n) ? n : undefined;\n}\n\nfunction toInt(v: string | null): number | undefined {\n\tif (v == null) return undefined;\n\tconst n = parseInt(v, 10);\n\treturn Number.isFinite(n) ? n : undefined;\n}\n"]}
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import type { StreamFunction, StreamOptions } from "../types.js";
|
|
2
|
+
export interface OpenAICodexResponsesOptions extends StreamOptions {
|
|
3
|
+
reasoningEffort?: "none" | "minimal" | "low" | "medium" | "high" | "xhigh";
|
|
4
|
+
reasoningSummary?: "auto" | "concise" | "detailed" | "off" | "on" | null;
|
|
5
|
+
textVerbosity?: "low" | "medium" | "high";
|
|
6
|
+
include?: string[];
|
|
7
|
+
codexMode?: boolean;
|
|
8
|
+
}
|
|
9
|
+
export declare const streamOpenAICodexResponses: StreamFunction<"openai-codex-responses">;
|
|
10
|
+
//# sourceMappingURL=openai-codex-responses.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"openai-codex-responses.d.ts","sourceRoot":"","sources":["../../src/providers/openai-codex-responses.ts"],"names":[],"mappings":"AAWA,OAAO,KAAK,EAMX,cAAc,EACd,aAAa,EAKb,MAAM,aAAa,CAAC;AAqBrB,MAAM,WAAW,2BAA4B,SAAQ,aAAa;IACjE,eAAe,CAAC,EAAE,MAAM,GAAG,SAAS,GAAG,KAAK,GAAG,QAAQ,GAAG,MAAM,GAAG,OAAO,CAAC;IAC3E,gBAAgB,CAAC,EAAE,MAAM,GAAG,SAAS,GAAG,UAAU,GAAG,KAAK,GAAG,IAAI,GAAG,IAAI,CAAC;IACzE,aAAa,CAAC,EAAE,KAAK,GAAG,QAAQ,GAAG,MAAM,CAAC;IAC1C,OAAO,CAAC,EAAE,MAAM,EAAE,CAAC;IACnB,SAAS,CAAC,EAAE,OAAO,CAAC;CACpB;AAID,eAAO,MAAM,0BAA0B,EAAE,cAAc,CAAC,wBAAwB,CA4T/E,CAAC","sourcesContent":["import type {\n\tResponseFunctionToolCall,\n\tResponseInput,\n\tResponseInputContent,\n\tResponseInputImage,\n\tResponseInputText,\n\tResponseOutputMessage,\n\tResponseReasoningItem,\n} from \"openai/resources/responses/responses.js\";\nimport { calculateCost } from \"../models.js\";\nimport { getEnvApiKey } from \"../stream.js\";\nimport type {\n\tApi,\n\tAssistantMessage,\n\tContext,\n\tModel,\n\tStopReason,\n\tStreamFunction,\n\tStreamOptions,\n\tTextContent,\n\tThinkingContent,\n\tTool,\n\tToolCall,\n} from \"../types.js\";\nimport { AssistantMessageEventStream } from \"../utils/event-stream.js\";\nimport { parseStreamingJson } from \"../utils/json-parse.js\";\nimport { sanitizeSurrogates } from \"../utils/sanitize-unicode.js\";\nimport {\n\tCODEX_BASE_URL,\n\tJWT_CLAIM_PATH,\n\tOPENAI_HEADER_VALUES,\n\tOPENAI_HEADERS,\n\tURL_PATHS,\n} from \"./openai-codex/constants.js\";\nimport { getCodexInstructions } from \"./openai-codex/prompts/codex.js\";\nimport {\n\ttype CodexRequestOptions,\n\tnormalizeModel,\n\ttype RequestBody,\n\ttransformRequestBody,\n} from \"./openai-codex/request-transformer.js\";\nimport { parseCodexError, parseCodexSseStream } from \"./openai-codex/response-handler.js\";\nimport { transformMessages } from \"./transorm-messages.js\";\n\nexport interface OpenAICodexResponsesOptions extends StreamOptions {\n\treasoningEffort?: \"none\" | \"minimal\" | 
\"low\" | \"medium\" | \"high\" | \"xhigh\";\n\treasoningSummary?: \"auto\" | \"concise\" | \"detailed\" | \"off\" | \"on\" | null;\n\ttextVerbosity?: \"low\" | \"medium\" | \"high\";\n\tinclude?: string[];\n\tcodexMode?: boolean;\n}\n\nconst CODEX_DEBUG = process.env.PI_CODEX_DEBUG === \"1\" || process.env.PI_CODEX_DEBUG === \"true\";\n\nexport const streamOpenAICodexResponses: StreamFunction<\"openai-codex-responses\"> = (\n\tmodel: Model<\"openai-codex-responses\">,\n\tcontext: Context,\n\toptions?: OpenAICodexResponsesOptions,\n): AssistantMessageEventStream => {\n\tconst stream = new AssistantMessageEventStream();\n\n\t(async () => {\n\t\tconst output: AssistantMessage = {\n\t\t\trole: \"assistant\",\n\t\t\tcontent: [],\n\t\t\tapi: \"openai-codex-responses\" as Api,\n\t\t\tprovider: model.provider,\n\t\t\tmodel: model.id,\n\t\t\tusage: {\n\t\t\t\tinput: 0,\n\t\t\t\toutput: 0,\n\t\t\t\tcacheRead: 0,\n\t\t\t\tcacheWrite: 0,\n\t\t\t\ttotalTokens: 0,\n\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t\t},\n\t\t\tstopReason: \"stop\",\n\t\t\ttimestamp: Date.now(),\n\t\t};\n\n\t\ttry {\n\t\t\tconst apiKey = options?.apiKey || getEnvApiKey(model.provider) || \"\";\n\t\t\tif (!apiKey) {\n\t\t\t\tthrow new Error(`No API key for provider: ${model.provider}`);\n\t\t\t}\n\n\t\t\tconst accountId = getAccountId(apiKey);\n\t\t\tconst baseUrl = model.baseUrl || CODEX_BASE_URL;\n\t\t\tconst baseWithSlash = baseUrl.endsWith(\"/\") ? 
baseUrl : `${baseUrl}/`;\n\t\t\tconst url = rewriteUrlForCodex(new URL(URL_PATHS.RESPONSES.slice(1), baseWithSlash).toString());\n\n\t\t\tconst messages = convertMessages(model, context);\n\t\t\tconst params: RequestBody = {\n\t\t\t\tmodel: model.id,\n\t\t\t\tinput: messages,\n\t\t\t\tstream: true,\n\t\t\t};\n\n\t\t\tif (options?.maxTokens) {\n\t\t\t\tparams.max_output_tokens = options.maxTokens;\n\t\t\t}\n\n\t\t\tif (options?.temperature !== undefined) {\n\t\t\t\tparams.temperature = options.temperature;\n\t\t\t}\n\n\t\t\tif (context.tools) {\n\t\t\t\tparams.tools = convertTools(context.tools);\n\t\t\t}\n\n\t\t\tconst normalizedModel = normalizeModel(params.model);\n\t\t\tconst codexInstructions = await getCodexInstructions(normalizedModel);\n\n\t\t\tconst codexOptions: CodexRequestOptions = {\n\t\t\t\treasoningEffort: options?.reasoningEffort,\n\t\t\t\treasoningSummary: options?.reasoningSummary ?? undefined,\n\t\t\t\ttextVerbosity: options?.textVerbosity,\n\t\t\t\tinclude: options?.include,\n\t\t\t};\n\n\t\t\tconst transformedBody = await transformRequestBody(\n\t\t\t\tparams,\n\t\t\t\tcodexInstructions,\n\t\t\t\tcodexOptions,\n\t\t\t\toptions?.codexMode ?? 
true,\n\t\t\t\tcontext.systemPrompt,\n\t\t\t);\n\n\t\t\tconst headers = createCodexHeaders(model.headers, accountId, apiKey, transformedBody.prompt_cache_key);\n\t\t\tlogCodexDebug(\"codex request\", {\n\t\t\t\turl,\n\t\t\t\tmodel: params.model,\n\t\t\t\theaders: redactHeaders(headers),\n\t\t\t});\n\n\t\t\tconst response = await fetch(url, {\n\t\t\t\tmethod: \"POST\",\n\t\t\t\theaders,\n\t\t\t\tbody: JSON.stringify(transformedBody),\n\t\t\t\tsignal: options?.signal,\n\t\t\t});\n\n\t\t\tlogCodexDebug(\"codex response\", {\n\t\t\t\turl: response.url,\n\t\t\t\tstatus: response.status,\n\t\t\t\tstatusText: response.statusText,\n\t\t\t\tcontentType: response.headers.get(\"content-type\") || null,\n\t\t\t\tcfRay: response.headers.get(\"cf-ray\") || null,\n\t\t\t});\n\n\t\t\tif (!response.ok) {\n\t\t\t\tconst info = await parseCodexError(response);\n\t\t\t\tthrow new Error(info.friendlyMessage || info.message);\n\t\t\t}\n\n\t\t\tif (!response.body) {\n\t\t\t\tthrow new Error(\"No response body\");\n\t\t\t}\n\n\t\t\tstream.push({ type: \"start\", partial: output });\n\n\t\t\tlet currentItem: ResponseReasoningItem | ResponseOutputMessage | ResponseFunctionToolCall | null = null;\n\t\t\tlet currentBlock: ThinkingContent | TextContent | (ToolCall & { partialJson: string }) | null = null;\n\t\t\tconst blocks = output.content;\n\t\t\tconst blockIndex = () => blocks.length - 1;\n\n\t\t\tfor await (const rawEvent of parseCodexSseStream(response)) {\n\t\t\t\tconst eventType = typeof rawEvent.type === \"string\" ? 
rawEvent.type : \"\";\n\t\t\t\tif (!eventType) continue;\n\n\t\t\t\tif (eventType === \"response.output_item.added\") {\n\t\t\t\t\tconst item = rawEvent.item as ResponseReasoningItem | ResponseOutputMessage | ResponseFunctionToolCall;\n\t\t\t\t\tif (item.type === \"reasoning\") {\n\t\t\t\t\t\tcurrentItem = item;\n\t\t\t\t\t\tcurrentBlock = { type: \"thinking\", thinking: \"\" };\n\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\tstream.push({ type: \"thinking_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t} else if (item.type === \"message\") {\n\t\t\t\t\t\tcurrentItem = item;\n\t\t\t\t\t\tcurrentBlock = { type: \"text\", text: \"\" };\n\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\tstream.push({ type: \"text_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t} else if (item.type === \"function_call\") {\n\t\t\t\t\t\tcurrentItem = item;\n\t\t\t\t\t\tcurrentBlock = {\n\t\t\t\t\t\t\ttype: \"toolCall\",\n\t\t\t\t\t\t\tid: `${item.call_id}|${item.id}`,\n\t\t\t\t\t\t\tname: item.name,\n\t\t\t\t\t\t\targuments: {},\n\t\t\t\t\t\t\tpartialJson: item.arguments || \"\",\n\t\t\t\t\t\t};\n\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\tstream.push({ type: \"toolcall_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"response.reasoning_summary_part.added\") {\n\t\t\t\t\tif (currentItem && currentItem.type === \"reasoning\") {\n\t\t\t\t\t\tcurrentItem.summary = currentItem.summary || [];\n\t\t\t\t\t\tcurrentItem.summary.push((rawEvent as { part: ResponseReasoningItem[\"summary\"][number] }).part);\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"response.reasoning_summary_text.delta\") {\n\t\t\t\t\tif (currentItem && currentItem.type === \"reasoning\" && currentBlock?.type === \"thinking\") {\n\t\t\t\t\t\tcurrentItem.summary = currentItem.summary || [];\n\t\t\t\t\t\tconst lastPart = currentItem.summary[currentItem.summary.length - 1];\n\t\t\t\t\t\tif 
(lastPart) {\n\t\t\t\t\t\t\tconst delta = (rawEvent as { delta?: string }).delta || \"\";\n\t\t\t\t\t\t\tcurrentBlock.thinking += delta;\n\t\t\t\t\t\t\tlastPart.text += delta;\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"thinking_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\tdelta,\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"response.reasoning_summary_part.done\") {\n\t\t\t\t\tif (currentItem && currentItem.type === \"reasoning\" && currentBlock?.type === \"thinking\") {\n\t\t\t\t\t\tcurrentItem.summary = currentItem.summary || [];\n\t\t\t\t\t\tconst lastPart = currentItem.summary[currentItem.summary.length - 1];\n\t\t\t\t\t\tif (lastPart) {\n\t\t\t\t\t\t\tcurrentBlock.thinking += \"\\n\\n\";\n\t\t\t\t\t\t\tlastPart.text += \"\\n\\n\";\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"thinking_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\tdelta: \"\\n\\n\",\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"response.content_part.added\") {\n\t\t\t\t\tif (currentItem && currentItem.type === \"message\") {\n\t\t\t\t\t\tcurrentItem.content = currentItem.content || [];\n\t\t\t\t\t\tconst part = (rawEvent as { part?: ResponseOutputMessage[\"content\"][number] }).part;\n\t\t\t\t\t\tif (part && (part.type === \"output_text\" || part.type === \"refusal\")) {\n\t\t\t\t\t\t\tcurrentItem.content.push(part);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"response.output_text.delta\") {\n\t\t\t\t\tif (currentItem && currentItem.type === \"message\" && currentBlock?.type === \"text\") {\n\t\t\t\t\t\tconst lastPart = currentItem.content[currentItem.content.length - 1];\n\t\t\t\t\t\tif (lastPart && lastPart.type === \"output_text\") {\n\t\t\t\t\t\t\tconst delta = (rawEvent as { delta?: string }).delta || \"\";\n\t\t\t\t\t\t\tcurrentBlock.text += 
delta;\n\t\t\t\t\t\t\tlastPart.text += delta;\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"text_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\tdelta,\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"response.refusal.delta\") {\n\t\t\t\t\tif (currentItem && currentItem.type === \"message\" && currentBlock?.type === \"text\") {\n\t\t\t\t\t\tconst lastPart = currentItem.content[currentItem.content.length - 1];\n\t\t\t\t\t\tif (lastPart && lastPart.type === \"refusal\") {\n\t\t\t\t\t\t\tconst delta = (rawEvent as { delta?: string }).delta || \"\";\n\t\t\t\t\t\t\tcurrentBlock.text += delta;\n\t\t\t\t\t\t\tlastPart.refusal += delta;\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"text_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\tdelta,\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"response.function_call_arguments.delta\") {\n\t\t\t\t\tif (currentItem && currentItem.type === \"function_call\" && currentBlock?.type === \"toolCall\") {\n\t\t\t\t\t\tconst delta = (rawEvent as { delta?: string }).delta || \"\";\n\t\t\t\t\t\tcurrentBlock.partialJson += delta;\n\t\t\t\t\t\tcurrentBlock.arguments = parseStreamingJson(currentBlock.partialJson);\n\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\ttype: \"toolcall_delta\",\n\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\tdelta,\n\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t});\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"response.output_item.done\") {\n\t\t\t\t\tconst item = rawEvent.item as ResponseReasoningItem | ResponseOutputMessage | ResponseFunctionToolCall;\n\t\t\t\t\tif (item.type === \"reasoning\" && currentBlock?.type === \"thinking\") {\n\t\t\t\t\t\tcurrentBlock.thinking = item.summary?.map((s) => s.text).join(\"\\n\\n\") || \"\";\n\t\t\t\t\t\tcurrentBlock.thinkingSignature = 
JSON.stringify(item);\n\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\ttype: \"thinking_end\",\n\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\tcontent: currentBlock.thinking,\n\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t});\n\t\t\t\t\t\tcurrentBlock = null;\n\t\t\t\t\t} else if (item.type === \"message\" && currentBlock?.type === \"text\") {\n\t\t\t\t\t\tcurrentBlock.text = item.content.map((c) => (c.type === \"output_text\" ? c.text : c.refusal)).join(\"\");\n\t\t\t\t\t\tcurrentBlock.textSignature = item.id;\n\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\ttype: \"text_end\",\n\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\tcontent: currentBlock.text,\n\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t});\n\t\t\t\t\t\tcurrentBlock = null;\n\t\t\t\t\t} else if (item.type === \"function_call\") {\n\t\t\t\t\t\tconst toolCall: ToolCall = {\n\t\t\t\t\t\t\ttype: \"toolCall\",\n\t\t\t\t\t\t\tid: `${item.call_id}|${item.id}`,\n\t\t\t\t\t\t\tname: item.name,\n\t\t\t\t\t\t\targuments: JSON.parse(item.arguments),\n\t\t\t\t\t\t};\n\t\t\t\t\t\tstream.push({ type: \"toolcall_end\", contentIndex: blockIndex(), toolCall, partial: output });\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"response.completed\" || eventType === \"response.done\") {\n\t\t\t\t\tconst response = (\n\t\t\t\t\t\trawEvent as {\n\t\t\t\t\t\t\tresponse?: {\n\t\t\t\t\t\t\t\tusage?: {\n\t\t\t\t\t\t\t\t\tinput_tokens?: number;\n\t\t\t\t\t\t\t\t\toutput_tokens?: number;\n\t\t\t\t\t\t\t\t\ttotal_tokens?: number;\n\t\t\t\t\t\t\t\t\tinput_tokens_details?: { cached_tokens?: number };\n\t\t\t\t\t\t\t\t};\n\t\t\t\t\t\t\t\tstatus?: string;\n\t\t\t\t\t\t\t};\n\t\t\t\t\t\t}\n\t\t\t\t\t).response;\n\t\t\t\t\tif (response?.usage) {\n\t\t\t\t\t\tconst cachedTokens = response.usage.input_tokens_details?.cached_tokens || 0;\n\t\t\t\t\t\toutput.usage = {\n\t\t\t\t\t\t\tinput: (response.usage.input_tokens || 0) - cachedTokens,\n\t\t\t\t\t\t\toutput: response.usage.output_tokens || 0,\n\t\t\t\t\t\t\tcacheRead: 
cachedTokens,\n\t\t\t\t\t\t\tcacheWrite: 0,\n\t\t\t\t\t\t\ttotalTokens: response.usage.total_tokens || 0,\n\t\t\t\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t\t\t\t\t};\n\t\t\t\t\t}\n\t\t\t\t\tcalculateCost(model, output.usage);\n\t\t\t\t\toutput.stopReason = mapStopReason(response?.status);\n\t\t\t\t\tif (output.content.some((b) => b.type === \"toolCall\") && output.stopReason === \"stop\") {\n\t\t\t\t\t\toutput.stopReason = \"toolUse\";\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"error\") {\n\t\t\t\t\tconst code = (rawEvent as { code?: string }).code || \"\";\n\t\t\t\t\tconst message = (rawEvent as { message?: string }).message || \"Unknown error\";\n\t\t\t\t\tthrow new Error(code ? `Error Code ${code}: ${message}` : message);\n\t\t\t\t} else if (eventType === \"response.failed\") {\n\t\t\t\t\tthrow new Error(\"Unknown error\");\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (options?.signal?.aborted) {\n\t\t\t\tthrow new Error(\"Request was aborted\");\n\t\t\t}\n\n\t\t\tif (output.stopReason === \"aborted\" || output.stopReason === \"error\") {\n\t\t\t\tthrow new Error(\"An unknown error occurred\");\n\t\t\t}\n\n\t\t\tstream.push({ type: \"done\", reason: output.stopReason, message: output });\n\t\t\tstream.end();\n\t\t} catch (error) {\n\t\t\tfor (const block of output.content) delete (block as { index?: number }).index;\n\t\t\toutput.stopReason = options?.signal?.aborted ? \"aborted\" : \"error\";\n\t\t\toutput.errorMessage = error instanceof Error ? error.message : JSON.stringify(error);\n\t\t\tstream.push({ type: \"error\", reason: output.stopReason, error: output });\n\t\t\tstream.end();\n\t\t}\n\t})();\n\n\treturn stream;\n};\n\nfunction createCodexHeaders(\n\tinitHeaders: Record<string, string> | undefined,\n\taccountId: string,\n\taccessToken: string,\n\tpromptCacheKey?: string,\n): Headers {\n\tconst headers = new Headers(initHeaders ?? 
{});\n\theaders.delete(\"x-api-key\");\n\theaders.set(\"Authorization\", `Bearer ${accessToken}`);\n\theaders.set(OPENAI_HEADERS.ACCOUNT_ID, accountId);\n\theaders.set(OPENAI_HEADERS.BETA, OPENAI_HEADER_VALUES.BETA_RESPONSES);\n\theaders.set(OPENAI_HEADERS.ORIGINATOR, OPENAI_HEADER_VALUES.ORIGINATOR_CODEX);\n\n\tif (promptCacheKey) {\n\t\theaders.set(OPENAI_HEADERS.CONVERSATION_ID, promptCacheKey);\n\t\theaders.set(OPENAI_HEADERS.SESSION_ID, promptCacheKey);\n\t} else {\n\t\theaders.delete(OPENAI_HEADERS.CONVERSATION_ID);\n\t\theaders.delete(OPENAI_HEADERS.SESSION_ID);\n\t}\n\n\theaders.set(\"accept\", \"text/event-stream\");\n\theaders.set(\"content-type\", \"application/json\");\n\treturn headers;\n}\n\nfunction logCodexDebug(message: string, details?: Record<string, unknown>): void {\n\tif (!CODEX_DEBUG) return;\n\tif (details) {\n\t\tconsole.error(`[codex] ${message}`, details);\n\t\treturn;\n\t}\n\tconsole.error(`[codex] ${message}`);\n}\n\nfunction redactHeaders(headers: Headers): Record<string, string> {\n\tconst redacted: Record<string, string> = {};\n\tfor (const [key, value] of headers.entries()) {\n\t\tconst lower = key.toLowerCase();\n\t\tif (lower === \"authorization\") {\n\t\t\tredacted[key] = \"Bearer [redacted]\";\n\t\t\tcontinue;\n\t\t}\n\t\tif (\n\t\t\tlower.includes(\"account\") ||\n\t\t\tlower.includes(\"session\") ||\n\t\t\tlower.includes(\"conversation\") ||\n\t\t\tlower === \"cookie\"\n\t\t) {\n\t\t\tredacted[key] = \"[redacted]\";\n\t\t\tcontinue;\n\t\t}\n\t\tredacted[key] = value;\n\t}\n\treturn redacted;\n}\n\nfunction rewriteUrlForCodex(url: string): string {\n\treturn url.replace(URL_PATHS.RESPONSES, URL_PATHS.CODEX_RESPONSES);\n}\n\ntype JwtPayload = {\n\t[JWT_CLAIM_PATH]?: {\n\t\tchatgpt_account_id?: string;\n\t};\n\t[key: string]: unknown;\n};\n\nfunction decodeJwt(token: string): JwtPayload | null {\n\ttry {\n\t\tconst parts = token.split(\".\");\n\t\tif (parts.length !== 3) return null;\n\t\tconst payload = parts[1] ?? 
\"\";\n\t\tconst decoded = Buffer.from(payload, \"base64\").toString(\"utf-8\");\n\t\treturn JSON.parse(decoded) as JwtPayload;\n\t} catch {\n\t\treturn null;\n\t}\n}\n\nfunction getAccountId(accessToken: string): string {\n\tconst payload = decodeJwt(accessToken);\n\tconst auth = payload?.[JWT_CLAIM_PATH];\n\tconst accountId = auth?.chatgpt_account_id;\n\tif (!accountId) {\n\t\tthrow new Error(\"Failed to extract accountId from token\");\n\t}\n\treturn accountId;\n}\n\nfunction shortHash(str: string): string {\n\tlet h1 = 0xdeadbeef;\n\tlet h2 = 0x41c6ce57;\n\tfor (let i = 0; i < str.length; i++) {\n\t\tconst ch = str.charCodeAt(i);\n\t\th1 = Math.imul(h1 ^ ch, 2654435761);\n\t\th2 = Math.imul(h2 ^ ch, 1597334677);\n\t}\n\th1 = Math.imul(h1 ^ (h1 >>> 16), 2246822507) ^ Math.imul(h2 ^ (h2 >>> 13), 3266489909);\n\th2 = Math.imul(h2 ^ (h2 >>> 16), 2246822507) ^ Math.imul(h1 ^ (h1 >>> 13), 3266489909);\n\treturn (h2 >>> 0).toString(36) + (h1 >>> 0).toString(36);\n}\n\nfunction convertMessages(model: Model<\"openai-codex-responses\">, context: Context): ResponseInput {\n\tconst messages: ResponseInput = [];\n\n\tconst transformedMessages = transformMessages(context.messages, model);\n\n\tlet msgIndex = 0;\n\tfor (const msg of transformedMessages) {\n\t\tif (msg.role === \"user\") {\n\t\t\tif (typeof msg.content === \"string\") {\n\t\t\t\tmessages.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: [{ type: \"input_text\", text: sanitizeSurrogates(msg.content) }],\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\tconst content: ResponseInputContent[] = msg.content.map((item): ResponseInputContent => {\n\t\t\t\t\tif (item.type === \"text\") {\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\ttype: \"input_text\",\n\t\t\t\t\t\t\ttext: sanitizeSurrogates(item.text),\n\t\t\t\t\t\t} satisfies ResponseInputText;\n\t\t\t\t\t}\n\t\t\t\t\treturn {\n\t\t\t\t\t\ttype: \"input_image\",\n\t\t\t\t\t\tdetail: \"auto\",\n\t\t\t\t\t\timage_url: `data:${item.mimeType};base64,${item.data}`,\n\t\t\t\t\t} 
satisfies ResponseInputImage;\n\t\t\t\t});\n\t\t\t\tconst filteredContent = !model.input.includes(\"image\")\n\t\t\t\t\t? content.filter((c) => c.type !== \"input_image\")\n\t\t\t\t\t: content;\n\t\t\t\tif (filteredContent.length === 0) continue;\n\t\t\t\tmessages.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: filteredContent,\n\t\t\t\t});\n\t\t\t}\n\t\t} else if (msg.role === \"assistant\") {\n\t\t\tconst output: ResponseInput = [];\n\n\t\t\tfor (const block of msg.content) {\n\t\t\t\tif (block.type === \"thinking\" && msg.stopReason !== \"error\") {\n\t\t\t\t\tif (block.thinkingSignature) {\n\t\t\t\t\t\tconst reasoningItem = JSON.parse(block.thinkingSignature) as ResponseReasoningItem;\n\t\t\t\t\t\toutput.push(reasoningItem);\n\t\t\t\t\t}\n\t\t\t\t} else if (block.type === \"text\") {\n\t\t\t\t\tconst textBlock = block as TextContent;\n\t\t\t\t\tlet msgId = textBlock.textSignature;\n\t\t\t\t\tif (!msgId) {\n\t\t\t\t\t\tmsgId = `msg_${msgIndex}`;\n\t\t\t\t\t} else if (msgId.length > 64) {\n\t\t\t\t\t\tmsgId = `msg_${shortHash(msgId)}`;\n\t\t\t\t\t}\n\t\t\t\t\toutput.push({\n\t\t\t\t\t\ttype: \"message\",\n\t\t\t\t\t\trole: \"assistant\",\n\t\t\t\t\t\tcontent: [{ type: \"output_text\", text: sanitizeSurrogates(textBlock.text), annotations: [] }],\n\t\t\t\t\t\tstatus: \"completed\",\n\t\t\t\t\t\tid: msgId,\n\t\t\t\t\t} satisfies ResponseOutputMessage);\n\t\t\t\t} else if (block.type === \"toolCall\" && msg.stopReason !== \"error\") {\n\t\t\t\t\tconst toolCall = block as ToolCall;\n\t\t\t\t\toutput.push({\n\t\t\t\t\t\ttype: \"function_call\",\n\t\t\t\t\t\tid: toolCall.id.split(\"|\")[1],\n\t\t\t\t\t\tcall_id: toolCall.id.split(\"|\")[0],\n\t\t\t\t\t\tname: toolCall.name,\n\t\t\t\t\t\targuments: JSON.stringify(toolCall.arguments),\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t}\n\t\t\tif (output.length === 0) continue;\n\t\t\tmessages.push(...output);\n\t\t} else if (msg.role === \"toolResult\") {\n\t\t\tconst textResult = msg.content\n\t\t\t\t.filter((c) => c.type === 
\"text\")\n\t\t\t\t.map((c) => (c as { text: string }).text)\n\t\t\t\t.join(\"\\n\");\n\t\t\tconst hasImages = msg.content.some((c) => c.type === \"image\");\n\n\t\t\tconst hasText = textResult.length > 0;\n\t\t\tmessages.push({\n\t\t\t\ttype: \"function_call_output\",\n\t\t\t\tcall_id: msg.toolCallId.split(\"|\")[0],\n\t\t\t\toutput: sanitizeSurrogates(hasText ? textResult : \"(see attached image)\"),\n\t\t\t});\n\n\t\t\tif (hasImages && model.input.includes(\"image\")) {\n\t\t\t\tconst contentParts: ResponseInputContent[] = [];\n\t\t\t\tcontentParts.push({\n\t\t\t\t\ttype: \"input_text\",\n\t\t\t\t\ttext: \"Attached image(s) from tool result:\",\n\t\t\t\t} satisfies ResponseInputText);\n\n\t\t\t\tfor (const block of msg.content) {\n\t\t\t\t\tif (block.type === \"image\") {\n\t\t\t\t\t\tcontentParts.push({\n\t\t\t\t\t\t\ttype: \"input_image\",\n\t\t\t\t\t\t\tdetail: \"auto\",\n\t\t\t\t\t\t\timage_url: `data:${block.mimeType};base64,${block.data}`,\n\t\t\t\t\t\t} satisfies ResponseInputImage);\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tmessages.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: contentParts,\n\t\t\t\t});\n\t\t\t}\n\t\t}\n\t\tmsgIndex++;\n\t}\n\n\treturn messages;\n}\n\nfunction convertTools(\n\ttools: Tool[],\n): Array<{ type: \"function\"; name: string; description: string; parameters: Record<string, unknown>; strict: null }> {\n\treturn tools.map((tool) => ({\n\t\ttype: \"function\",\n\t\tname: tool.name,\n\t\tdescription: tool.description,\n\t\tparameters: tool.parameters as unknown as Record<string, unknown>,\n\t\tstrict: null,\n\t}));\n}\n\nfunction mapStopReason(status: string | undefined): StopReason {\n\tif (!status) return \"stop\";\n\tswitch (status) {\n\t\tcase \"completed\":\n\t\t\treturn \"stop\";\n\t\tcase \"incomplete\":\n\t\t\treturn \"length\";\n\t\tcase \"failed\":\n\t\tcase \"cancelled\":\n\t\t\treturn \"error\";\n\t\tcase \"in_progress\":\n\t\tcase \"queued\":\n\t\t\treturn \"stop\";\n\t\tdefault:\n\t\t\treturn \"stop\";\n\t}\n}\n"]}
|