@mariozechner/pi-ai 0.37.8 → 0.38.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/models.generated.d.ts +17 -136
- package/dist/models.generated.d.ts.map +1 -1
- package/dist/models.generated.js +22 -141
- package/dist/models.generated.js.map +1 -1
- package/dist/providers/openai-codex/prompts/codex.d.ts +2 -2
- package/dist/providers/openai-codex/prompts/codex.d.ts.map +1 -1
- package/dist/providers/openai-codex/prompts/codex.js +7 -7
- package/dist/providers/openai-codex/prompts/codex.js.map +1 -1
- package/dist/providers/openai-codex/request-transformer.d.ts +0 -1
- package/dist/providers/openai-codex/request-transformer.d.ts.map +1 -1
- package/dist/providers/openai-codex/request-transformer.js +12 -130
- package/dist/providers/openai-codex/request-transformer.js.map +1 -1
- package/dist/providers/openai-codex-responses.d.ts.map +1 -1
- package/dist/providers/openai-codex-responses.js +65 -8
- package/dist/providers/openai-codex-responses.js.map +1 -1
- package/dist/stream.d.ts.map +1 -1
- package/dist/stream.js +13 -6
- package/dist/stream.js.map +1 -1
- package/dist/types.d.ts +9 -0
- package/dist/types.d.ts.map +1 -1
- package/dist/types.js.map +1 -1
- package/dist/utils/overflow.d.ts.map +1 -1
- package/dist/utils/overflow.js +1 -1
- package/dist/utils/overflow.js.map +1 -1
- package/package.json +1 -1
@@ -5,6 +5,6 @@ export type CacheMetadata = {
     lastChecked: number;
     url: string;
 };
-export declare function getModelFamily(normalizedModel: string): ModelFamily;
-export declare function getCodexInstructions(normalizedModel?: string): Promise<string>;
+export declare function getModelFamily(model: string): ModelFamily;
+export declare function getCodexInstructions(model?: string): Promise<string>;
 //# sourceMappingURL=codex.d.ts.map
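Note: both parameters stay plain strings, so existing call sites keep compiling; the rename suggests the raw model ID is now passed straight through rather than a pre-normalized name. A minimal usage sketch follows; the deep dist import path is an assumption based on the file list above, not a documented entry point.

import { getCodexInstructions, getModelFamily } from "@mariozechner/pi-ai/dist/providers/openai-codex/prompts/codex.js";

// Resolve the prompt family and fetch (or reuse the cached) Codex instructions.
const family = getModelFamily("gpt-5.1-codex-max");             // "codex-max"
const prompt = await getCodexInstructions("gpt-5.1-codex-max"); // Promise<string>
console.log(family, prompt.length);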
@@ -1 +1 @@
-[codex.d.ts.map: single-line version-3 source map (mappings plus embedded sourcesContent) for the previous build]
+[codex.d.ts.map: single-line version-3 source map regenerated for this build]
@@ -28,17 +28,17 @@ const CACHE_FILES = {
     "gpt-5.2": "gpt-5.2-instructions.md",
     "gpt-5.1": "gpt-5.1-instructions.md",
 };
-export function getModelFamily(normalizedModel) {
-    if (normalizedModel.includes("gpt-5.2-codex") || normalizedModel.includes("gpt 5.2 codex")) {
+export function getModelFamily(model) {
+    if (model.includes("gpt-5.2-codex") || model.includes("gpt 5.2 codex")) {
         return "gpt-5.2-codex";
     }
-    if (normalizedModel.includes("codex-max")) {
+    if (model.includes("codex-max")) {
         return "codex-max";
     }
-    if (normalizedModel.includes("codex") || normalizedModel.startsWith("codex-")) {
+    if (model.includes("codex") || model.startsWith("codex-")) {
         return "codex";
     }
-    if (normalizedModel.includes("gpt-5.2")) {
+    if (model.includes("gpt-5.2")) {
         return "gpt-5.2";
     }
     return "gpt-5.1";
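Note: read off the branch order above, a few concrete IDs resolve as follows; "codex-max" is tested before the generic "codex" match, and anything that matches no branch falls back to "gpt-5.1". The pairs below simply restate that logic and are not fixtures shipped with the package.

// Expected ModelFamily per the new getModelFamily(model) branches (sketch).
const expectedFamilies: Array<[model: string, family: string]> = [
    ["gpt-5.2-codex-high", "gpt-5.2-codex"],
    ["gpt-5.1-codex-max", "codex-max"],
    ["gpt-5.1-codex", "codex"],
    ["gpt-5.2", "gpt-5.2"],
    ["some-unrelated-model", "gpt-5.1"], // fallback
];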
@@ -75,8 +75,8 @@ async function getLatestReleaseTag() {
     }
     throw new Error("Failed to determine latest release tag from GitHub");
 }
-export async function getCodexInstructions(normalizedModel = "gpt-5.1-codex") {
-    const modelFamily = getModelFamily(normalizedModel);
+export async function getCodexInstructions(model = "gpt-5.1-codex") {
+    const modelFamily = getModelFamily(model);
     const promptFile = PROMPT_FILES[modelFamily];
     const cacheDir = getCacheDir();
     const cacheFile = join(cacheDir, CACHE_FILES[modelFamily]);
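Note: getCodexInstructions caches the fetched prompt on disk and revalidates it via the CacheMetadata record shown in the d.ts hunk above (etag, release tag, lastChecked, url). A sketch of where those files end up, assuming the defaults from the package's codex.ts (agent dir ~/.pi/agent, overridable via PI_CODING_AGENT_DIR; default model "gpt-5.1-codex"):

import { homedir } from "node:os";
import { join } from "node:path";

// Mirrors getAgentDir()/getCacheDir()/CACHE_FILES from codex.ts (sketch).
const agentDir = process.env.PI_CODING_AGENT_DIR || join(homedir(), ".pi", "agent");
const cacheDir = join(agentDir, "cache", "openai-codex");

// The default model "gpt-5.1-codex" resolves to the "codex" family, so:
const cacheFile = join(cacheDir, "codex-instructions.md");            // cached prompt text
const cacheMetaFile = join(cacheDir, "codex-instructions-meta.json"); // { etag, tag, lastChecked, url }
console.log(cacheFile, cacheMetaFile);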
@@ -1 +1 @@
-[codex.js.map: single-line version-3 source map (mappings plus embedded sourcesContent) for the previous build]
+[codex.js.map: single-line version-3 source map regenerated for this build]
@@ -37,7 +37,6 @@ export interface RequestBody {
     max_completion_tokens?: number;
     [key: string]: unknown;
 }
-export declare function normalizeModel(model: string | undefined): string;
 export declare function transformRequestBody(body: RequestBody, options?: CodexRequestOptions, prompt?: {
     instructions: string;
     developerMessages: string[];
@@ -1 +1 @@
-[request-transformer.d.ts.map: single-line version-3 source map (mappings plus embedded sourcesContent) for the previous build]
+[request-transformer.d.ts.map: single-line version-3 source map regenerated for this build]
@@ -1,135 +1,19 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
3
|
-
"gpt-5.1-codex-low": "gpt-5.1-codex",
|
|
4
|
-
"gpt-5.1-codex-medium": "gpt-5.1-codex",
|
|
5
|
-
"gpt-5.1-codex-high": "gpt-5.1-codex",
|
|
6
|
-
"gpt-5.1-codex-max": "gpt-5.1-codex-max",
|
|
7
|
-
"gpt-5.1-codex-max-low": "gpt-5.1-codex-max",
|
|
8
|
-
"gpt-5.1-codex-max-medium": "gpt-5.1-codex-max",
|
|
9
|
-
"gpt-5.1-codex-max-high": "gpt-5.1-codex-max",
|
|
10
|
-
"gpt-5.1-codex-max-xhigh": "gpt-5.1-codex-max",
|
|
11
|
-
"gpt-5.2": "gpt-5.2",
|
|
12
|
-
"gpt-5.2-none": "gpt-5.2",
|
|
13
|
-
"gpt-5.2-low": "gpt-5.2",
|
|
14
|
-
"gpt-5.2-medium": "gpt-5.2",
|
|
15
|
-
"gpt-5.2-high": "gpt-5.2",
|
|
16
|
-
"gpt-5.2-xhigh": "gpt-5.2",
|
|
17
|
-
"gpt-5.2-codex": "gpt-5.2-codex",
|
|
18
|
-
"gpt-5.2-codex-low": "gpt-5.2-codex",
|
|
19
|
-
"gpt-5.2-codex-medium": "gpt-5.2-codex",
|
|
20
|
-
"gpt-5.2-codex-high": "gpt-5.2-codex",
|
|
21
|
-
"gpt-5.2-codex-xhigh": "gpt-5.2-codex",
|
|
22
|
-
"gpt-5.1-codex-mini": "gpt-5.1-codex-mini",
|
|
23
|
-
"gpt-5.1-codex-mini-medium": "gpt-5.1-codex-mini",
|
|
24
|
-
"gpt-5.1-codex-mini-high": "gpt-5.1-codex-mini",
|
|
25
|
-
"gpt-5.1": "gpt-5.1",
|
|
26
|
-
"gpt-5.1-none": "gpt-5.1",
|
|
27
|
-
"gpt-5.1-low": "gpt-5.1",
|
|
28
|
-
"gpt-5.1-medium": "gpt-5.1",
|
|
29
|
-
"gpt-5.1-high": "gpt-5.1",
|
|
30
|
-
"gpt-5.1-chat-latest": "gpt-5.1",
|
|
31
|
-
"gpt-5-codex": "gpt-5.1-codex",
|
|
32
|
-
"codex-mini-latest": "gpt-5.1-codex-mini",
|
|
33
|
-
"gpt-5-codex-mini": "gpt-5.1-codex-mini",
|
|
34
|
-
"gpt-5-codex-mini-medium": "gpt-5.1-codex-mini",
|
|
35
|
-
"gpt-5-codex-mini-high": "gpt-5.1-codex-mini",
|
|
36
|
-
"gpt-5": "gpt-5.1",
|
|
37
|
-
"gpt-5-mini": "gpt-5.1",
|
|
38
|
-
"gpt-5-nano": "gpt-5.1",
|
|
39
|
-
};
|
|
40
|
-
function getNormalizedModel(modelId) {
|
|
41
|
-
if (MODEL_MAP[modelId])
|
|
42
|
-
return MODEL_MAP[modelId];
|
|
43
|
-    const lowerModelId = modelId.toLowerCase();
-    const match = Object.keys(MODEL_MAP).find((key) => key.toLowerCase() === lowerModelId);
-    return match ? MODEL_MAP[match] : undefined;
-}
-export function normalizeModel(model) {
-    if (!model)
-        return "gpt-5.1";
+function clampReasoningEffort(model, effort) {
+    // Codex backend expects exact model IDs. Do not normalize model names here.
     const modelId = model.includes("/") ? model.split("/").pop() : model;
-    const mappedModel = getNormalizedModel(modelId);
-    if (mappedModel)
-        return mappedModel;
-    const normalized = modelId.toLowerCase();
-    if (normalized.includes("gpt-5.2-codex") || normalized.includes("gpt 5.2 codex")) {
-        return "gpt-5.2-codex";
-    }
-    if (normalized.includes("gpt-5.2") || normalized.includes("gpt 5.2")) {
-        return "gpt-5.2";
-    }
-    if (normalized.includes("gpt-5.1-codex-max") || normalized.includes("gpt 5.1 codex max")) {
-        return "gpt-5.1-codex-max";
-    }
-    if (normalized.includes("gpt-5.1-codex-mini") || normalized.includes("gpt 5.1 codex mini")) {
-        return "gpt-5.1-codex-mini";
-    }
-    if (normalized.includes("codex-mini-latest") ||
-        normalized.includes("gpt-5-codex-mini") ||
-        normalized.includes("gpt 5 codex mini")) {
-        return "codex-mini-latest";
-    }
-    if (normalized.includes("gpt-5.1-codex") || normalized.includes("gpt 5.1 codex")) {
-        return "gpt-5.1-codex";
-    }
-    if (normalized.includes("gpt-5.1") || normalized.includes("gpt 5.1")) {
-        return "gpt-5.1";
-    }
-    if (normalized.includes("codex")) {
-        return "gpt-5.1-codex";
+    // gpt-5.1 does not support xhigh.
+    if (modelId === "gpt-5.1" && effort === "xhigh") {
+        return "high";
     }
-    if (normalized.includes("gpt-5") || normalized.includes("gpt 5")) {
-        return "gpt-5.1";
+    // gpt-5.1-codex-mini only supports medium/high.
+    if (modelId === "gpt-5.1-codex-mini") {
+        return effort === "high" || effort === "xhigh" ? "high" : "medium";
     }
-    return "gpt-5.1";
+    return effort;
 }
-function getReasoningConfig(modelName, options = {}) {
-    const normalizedName = modelName?.toLowerCase() ?? "";
-    const isGpt52Codex = normalizedName.includes("gpt-5.2-codex") || normalizedName.includes("gpt 5.2 codex");
-    const isGpt52General = (normalizedName.includes("gpt-5.2") || normalizedName.includes("gpt 5.2")) && !isGpt52Codex;
-    const isCodexMax = normalizedName.includes("codex-max") || normalizedName.includes("codex max");
-    const isCodexMini = normalizedName.includes("codex-mini") ||
-        normalizedName.includes("codex mini") ||
-        normalizedName.includes("codex_mini") ||
-        normalizedName.includes("codex-mini-latest");
-    const isCodex = normalizedName.includes("codex") && !isCodexMini;
-    const isLightweight = !isCodexMini && (normalizedName.includes("nano") || normalizedName.includes("mini"));
-    const isGpt51General = (normalizedName.includes("gpt-5.1") || normalizedName.includes("gpt 5.1")) &&
-        !isCodex &&
-        !isCodexMax &&
-        !isCodexMini;
-    const supportsXhigh = isGpt52General || isGpt52Codex || isCodexMax;
-    const supportsNone = isGpt52General || isGpt51General;
-    const defaultEffort = isCodexMini
-        ? "medium"
-        : supportsXhigh
-            ? "high"
-            : isLightweight
-                ? "minimal"
-                : "medium";
-    let effort = options.reasoningEffort || defaultEffort;
-    if (isCodexMini) {
-        if (effort === "minimal" || effort === "low" || effort === "none") {
-            effort = "medium";
-        }
-        if (effort === "xhigh") {
-            effort = "high";
-        }
-        if (effort !== "high" && effort !== "medium") {
-            effort = "medium";
-        }
-    }
-    if (!supportsXhigh && effort === "xhigh") {
-        effort = "high";
-    }
-    if (!supportsNone && effort === "none") {
-        effort = "low";
-    }
-    if (isCodex && effort === "minimal") {
-        effort = "low";
-    }
+function getReasoningConfig(model, options) {
     return {
-        effort,
+        effort: clampReasoningEffort(model, options.reasoningEffort),
         summary: options.reasoningSummary ?? "auto",
     };
 }
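Net effect of the hunk above: the compiled request-transformer no longer ships its own MODEL_MAP / normalizeModel lookup; the model ID is used as-is and only the requested reasoning effort is clamped to what that model accepts. A typed restatement of the new clamping rules, for illustration only (the calls at the bottom are example inputs, not an exhaustive list):

// Sketch of the new behaviour, restated in TypeScript for readability.
type Effort = "none" | "minimal" | "low" | "medium" | "high" | "xhigh";

function clampReasoningEffort(model: string, effort: Effort): Effort {
    // Only a provider prefix such as "openai-codex/" is stripped; the ID itself is never rewritten.
    const modelId = model.includes("/") ? model.split("/").pop()! : model;
    if (modelId === "gpt-5.1" && effort === "xhigh") return "high"; // gpt-5.1 has no xhigh
    if (modelId === "gpt-5.1-codex-mini") {
        return effort === "high" || effort === "xhigh" ? "high" : "medium"; // mini supports medium/high only
    }
    return effort; // everything else passes through unchanged
}

clampReasoningEffort("openai-codex/gpt-5.1", "xhigh"); // -> "high"
clampReasoningEffort("gpt-5.1-codex-mini", "low");     // -> "medium"
clampReasoningEffort("gpt-5.2-codex", "xhigh");        // -> "xhigh"

The old per-family heuristics (supportsXhigh, supportsNone, defaultEffort, the nano/mini special cases) are gone; anything other than the two special-cased IDs now has its requested effort passed through unchanged.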
@@ -147,8 +31,6 @@ function filterInput(input) {
     });
 }
 export async function transformRequestBody(body, options = {}, prompt) {
-    const normalizedModel = normalizeModel(body.model);
-    body.model = normalizedModel;
     body.store = false;
     body.stream = true;
     if (body.input && Array.isArray(body.input)) {
@@ -194,7 +76,7 @@ export async function transformRequestBody(body, options = {}, prompt) {
         body.input = [...developerMessages, ...body.input];
     }
     if (options.reasoningEffort !== undefined) {
-        const reasoningConfig = getReasoningConfig(normalizedModel, options);
+        const reasoningConfig = getReasoningConfig(body.model, options);
         body.reasoning = {
             ...body.reasoning,
             ...reasoningConfig,
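With normalizeModel() removed from this path, transformRequestBody() now sends body.model verbatim and derives the reasoning config from that same ID. A minimal sketch of the observable behaviour, assuming a deep import into dist is available in your setup:

// Hypothetical usage; the import specifier is an assumption about your install layout.
import { transformRequestBody } from "@mariozechner/pi-ai/dist/providers/openai-codex/request-transformer.js";

const body = { model: "gpt-5.1-codex-mini", input: [] };
const transformed = await transformRequestBody(body, { reasoningEffort: "low" });
// transformed.model     -> "gpt-5.1-codex-mini" (no longer rewritten before sending)
// transformed.reasoning -> { effort: "medium", summary: "auto" } ("low" clamped up for codex-mini)
// transformed.store     -> false, transformed.stream -> true

As before, omitting options.reasoningEffort deletes body.reasoning from the request rather than defaulting it.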
@@ -1 +1 @@
-
{"version":3,"file":"request-transformer.js","sourceRoot":"","sources":["../../../src/providers/openai-codex/request-transformer.ts"],"names":[],"mappings":"AA2CA,MAAM,SAAS,GAA2B;IACzC,eAAe,EAAE,eAAe;IAChC,mBAAmB,EAAE,eAAe;IACpC,sBAAsB,EAAE,eAAe;IACvC,oBAAoB,EAAE,eAAe;IACrC,mBAAmB,EAAE,mBAAmB;IACxC,uBAAuB,EAAE,mBAAmB;IAC5C,0BAA0B,EAAE,mBAAmB;IAC/C,wBAAwB,EAAE,mBAAmB;IAC7C,yBAAyB,EAAE,mBAAmB;IAC9C,SAAS,EAAE,SAAS;IACpB,cAAc,EAAE,SAAS;IACzB,aAAa,EAAE,SAAS;IACxB,gBAAgB,EAAE,SAAS;IAC3B,cAAc,EAAE,SAAS;IACzB,eAAe,EAAE,SAAS;IAC1B,eAAe,EAAE,eAAe;IAChC,mBAAmB,EAAE,eAAe;IACpC,sBAAsB,EAAE,eAAe;IACvC,oBAAoB,EAAE,eAAe;IACrC,qBAAqB,EAAE,eAAe;IACtC,oBAAoB,EAAE,oBAAoB;IAC1C,2BAA2B,EAAE,oBAAoB;IACjD,yBAAyB,EAAE,oBAAoB;IAC/C,SAAS,EAAE,SAAS;IACpB,cAAc,EAAE,SAAS;IACzB,aAAa,EAAE,SAAS;IACxB,gBAAgB,EAAE,SAAS;IAC3B,cAAc,EAAE,SAAS;IACzB,qBAAqB,EAAE,SAAS;IAChC,aAAa,EAAE,eAAe;IAC9B,mBAAmB,EAAE,oBAAoB;IACzC,kBAAkB,EAAE,oBAAoB;IACxC,yBAAyB,EAAE,oBAAoB;IAC/C,uBAAuB,EAAE,oBAAoB;IAC7C,OAAO,EAAE,SAAS;IAClB,YAAY,EAAE,SAAS;IACvB,YAAY,EAAE,SAAS;CACvB,CAAC;AAEF,SAAS,kBAAkB,CAAC,OAAe,EAAsB;IAChE,IAAI,SAAS,CAAC,OAAO,CAAC;QAAE,OAAO,SAAS,CAAC,OAAO,CAAC,CAAC;IAClD,MAAM,YAAY,GAAG,OAAO,CAAC,WAAW,EAAE,CAAC;IAC3C,MAAM,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,GAAG,CAAC,WAAW,EAAE,KAAK,YAAY,CAAC,CAAC;IACvF,OAAO,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;AAAA,CAC5C;AAED,MAAM,UAAU,cAAc,CAAC,KAAyB,EAAU;IACjE,IAAI,CAAC,KAAK;QAAE,OAAO,SAAS,CAAC;IAE7B,MAAM,OAAO,GAAG,KAAK,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,EAAG,CAAC,CAAC,CAAC,KAAK,CAAC;IACtE,MAAM,WAAW,GAAG,kBAAkB,CAAC,OAAO,CAAC,CAAC;IAChD,IAAI,WAAW;QAAE,OAAO,WAAW,CAAC;IAEpC,MAAM,UAAU,GAAG,OAAO,CAAC,WAAW,EAAE,CAAC;IAEzC,IAAI,UAAU,CAAC,QAAQ,CAAC,eAAe,CAAC,IAAI,UAAU,CAAC,QAAQ,CAAC,eAAe,CAAC,EAAE,CAAC;QAClF,OAAO,eAAe,CAAC;IACxB,CAAC;IACD,IAAI,UAAU,CAAC,QAAQ,CAAC,SAAS,CAAC,IAAI,UAAU,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC;QACtE,OAAO,SAAS,CAAC;IAClB,CAAC;IACD,IAAI,UAAU,CAAC,QAAQ,CAAC,mBAAmB,CAAC,IAAI,UAAU,CAAC,QAAQ,CAAC,mBAAmB,CAAC,EAAE,CAAC;QAC1F,OAAO,mBAAmB,CAAC;IAC5B,CAAC;IACD,IAAI,UAAU,CAAC,QAAQ,CAAC,oBAAoB,CAAC,IAAI,UAAU,CAAC,QAAQ,CAAC,oBAAoB,CAAC,EAAE,CAAC;QAC5F,OAAO,oBAAoB,CAAC;IAC7B,CAAC;IACD,IACC,UAAU,CAAC,QAAQ,CAAC,mBAAmB,CAAC;QACxC,UAAU,CAAC,QAAQ,CAAC,kBAAkB,CAAC;QACvC,UAAU,CAAC,QAAQ,CAAC,kBAAkB,CAAC,EACtC,CAAC;QACF,OAAO,mBAAmB,CAAC;IAC5B,CAAC;IACD,IAAI,UAAU,CAAC,QAAQ,CAAC,eAAe,CAAC,IAAI,UAAU,CAAC,QAAQ,CAAC,eAAe,CAAC,EAAE,CAAC;QAClF,OAAO,eAAe,CAAC;IACxB,CAAC;IACD,IAAI,UAAU,CAAC,QAAQ,CAAC,SAAS,CAAC,IAAI,UAAU,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC;QACtE,OAAO,SAAS,CAAC;IAClB,CAAC;IACD,IAAI,UAAU,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE,CAAC;QAClC,OAAO,eAAe,CAAC;IACxB,CAAC;IACD,IAAI,UAAU,CAAC,QAAQ,CAAC,OAAO,CAAC,IAAI,UAAU,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE,CAAC;QAClE,OAAO,SAAS,CAAC;IAClB,CAAC;IAED,OAAO,SAAS,CAAC;AAAA,CACjB;AAED,SAAS,kBAAkB,CAAC,SAA6B,EAAE,OAAO,GAAwB,EAAE,EAAmB;IAC9G,MAAM,cAAc,GAAG,SAAS,EAAE,WAAW,EAAE,IAAI,EAAE,CAAC;IAEtD,MAAM,YAAY,GAAG,cAAc,CAAC,QAAQ,CAAC,eAAe,CAAC,IAAI,cAAc,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC;IAC1G,MAAM,cAAc,GAAG,CAAC,cAAc,CAAC,QAAQ,CAAC,SAAS,CAAC,IAAI,cAAc,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,IAAI,CAAC,YAAY,CAAC;IACnH,MAAM,UAAU,GAAG,cAAc,CAAC,QAAQ,CAAC,WAAW,CAAC,IAAI,cAAc,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC;IAChG,MAAM,WAAW,GAChB,cAAc,CAAC,QAAQ,CAAC,YAAY,CAAC;QACrC,cAAc,CAAC,QAAQ,CAAC,YAAY,CAAC;QACrC,cAAc,CAAC,QAAQ,CAAC,YAAY,CAAC;QACrC,cAAc,CAAC,QAAQ,CAAC,mBAAmB,CAAC,CAAC;IAC9C,MAAM,OAAO,GAAG,cAAc,CAAC,QAAQ,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC;IACjE,MAAM,aAAa,GAAG,CAAC,WAAW,IAAI,CAAC,cAAc,CAAC,QAAQ,CAAC
,MAAM,CAAC,IAAI,cAAc,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC;IAC3G,MAAM,cAAc,GACnB,CAAC,cAAc,CAAC,QAAQ,CAAC,SAAS,CAAC,IAAI,cAAc,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC;QAC1E,CAAC,OAAO;QACR,CAAC,UAAU;QACX,CAAC,WAAW,CAAC;IAEd,MAAM,aAAa,GAAG,cAAc,IAAI,YAAY,IAAI,UAAU,CAAC;IACnE,MAAM,YAAY,GAAG,cAAc,IAAI,cAAc,CAAC;IAEtD,MAAM,aAAa,GAA8B,WAAW;QAC3D,CAAC,CAAC,QAAQ;QACV,CAAC,CAAC,aAAa;YACd,CAAC,CAAC,MAAM;YACR,CAAC,CAAC,aAAa;gBACd,CAAC,CAAC,SAAS;gBACX,CAAC,CAAC,QAAQ,CAAC;IAEd,IAAI,MAAM,GAAG,OAAO,CAAC,eAAe,IAAI,aAAa,CAAC;IAEtD,IAAI,WAAW,EAAE,CAAC;QACjB,IAAI,MAAM,KAAK,SAAS,IAAI,MAAM,KAAK,KAAK,IAAI,MAAM,KAAK,MAAM,EAAE,CAAC;YACnE,MAAM,GAAG,QAAQ,CAAC;QACnB,CAAC;QACD,IAAI,MAAM,KAAK,OAAO,EAAE,CAAC;YACxB,MAAM,GAAG,MAAM,CAAC;QACjB,CAAC;QACD,IAAI,MAAM,KAAK,MAAM,IAAI,MAAM,KAAK,QAAQ,EAAE,CAAC;YAC9C,MAAM,GAAG,QAAQ,CAAC;QACnB,CAAC;IACF,CAAC;IAED,IAAI,CAAC,aAAa,IAAI,MAAM,KAAK,OAAO,EAAE,CAAC;QAC1C,MAAM,GAAG,MAAM,CAAC;IACjB,CAAC;IAED,IAAI,CAAC,YAAY,IAAI,MAAM,KAAK,MAAM,EAAE,CAAC;QACxC,MAAM,GAAG,KAAK,CAAC;IAChB,CAAC;IAED,IAAI,OAAO,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;QACrC,MAAM,GAAG,KAAK,CAAC;IAChB,CAAC;IAED,OAAO;QACN,MAAM;QACN,OAAO,EAAE,OAAO,CAAC,gBAAgB,IAAI,MAAM;KAC3C,CAAC;AAAA,CACF;AAED,SAAS,WAAW,CAAC,KAA8B,EAA2B;IAC7E,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC;QAAE,OAAO,KAAK,CAAC;IAExC,OAAO,KAAK;SACV,MAAM,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,KAAK,gBAAgB,CAAC;SAChD,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC;QACd,IAAI,IAAI,CAAC,EAAE,IAAI,IAAI,EAAE,CAAC;YACrB,MAAM,EAAE,EAAE,EAAE,GAAG,EAAE,GAAG,IAAI,EAAE,GAAG,IAAI,CAAC;YAClC,OAAO,IAAiB,CAAC;QAC1B,CAAC;QACD,OAAO,IAAI,CAAC;IAAA,CACZ,CAAC,CAAC;AAAA,CACJ;AAED,MAAM,CAAC,KAAK,UAAU,oBAAoB,CACzC,IAAiB,EACjB,OAAO,GAAwB,EAAE,EACjC,MAA8D,EACvC;IACvB,MAAM,eAAe,GAAG,cAAc,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IAEnD,IAAI,CAAC,KAAK,GAAG,eAAe,CAAC;IAC7B,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;IACnB,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC;IAEnB,IAAI,IAAI,CAAC,KAAK,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC;QAC7C,IAAI,CAAC,KAAK,GAAG,WAAW,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAErC,IAAI,IAAI,CAAC,KAAK,EAAE,CAAC;YAChB,MAAM,eAAe,GAAG,IAAI,GAAG,CAC9B,IAAI,CAAC,KAAK;iBACR,MAAM,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,KAAK,eAAe,IAAI,OAAO,IAAI,CAAC,OAAO,KAAK,QAAQ,CAAC;iBACnF,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,OAAiB,CAAC,CACvC,CAAC;YAEF,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC;gBACrC,IAAI,IAAI,CAAC,IAAI,KAAK,sBAAsB,IAAI,OAAO,IAAI,CAAC,OAAO,KAAK,QAAQ,EAAE,CAAC;oBAC9E,MAAM,MAAM,GAAG,IAAI,CAAC,OAAiB,CAAC;oBACtC,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC;wBAClC,MAAM,UAAU,GAAG,IAA0C,CAAC;wBAC9D,MAAM,QAAQ,GAAG,OAAO,UAAU,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC;wBAChF,IAAI,IAAI,GAAG,EAAE,CAAC;wBACd,IAAI,CAAC;4BACJ,MAAM,MAAM,GAAG,UAAU,CAAC,MAAM,CAAC;4BACjC,IAAI,GAAG,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;wBACrE,CAAC;wBAAC,MAAM,CAAC;4BACR,IAAI,GAAG,MAAM,CAAC,UAAU,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;wBACxC,CAAC;wBACD,IAAI,IAAI,CAAC,MAAM,GAAG,KAAK,EAAE,CAAC;4BACzB,IAAI,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,CAAC,kBAAkB,CAAC;wBAClD,CAAC;wBACD,OAAO;4BACN,IAAI,EAAE,SAAS;4BACf,IAAI,EAAE,WAAW;4BACjB,OAAO,EAAE,aAAa,QAAQ,oBAAoB,MAAM,MAAM,IAAI,EAAE;yBACvD,CAAC;oBAChB,CAAC;gBACF,CAAC;gBACD,OAAO,IAAI,CAAC;YAAA,CACZ,CAAC,CAAC;QACJ,CAAC;IACF,CAAC;IAED,IAAI,MAAM,EAAE,iBAAiB,IAAI,MAAM,CAAC,iBAAiB,CAAC,MAAM,GAAG,CAAC,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC;QACnG,MAAM,iBAAiB,GAAG,MAAM,CAAC,iBAAiB,CAAC,GAAG,CACrD,CAAC,IAAI,EAAE,EAAE,CACR,CAAC;YACA,IAAI,EAAE,SAAS;YACf,IAAI,EAAE,WAAW;YACjB,OAAO,EAAE,CAAC,EAAE,IAAI,EAAE,YA
AY,EAAE,IAAI,EAAE,CAAC;SACvC,CAAc,CAChB,CAAC;QACF,IAAI,CAAC,KAAK,GAAG,CAAC,GAAG,iBAAiB,EAAE,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC;IACpD,CAAC;IAED,IAAI,OAAO,CAAC,eAAe,KAAK,SAAS,EAAE,CAAC;QAC3C,MAAM,eAAe,GAAG,kBAAkB,CAAC,eAAe,EAAE,OAAO,CAAC,CAAC;QACrE,IAAI,CAAC,SAAS,GAAG;YAChB,GAAG,IAAI,CAAC,SAAS;YACjB,GAAG,eAAe;SAClB,CAAC;IACH,CAAC;SAAM,CAAC;QACP,OAAO,IAAI,CAAC,SAAS,CAAC;IACvB,CAAC;IAED,IAAI,CAAC,IAAI,GAAG;QACX,GAAG,IAAI,CAAC,IAAI;QACZ,SAAS,EAAE,OAAO,CAAC,aAAa,IAAI,QAAQ;KAC5C,CAAC;IAEF,MAAM,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;IAC3E,OAAO,CAAC,IAAI,CAAC,6BAA6B,CAAC,CAAC;IAC5C,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC;IAE5C,OAAO,IAAI,CAAC,iBAAiB,CAAC;IAC9B,OAAO,IAAI,CAAC,qBAAqB,CAAC;IAElC,OAAO,IAAI,CAAC;AAAA,CACZ","sourcesContent":["export interface ReasoningConfig {\n\teffort: \"none\" | \"minimal\" | \"low\" | \"medium\" | \"high\" | \"xhigh\";\n\tsummary: \"auto\" | \"concise\" | \"detailed\" | \"off\" | \"on\";\n}\n\nexport interface CodexRequestOptions {\n\treasoningEffort?: ReasoningConfig[\"effort\"];\n\treasoningSummary?: ReasoningConfig[\"summary\"] | null;\n\ttextVerbosity?: \"low\" | \"medium\" | \"high\";\n\tinclude?: string[];\n}\n\nexport interface InputItem {\n\tid?: string | null;\n\ttype?: string | null;\n\trole?: string;\n\tcontent?: unknown;\n\tcall_id?: string | null;\n\tname?: string;\n\toutput?: unknown;\n\targuments?: string;\n}\n\nexport interface RequestBody {\n\tmodel: string;\n\tstore?: boolean;\n\tstream?: boolean;\n\tinstructions?: string;\n\tinput?: InputItem[];\n\ttools?: unknown;\n\ttemperature?: number;\n\treasoning?: Partial<ReasoningConfig>;\n\ttext?: {\n\t\tverbosity?: \"low\" | \"medium\" | \"high\";\n\t};\n\tinclude?: string[];\n\tprompt_cache_key?: string;\n\tprompt_cache_retention?: \"in_memory\" | \"24h\";\n\tmax_output_tokens?: number;\n\tmax_completion_tokens?: number;\n\t[key: string]: unknown;\n}\n\nconst MODEL_MAP: Record<string, string> = {\n\t\"gpt-5.1-codex\": \"gpt-5.1-codex\",\n\t\"gpt-5.1-codex-low\": \"gpt-5.1-codex\",\n\t\"gpt-5.1-codex-medium\": \"gpt-5.1-codex\",\n\t\"gpt-5.1-codex-high\": \"gpt-5.1-codex\",\n\t\"gpt-5.1-codex-max\": \"gpt-5.1-codex-max\",\n\t\"gpt-5.1-codex-max-low\": \"gpt-5.1-codex-max\",\n\t\"gpt-5.1-codex-max-medium\": \"gpt-5.1-codex-max\",\n\t\"gpt-5.1-codex-max-high\": \"gpt-5.1-codex-max\",\n\t\"gpt-5.1-codex-max-xhigh\": \"gpt-5.1-codex-max\",\n\t\"gpt-5.2\": \"gpt-5.2\",\n\t\"gpt-5.2-none\": \"gpt-5.2\",\n\t\"gpt-5.2-low\": \"gpt-5.2\",\n\t\"gpt-5.2-medium\": \"gpt-5.2\",\n\t\"gpt-5.2-high\": \"gpt-5.2\",\n\t\"gpt-5.2-xhigh\": \"gpt-5.2\",\n\t\"gpt-5.2-codex\": \"gpt-5.2-codex\",\n\t\"gpt-5.2-codex-low\": \"gpt-5.2-codex\",\n\t\"gpt-5.2-codex-medium\": \"gpt-5.2-codex\",\n\t\"gpt-5.2-codex-high\": \"gpt-5.2-codex\",\n\t\"gpt-5.2-codex-xhigh\": \"gpt-5.2-codex\",\n\t\"gpt-5.1-codex-mini\": \"gpt-5.1-codex-mini\",\n\t\"gpt-5.1-codex-mini-medium\": \"gpt-5.1-codex-mini\",\n\t\"gpt-5.1-codex-mini-high\": \"gpt-5.1-codex-mini\",\n\t\"gpt-5.1\": \"gpt-5.1\",\n\t\"gpt-5.1-none\": \"gpt-5.1\",\n\t\"gpt-5.1-low\": \"gpt-5.1\",\n\t\"gpt-5.1-medium\": \"gpt-5.1\",\n\t\"gpt-5.1-high\": \"gpt-5.1\",\n\t\"gpt-5.1-chat-latest\": \"gpt-5.1\",\n\t\"gpt-5-codex\": \"gpt-5.1-codex\",\n\t\"codex-mini-latest\": \"gpt-5.1-codex-mini\",\n\t\"gpt-5-codex-mini\": \"gpt-5.1-codex-mini\",\n\t\"gpt-5-codex-mini-medium\": \"gpt-5.1-codex-mini\",\n\t\"gpt-5-codex-mini-high\": \"gpt-5.1-codex-mini\",\n\t\"gpt-5\": 
\"gpt-5.1\",\n\t\"gpt-5-mini\": \"gpt-5.1\",\n\t\"gpt-5-nano\": \"gpt-5.1\",\n};\n\nfunction getNormalizedModel(modelId: string): string | undefined {\n\tif (MODEL_MAP[modelId]) return MODEL_MAP[modelId];\n\tconst lowerModelId = modelId.toLowerCase();\n\tconst match = Object.keys(MODEL_MAP).find((key) => key.toLowerCase() === lowerModelId);\n\treturn match ? MODEL_MAP[match] : undefined;\n}\n\nexport function normalizeModel(model: string | undefined): string {\n\tif (!model) return \"gpt-5.1\";\n\n\tconst modelId = model.includes(\"/\") ? model.split(\"/\").pop()! : model;\n\tconst mappedModel = getNormalizedModel(modelId);\n\tif (mappedModel) return mappedModel;\n\n\tconst normalized = modelId.toLowerCase();\n\n\tif (normalized.includes(\"gpt-5.2-codex\") || normalized.includes(\"gpt 5.2 codex\")) {\n\t\treturn \"gpt-5.2-codex\";\n\t}\n\tif (normalized.includes(\"gpt-5.2\") || normalized.includes(\"gpt 5.2\")) {\n\t\treturn \"gpt-5.2\";\n\t}\n\tif (normalized.includes(\"gpt-5.1-codex-max\") || normalized.includes(\"gpt 5.1 codex max\")) {\n\t\treturn \"gpt-5.1-codex-max\";\n\t}\n\tif (normalized.includes(\"gpt-5.1-codex-mini\") || normalized.includes(\"gpt 5.1 codex mini\")) {\n\t\treturn \"gpt-5.1-codex-mini\";\n\t}\n\tif (\n\t\tnormalized.includes(\"codex-mini-latest\") ||\n\t\tnormalized.includes(\"gpt-5-codex-mini\") ||\n\t\tnormalized.includes(\"gpt 5 codex mini\")\n\t) {\n\t\treturn \"codex-mini-latest\";\n\t}\n\tif (normalized.includes(\"gpt-5.1-codex\") || normalized.includes(\"gpt 5.1 codex\")) {\n\t\treturn \"gpt-5.1-codex\";\n\t}\n\tif (normalized.includes(\"gpt-5.1\") || normalized.includes(\"gpt 5.1\")) {\n\t\treturn \"gpt-5.1\";\n\t}\n\tif (normalized.includes(\"codex\")) {\n\t\treturn \"gpt-5.1-codex\";\n\t}\n\tif (normalized.includes(\"gpt-5\") || normalized.includes(\"gpt 5\")) {\n\t\treturn \"gpt-5.1\";\n\t}\n\n\treturn \"gpt-5.1\";\n}\n\nfunction getReasoningConfig(modelName: string | undefined, options: CodexRequestOptions = {}): ReasoningConfig {\n\tconst normalizedName = modelName?.toLowerCase() ?? \"\";\n\n\tconst isGpt52Codex = normalizedName.includes(\"gpt-5.2-codex\") || normalizedName.includes(\"gpt 5.2 codex\");\n\tconst isGpt52General = (normalizedName.includes(\"gpt-5.2\") || normalizedName.includes(\"gpt 5.2\")) && !isGpt52Codex;\n\tconst isCodexMax = normalizedName.includes(\"codex-max\") || normalizedName.includes(\"codex max\");\n\tconst isCodexMini =\n\t\tnormalizedName.includes(\"codex-mini\") ||\n\t\tnormalizedName.includes(\"codex mini\") ||\n\t\tnormalizedName.includes(\"codex_mini\") ||\n\t\tnormalizedName.includes(\"codex-mini-latest\");\n\tconst isCodex = normalizedName.includes(\"codex\") && !isCodexMini;\n\tconst isLightweight = !isCodexMini && (normalizedName.includes(\"nano\") || normalizedName.includes(\"mini\"));\n\tconst isGpt51General =\n\t\t(normalizedName.includes(\"gpt-5.1\") || normalizedName.includes(\"gpt 5.1\")) &&\n\t\t!isCodex &&\n\t\t!isCodexMax &&\n\t\t!isCodexMini;\n\n\tconst supportsXhigh = isGpt52General || isGpt52Codex || isCodexMax;\n\tconst supportsNone = isGpt52General || isGpt51General;\n\n\tconst defaultEffort: ReasoningConfig[\"effort\"] = isCodexMini\n\t\t? \"medium\"\n\t\t: supportsXhigh\n\t\t\t? \"high\"\n\t\t\t: isLightweight\n\t\t\t\t? 
\"minimal\"\n\t\t\t\t: \"medium\";\n\n\tlet effort = options.reasoningEffort || defaultEffort;\n\n\tif (isCodexMini) {\n\t\tif (effort === \"minimal\" || effort === \"low\" || effort === \"none\") {\n\t\t\teffort = \"medium\";\n\t\t}\n\t\tif (effort === \"xhigh\") {\n\t\t\teffort = \"high\";\n\t\t}\n\t\tif (effort !== \"high\" && effort !== \"medium\") {\n\t\t\teffort = \"medium\";\n\t\t}\n\t}\n\n\tif (!supportsXhigh && effort === \"xhigh\") {\n\t\teffort = \"high\";\n\t}\n\n\tif (!supportsNone && effort === \"none\") {\n\t\teffort = \"low\";\n\t}\n\n\tif (isCodex && effort === \"minimal\") {\n\t\teffort = \"low\";\n\t}\n\n\treturn {\n\t\teffort,\n\t\tsummary: options.reasoningSummary ?? \"auto\",\n\t};\n}\n\nfunction filterInput(input: InputItem[] | undefined): InputItem[] | undefined {\n\tif (!Array.isArray(input)) return input;\n\n\treturn input\n\t\t.filter((item) => item.type !== \"item_reference\")\n\t\t.map((item) => {\n\t\t\tif (item.id != null) {\n\t\t\t\tconst { id: _id, ...rest } = item;\n\t\t\t\treturn rest as InputItem;\n\t\t\t}\n\t\t\treturn item;\n\t\t});\n}\n\nexport async function transformRequestBody(\n\tbody: RequestBody,\n\toptions: CodexRequestOptions = {},\n\tprompt?: { instructions: string; developerMessages: string[] },\n): Promise<RequestBody> {\n\tconst normalizedModel = normalizeModel(body.model);\n\n\tbody.model = normalizedModel;\n\tbody.store = false;\n\tbody.stream = true;\n\n\tif (body.input && Array.isArray(body.input)) {\n\t\tbody.input = filterInput(body.input);\n\n\t\tif (body.input) {\n\t\t\tconst functionCallIds = new Set(\n\t\t\t\tbody.input\n\t\t\t\t\t.filter((item) => item.type === \"function_call\" && typeof item.call_id === \"string\")\n\t\t\t\t\t.map((item) => item.call_id as string),\n\t\t\t);\n\n\t\t\tbody.input = body.input.map((item) => {\n\t\t\t\tif (item.type === \"function_call_output\" && typeof item.call_id === \"string\") {\n\t\t\t\t\tconst callId = item.call_id as string;\n\t\t\t\t\tif (!functionCallIds.has(callId)) {\n\t\t\t\t\t\tconst itemRecord = item as unknown as Record<string, unknown>;\n\t\t\t\t\t\tconst toolName = typeof itemRecord.name === \"string\" ? itemRecord.name : \"tool\";\n\t\t\t\t\t\tlet text = \"\";\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tconst output = itemRecord.output;\n\t\t\t\t\t\t\ttext = typeof output === \"string\" ? output : JSON.stringify(output);\n\t\t\t\t\t\t} catch {\n\t\t\t\t\t\t\ttext = String(itemRecord.output ?? 
\"\");\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (text.length > 16000) {\n\t\t\t\t\t\t\ttext = `${text.slice(0, 16000)}\\n...[truncated]`;\n\t\t\t\t\t\t}\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\ttype: \"message\",\n\t\t\t\t\t\t\trole: \"assistant\",\n\t\t\t\t\t\t\tcontent: `[Previous ${toolName} result; call_id=${callId}]: ${text}`,\n\t\t\t\t\t\t} as InputItem;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\treturn item;\n\t\t\t});\n\t\t}\n\t}\n\n\tif (prompt?.developerMessages && prompt.developerMessages.length > 0 && Array.isArray(body.input)) {\n\t\tconst developerMessages = prompt.developerMessages.map(\n\t\t\t(text) =>\n\t\t\t\t({\n\t\t\t\t\ttype: \"message\",\n\t\t\t\t\trole: \"developer\",\n\t\t\t\t\tcontent: [{ type: \"input_text\", text }],\n\t\t\t\t}) as InputItem,\n\t\t);\n\t\tbody.input = [...developerMessages, ...body.input];\n\t}\n\n\tif (options.reasoningEffort !== undefined) {\n\t\tconst reasoningConfig = getReasoningConfig(normalizedModel, options);\n\t\tbody.reasoning = {\n\t\t\t...body.reasoning,\n\t\t\t...reasoningConfig,\n\t\t};\n\t} else {\n\t\tdelete body.reasoning;\n\t}\n\n\tbody.text = {\n\t\t...body.text,\n\t\tverbosity: options.textVerbosity || \"medium\",\n\t};\n\n\tconst include = Array.isArray(options.include) ? [...options.include] : [];\n\tinclude.push(\"reasoning.encrypted_content\");\n\tbody.include = Array.from(new Set(include));\n\n\tdelete body.max_output_tokens;\n\tdelete body.max_completion_tokens;\n\n\treturn body;\n}\n"]}
+
{"version":3,"file":"request-transformer.js","sourceRoot":"","sources":["../../../src/providers/openai-codex/request-transformer.ts"],"names":[],"mappings":"AA2CA,SAAS,oBAAoB,CAAC,KAAa,EAAE,MAAiC,EAA6B;IAC1G,4EAA4E;IAC5E,MAAM,OAAO,GAAG,KAAK,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,EAAG,CAAC,CAAC,CAAC,KAAK,CAAC;IAEtE,kCAAkC;IAClC,IAAI,OAAO,KAAK,SAAS,IAAI,MAAM,KAAK,OAAO,EAAE,CAAC;QACjD,OAAO,MAAM,CAAC;IACf,CAAC;IAED,gDAAgD;IAChD,IAAI,OAAO,KAAK,oBAAoB,EAAE,CAAC;QACtC,OAAO,MAAM,KAAK,MAAM,IAAI,MAAM,KAAK,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC;IACpE,CAAC;IAED,OAAO,MAAM,CAAC;AAAA,CACd;AAED,SAAS,kBAAkB,CAAC,KAAa,EAAE,OAA4B,EAAmB;IACzF,OAAO;QACN,MAAM,EAAE,oBAAoB,CAAC,KAAK,EAAE,OAAO,CAAC,eAA4C,CAAC;QACzF,OAAO,EAAE,OAAO,CAAC,gBAAgB,IAAI,MAAM;KAC3C,CAAC;AAAA,CACF;AAED,SAAS,WAAW,CAAC,KAA8B,EAA2B;IAC7E,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC;QAAE,OAAO,KAAK,CAAC;IAExC,OAAO,KAAK;SACV,MAAM,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,KAAK,gBAAgB,CAAC;SAChD,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC;QACd,IAAI,IAAI,CAAC,EAAE,IAAI,IAAI,EAAE,CAAC;YACrB,MAAM,EAAE,EAAE,EAAE,GAAG,EAAE,GAAG,IAAI,EAAE,GAAG,IAAI,CAAC;YAClC,OAAO,IAAiB,CAAC;QAC1B,CAAC;QACD,OAAO,IAAI,CAAC;IAAA,CACZ,CAAC,CAAC;AAAA,CACJ;AAED,MAAM,CAAC,KAAK,UAAU,oBAAoB,CACzC,IAAiB,EACjB,OAAO,GAAwB,EAAE,EACjC,MAA8D,EACvC;IACvB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;IACnB,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC;IAEnB,IAAI,IAAI,CAAC,KAAK,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC;QAC7C,IAAI,CAAC,KAAK,GAAG,WAAW,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAErC,IAAI,IAAI,CAAC,KAAK,EAAE,CAAC;YAChB,MAAM,eAAe,GAAG,IAAI,GAAG,CAC9B,IAAI,CAAC,KAAK;iBACR,MAAM,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,KAAK,eAAe,IAAI,OAAO,IAAI,CAAC,OAAO,KAAK,QAAQ,CAAC;iBACnF,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,OAAiB,CAAC,CACvC,CAAC;YAEF,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC;gBACrC,IAAI,IAAI,CAAC,IAAI,KAAK,sBAAsB,IAAI,OAAO,IAAI,CAAC,OAAO,KAAK,QAAQ,EAAE,CAAC;oBAC9E,MAAM,MAAM,GAAG,IAAI,CAAC,OAAiB,CAAC;oBACtC,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC;wBAClC,MAAM,UAAU,GAAG,IAA0C,CAAC;wBAC9D,MAAM,QAAQ,GAAG,OAAO,UAAU,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC;wBAChF,IAAI,IAAI,GAAG,EAAE,CAAC;wBACd,IAAI,CAAC;4BACJ,MAAM,MAAM,GAAG,UAAU,CAAC,MAAM,CAAC;4BACjC,IAAI,GAAG,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;wBACrE,CAAC;wBAAC,MAAM,CAAC;4BACR,IAAI,GAAG,MAAM,CAAC,UAAU,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC;wBACxC,CAAC;wBACD,IAAI,IAAI,CAAC,MAAM,GAAG,KAAK,EAAE,CAAC;4BACzB,IAAI,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,CAAC,kBAAkB,CAAC;wBAClD,CAAC;wBACD,OAAO;4BACN,IAAI,EAAE,SAAS;4BACf,IAAI,EAAE,WAAW;4BACjB,OAAO,EAAE,aAAa,QAAQ,oBAAoB,MAAM,MAAM,IAAI,EAAE;yBACvD,CAAC;oBAChB,CAAC;gBACF,CAAC;gBACD,OAAO,IAAI,CAAC;YAAA,CACZ,CAAC,CAAC;QACJ,CAAC;IACF,CAAC;IAED,IAAI,MAAM,EAAE,iBAAiB,IAAI,MAAM,CAAC,iBAAiB,CAAC,MAAM,GAAG,CAAC,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC;QACnG,MAAM,iBAAiB,GAAG,MAAM,CAAC,iBAAiB,CAAC,GAAG,CACrD,CAAC,IAAI,EAAE,EAAE,CACR,CAAC;YACA,IAAI,EAAE,SAAS;YACf,IAAI,EAAE,WAAW;YACjB,OAAO,EAAE,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,IAAI,EAAE,CAAC;SACvC,CAAc,CAChB,CAAC;QACF,IAAI,CAAC,KAAK,GAAG,CAAC,GAAG,iBAAiB,EAAE,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC;IACpD,CAAC;IAED,IAAI,OAAO,CAAC,eAAe,KAAK,SAAS,EAAE,CAAC;QAC3C,MAAM,eAAe,GAAG,kBAAkB,CAAC,IAAI,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;QAChE,IAAI,CAAC,SAAS,GAAG;YAChB,GAAG,IAAI,CAAC,SAAS;YACjB,GAAG,eAAe;SAClB,CAAC;IACH,CAAC;SAAM,CAAC;QACP,OAAO,IAAI,CAAC,SAAS,CAAC;IACvB,CAAC;IAED,IAAI,CAAC,IAAI,GAAG;QACX,GAAG,IAAI,CAAC,IAA
I;QACZ,SAAS,EAAE,OAAO,CAAC,aAAa,IAAI,QAAQ;KAC5C,CAAC;IAEF,MAAM,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;IAC3E,OAAO,CAAC,IAAI,CAAC,6BAA6B,CAAC,CAAC;IAC5C,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC;IAE5C,OAAO,IAAI,CAAC,iBAAiB,CAAC;IAC9B,OAAO,IAAI,CAAC,qBAAqB,CAAC;IAElC,OAAO,IAAI,CAAC;AAAA,CACZ","sourcesContent":["export interface ReasoningConfig {\n\teffort: \"none\" | \"minimal\" | \"low\" | \"medium\" | \"high\" | \"xhigh\";\n\tsummary: \"auto\" | \"concise\" | \"detailed\" | \"off\" | \"on\";\n}\n\nexport interface CodexRequestOptions {\n\treasoningEffort?: ReasoningConfig[\"effort\"];\n\treasoningSummary?: ReasoningConfig[\"summary\"] | null;\n\ttextVerbosity?: \"low\" | \"medium\" | \"high\";\n\tinclude?: string[];\n}\n\nexport interface InputItem {\n\tid?: string | null;\n\ttype?: string | null;\n\trole?: string;\n\tcontent?: unknown;\n\tcall_id?: string | null;\n\tname?: string;\n\toutput?: unknown;\n\targuments?: string;\n}\n\nexport interface RequestBody {\n\tmodel: string;\n\tstore?: boolean;\n\tstream?: boolean;\n\tinstructions?: string;\n\tinput?: InputItem[];\n\ttools?: unknown;\n\ttemperature?: number;\n\treasoning?: Partial<ReasoningConfig>;\n\ttext?: {\n\t\tverbosity?: \"low\" | \"medium\" | \"high\";\n\t};\n\tinclude?: string[];\n\tprompt_cache_key?: string;\n\tprompt_cache_retention?: \"in_memory\" | \"24h\";\n\tmax_output_tokens?: number;\n\tmax_completion_tokens?: number;\n\t[key: string]: unknown;\n}\n\nfunction clampReasoningEffort(model: string, effort: ReasoningConfig[\"effort\"]): ReasoningConfig[\"effort\"] {\n\t// Codex backend expects exact model IDs. Do not normalize model names here.\n\tconst modelId = model.includes(\"/\") ? model.split(\"/\").pop()! : model;\n\n\t// gpt-5.1 does not support xhigh.\n\tif (modelId === \"gpt-5.1\" && effort === \"xhigh\") {\n\t\treturn \"high\";\n\t}\n\n\t// gpt-5.1-codex-mini only supports medium/high.\n\tif (modelId === \"gpt-5.1-codex-mini\") {\n\t\treturn effort === \"high\" || effort === \"xhigh\" ? \"high\" : \"medium\";\n\t}\n\n\treturn effort;\n}\n\nfunction getReasoningConfig(model: string, options: CodexRequestOptions): ReasoningConfig {\n\treturn {\n\t\teffort: clampReasoningEffort(model, options.reasoningEffort as ReasoningConfig[\"effort\"]),\n\t\tsummary: options.reasoningSummary ?? 
\"auto\",\n\t};\n}\n\nfunction filterInput(input: InputItem[] | undefined): InputItem[] | undefined {\n\tif (!Array.isArray(input)) return input;\n\n\treturn input\n\t\t.filter((item) => item.type !== \"item_reference\")\n\t\t.map((item) => {\n\t\t\tif (item.id != null) {\n\t\t\t\tconst { id: _id, ...rest } = item;\n\t\t\t\treturn rest as InputItem;\n\t\t\t}\n\t\t\treturn item;\n\t\t});\n}\n\nexport async function transformRequestBody(\n\tbody: RequestBody,\n\toptions: CodexRequestOptions = {},\n\tprompt?: { instructions: string; developerMessages: string[] },\n): Promise<RequestBody> {\n\tbody.store = false;\n\tbody.stream = true;\n\n\tif (body.input && Array.isArray(body.input)) {\n\t\tbody.input = filterInput(body.input);\n\n\t\tif (body.input) {\n\t\t\tconst functionCallIds = new Set(\n\t\t\t\tbody.input\n\t\t\t\t\t.filter((item) => item.type === \"function_call\" && typeof item.call_id === \"string\")\n\t\t\t\t\t.map((item) => item.call_id as string),\n\t\t\t);\n\n\t\t\tbody.input = body.input.map((item) => {\n\t\t\t\tif (item.type === \"function_call_output\" && typeof item.call_id === \"string\") {\n\t\t\t\t\tconst callId = item.call_id as string;\n\t\t\t\t\tif (!functionCallIds.has(callId)) {\n\t\t\t\t\t\tconst itemRecord = item as unknown as Record<string, unknown>;\n\t\t\t\t\t\tconst toolName = typeof itemRecord.name === \"string\" ? itemRecord.name : \"tool\";\n\t\t\t\t\t\tlet text = \"\";\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tconst output = itemRecord.output;\n\t\t\t\t\t\t\ttext = typeof output === \"string\" ? output : JSON.stringify(output);\n\t\t\t\t\t\t} catch {\n\t\t\t\t\t\t\ttext = String(itemRecord.output ?? \"\");\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (text.length > 16000) {\n\t\t\t\t\t\t\ttext = `${text.slice(0, 16000)}\\n...[truncated]`;\n\t\t\t\t\t\t}\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\ttype: \"message\",\n\t\t\t\t\t\t\trole: \"assistant\",\n\t\t\t\t\t\t\tcontent: `[Previous ${toolName} result; call_id=${callId}]: ${text}`,\n\t\t\t\t\t\t} as InputItem;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\treturn item;\n\t\t\t});\n\t\t}\n\t}\n\n\tif (prompt?.developerMessages && prompt.developerMessages.length > 0 && Array.isArray(body.input)) {\n\t\tconst developerMessages = prompt.developerMessages.map(\n\t\t\t(text) =>\n\t\t\t\t({\n\t\t\t\t\ttype: \"message\",\n\t\t\t\t\trole: \"developer\",\n\t\t\t\t\tcontent: [{ type: \"input_text\", text }],\n\t\t\t\t}) as InputItem,\n\t\t);\n\t\tbody.input = [...developerMessages, ...body.input];\n\t}\n\n\tif (options.reasoningEffort !== undefined) {\n\t\tconst reasoningConfig = getReasoningConfig(body.model, options);\n\t\tbody.reasoning = {\n\t\t\t...body.reasoning,\n\t\t\t...reasoningConfig,\n\t\t};\n\t} else {\n\t\tdelete body.reasoning;\n\t}\n\n\tbody.text = {\n\t\t...body.text,\n\t\tverbosity: options.textVerbosity || \"medium\",\n\t};\n\n\tconst include = Array.isArray(options.include) ? [...options.include] : [];\n\tinclude.push(\"reasoning.encrypted_content\");\n\tbody.include = Array.from(new Set(include));\n\n\tdelete body.max_output_tokens;\n\tdelete body.max_completion_tokens;\n\n\treturn body;\n}\n"]}
@@ -1 +1 @@
-
{"version":3,"file":"openai-codex-responses.d.ts","sourceRoot":"","sources":["../../src/providers/openai-codex-responses.ts"],"names":[],"mappings":"AAWA,OAAO,KAAK,EAMX,cAAc,EACd,aAAa,EAKb,MAAM,aAAa,CAAC;AAuBrB,MAAM,WAAW,2BAA4B,SAAQ,aAAa;IACjE,eAAe,CAAC,EAAE,MAAM,GAAG,SAAS,GAAG,KAAK,GAAG,QAAQ,GAAG,MAAM,GAAG,OAAO,CAAC;IAC3E,gBAAgB,CAAC,EAAE,MAAM,GAAG,SAAS,GAAG,UAAU,GAAG,KAAK,GAAG,IAAI,GAAG,IAAI,CAAC;IACzE,aAAa,CAAC,EAAE,KAAK,GAAG,QAAQ,GAAG,MAAM,CAAC;IAC1C,OAAO,CAAC,EAAE,MAAM,EAAE,CAAC;IACnB,SAAS,CAAC,EAAE,OAAO,CAAC;CACpB;AAID,eAAO,MAAM,0BAA0B,EAAE,cAAc,CAAC,wBAAwB,CAkU/E,CAAC","sourcesContent":["import type {\n\tResponseFunctionToolCall,\n\tResponseInput,\n\tResponseInputContent,\n\tResponseInputImage,\n\tResponseInputText,\n\tResponseOutputMessage,\n\tResponseReasoningItem,\n} from \"openai/resources/responses/responses.js\";\nimport { calculateCost } from \"../models.js\";\nimport { getEnvApiKey } from \"../stream.js\";\nimport type {\n\tApi,\n\tAssistantMessage,\n\tContext,\n\tModel,\n\tStopReason,\n\tStreamFunction,\n\tStreamOptions,\n\tTextContent,\n\tThinkingContent,\n\tTool,\n\tToolCall,\n} from \"../types.js\";\nimport { AssistantMessageEventStream } from \"../utils/event-stream.js\";\nimport { parseStreamingJson } from \"../utils/json-parse.js\";\nimport { sanitizeSurrogates } from \"../utils/sanitize-unicode.js\";\nimport {\n\tCODEX_BASE_URL,\n\tJWT_CLAIM_PATH,\n\tOPENAI_HEADER_VALUES,\n\tOPENAI_HEADERS,\n\tURL_PATHS,\n} from \"./openai-codex/constants.js\";\nimport { getCodexInstructions } from \"./openai-codex/prompts/codex.js\";\nimport { buildCodexPiBridge } from \"./openai-codex/prompts/pi-codex-bridge.js\";\nimport { buildCodexSystemPrompt } from \"./openai-codex/prompts/system-prompt.js\";\nimport {\n\ttype CodexRequestOptions,\n\tnormalizeModel,\n\ttype RequestBody,\n\ttransformRequestBody,\n} from \"./openai-codex/request-transformer.js\";\nimport { parseCodexError, parseCodexSseStream } from \"./openai-codex/response-handler.js\";\nimport { transformMessages } from \"./transorm-messages.js\";\n\nexport interface OpenAICodexResponsesOptions extends StreamOptions {\n\treasoningEffort?: \"none\" | \"minimal\" | \"low\" | \"medium\" | \"high\" | \"xhigh\";\n\treasoningSummary?: \"auto\" | \"concise\" | \"detailed\" | \"off\" | \"on\" | null;\n\ttextVerbosity?: \"low\" | \"medium\" | \"high\";\n\tinclude?: string[];\n\tcodexMode?: boolean;\n}\n\nconst CODEX_DEBUG = process.env.PI_CODEX_DEBUG === \"1\" || process.env.PI_CODEX_DEBUG === \"true\";\n\nexport const streamOpenAICodexResponses: StreamFunction<\"openai-codex-responses\"> = (\n\tmodel: Model<\"openai-codex-responses\">,\n\tcontext: Context,\n\toptions?: OpenAICodexResponsesOptions,\n): AssistantMessageEventStream => {\n\tconst stream = new AssistantMessageEventStream();\n\n\t(async () => {\n\t\tconst output: AssistantMessage = {\n\t\t\trole: \"assistant\",\n\t\t\tcontent: [],\n\t\t\tapi: \"openai-codex-responses\" as Api,\n\t\t\tprovider: model.provider,\n\t\t\tmodel: model.id,\n\t\t\tusage: {\n\t\t\t\tinput: 0,\n\t\t\t\toutput: 0,\n\t\t\t\tcacheRead: 0,\n\t\t\t\tcacheWrite: 0,\n\t\t\t\ttotalTokens: 0,\n\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t\t},\n\t\t\tstopReason: \"stop\",\n\t\t\ttimestamp: Date.now(),\n\t\t};\n\n\t\ttry {\n\t\t\tconst apiKey = options?.apiKey || getEnvApiKey(model.provider) || \"\";\n\t\t\tif (!apiKey) {\n\t\t\t\tthrow new Error(`No API key for provider: ${model.provider}`);\n\t\t\t}\n\n\t\t\tconst accountId = getAccountId(apiKey);\n\t\t\tconst baseUrl = 
model.baseUrl || CODEX_BASE_URL;\n\t\t\tconst baseWithSlash = baseUrl.endsWith(\"/\") ? baseUrl : `${baseUrl}/`;\n\t\t\tconst url = rewriteUrlForCodex(new URL(URL_PATHS.RESPONSES.slice(1), baseWithSlash).toString());\n\n\t\t\tconst messages = convertMessages(model, context);\n\t\t\tconst params: RequestBody = {\n\t\t\t\tmodel: model.id,\n\t\t\t\tinput: messages,\n\t\t\t\tstream: true,\n\t\t\t\tprompt_cache_key: options?.sessionId,\n\t\t\t};\n\n\t\t\tif (options?.maxTokens) {\n\t\t\t\tparams.max_output_tokens = options.maxTokens;\n\t\t\t}\n\n\t\t\tif (options?.temperature !== undefined) {\n\t\t\t\tparams.temperature = options.temperature;\n\t\t\t}\n\n\t\t\tif (context.tools) {\n\t\t\t\tparams.tools = convertTools(context.tools);\n\t\t\t}\n\n\t\t\tconst normalizedModel = normalizeModel(params.model);\n\t\t\tconst codexInstructions = await getCodexInstructions(normalizedModel);\n\t\t\tconst bridgeText = buildCodexPiBridge(context.tools);\n\t\t\tconst systemPrompt = buildCodexSystemPrompt({\n\t\t\t\tcodexInstructions,\n\t\t\t\tbridgeText,\n\t\t\t\tuserSystemPrompt: context.systemPrompt,\n\t\t\t});\n\n\t\t\tparams.model = normalizedModel;\n\t\t\tparams.instructions = systemPrompt.instructions;\n\n\t\t\tconst codexOptions: CodexRequestOptions = {\n\t\t\t\treasoningEffort: options?.reasoningEffort,\n\t\t\t\treasoningSummary: options?.reasoningSummary ?? undefined,\n\t\t\t\ttextVerbosity: options?.textVerbosity,\n\t\t\t\tinclude: options?.include,\n\t\t\t};\n\n\t\t\tconst transformedBody = await transformRequestBody(params, codexOptions, systemPrompt);\n\n\t\t\tconst reasoningEffort = transformedBody.reasoning?.effort ?? null;\n\t\t\tconst headers = createCodexHeaders(model.headers, accountId, apiKey, options?.sessionId);\n\t\t\tlogCodexDebug(\"codex request\", {\n\t\t\t\turl,\n\t\t\t\tmodel: params.model,\n\t\t\t\treasoningEffort,\n\t\t\t\theaders: redactHeaders(headers),\n\t\t\t});\n\n\t\t\tconst response = await fetch(url, {\n\t\t\t\tmethod: \"POST\",\n\t\t\t\theaders,\n\t\t\t\tbody: JSON.stringify(transformedBody),\n\t\t\t\tsignal: options?.signal,\n\t\t\t});\n\n\t\t\tlogCodexDebug(\"codex response\", {\n\t\t\t\turl: response.url,\n\t\t\t\tstatus: response.status,\n\t\t\t\tstatusText: response.statusText,\n\t\t\t\tcontentType: response.headers.get(\"content-type\") || null,\n\t\t\t\tcfRay: response.headers.get(\"cf-ray\") || null,\n\t\t\t});\n\n\t\t\tif (!response.ok) {\n\t\t\t\tconst info = await parseCodexError(response);\n\t\t\t\tthrow new Error(info.friendlyMessage || info.message);\n\t\t\t}\n\n\t\t\tif (!response.body) {\n\t\t\t\tthrow new Error(\"No response body\");\n\t\t\t}\n\n\t\t\tstream.push({ type: \"start\", partial: output });\n\n\t\t\tlet currentItem: ResponseReasoningItem | ResponseOutputMessage | ResponseFunctionToolCall | null = null;\n\t\t\tlet currentBlock: ThinkingContent | TextContent | (ToolCall & { partialJson: string }) | null = null;\n\t\t\tconst blocks = output.content;\n\t\t\tconst blockIndex = () => blocks.length - 1;\n\n\t\t\tfor await (const rawEvent of parseCodexSseStream(response)) {\n\t\t\t\tconst eventType = typeof rawEvent.type === \"string\" ? 
rawEvent.type : \"\";\n\t\t\t\tif (!eventType) continue;\n\n\t\t\t\tif (eventType === \"response.output_item.added\") {\n\t\t\t\t\tconst item = rawEvent.item as ResponseReasoningItem | ResponseOutputMessage | ResponseFunctionToolCall;\n\t\t\t\t\tif (item.type === \"reasoning\") {\n\t\t\t\t\t\tcurrentItem = item;\n\t\t\t\t\t\tcurrentBlock = { type: \"thinking\", thinking: \"\" };\n\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\tstream.push({ type: \"thinking_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t} else if (item.type === \"message\") {\n\t\t\t\t\t\tcurrentItem = item;\n\t\t\t\t\t\tcurrentBlock = { type: \"text\", text: \"\" };\n\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\tstream.push({ type: \"text_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t} else if (item.type === \"function_call\") {\n\t\t\t\t\t\tcurrentItem = item;\n\t\t\t\t\t\tcurrentBlock = {\n\t\t\t\t\t\t\ttype: \"toolCall\",\n\t\t\t\t\t\t\tid: `${item.call_id}|${item.id}`,\n\t\t\t\t\t\t\tname: item.name,\n\t\t\t\t\t\t\targuments: {},\n\t\t\t\t\t\t\tpartialJson: item.arguments || \"\",\n\t\t\t\t\t\t};\n\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\tstream.push({ type: \"toolcall_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"response.reasoning_summary_part.added\") {\n\t\t\t\t\tif (currentItem && currentItem.type === \"reasoning\") {\n\t\t\t\t\t\tcurrentItem.summary = currentItem.summary || [];\n\t\t\t\t\t\tcurrentItem.summary.push((rawEvent as { part: ResponseReasoningItem[\"summary\"][number] }).part);\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"response.reasoning_summary_text.delta\") {\n\t\t\t\t\tif (currentItem && currentItem.type === \"reasoning\" && currentBlock?.type === \"thinking\") {\n\t\t\t\t\t\tcurrentItem.summary = currentItem.summary || [];\n\t\t\t\t\t\tconst lastPart = currentItem.summary[currentItem.summary.length - 1];\n\t\t\t\t\t\tif (lastPart) {\n\t\t\t\t\t\t\tconst delta = (rawEvent as { delta?: string }).delta || \"\";\n\t\t\t\t\t\t\tcurrentBlock.thinking += delta;\n\t\t\t\t\t\t\tlastPart.text += delta;\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"thinking_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\tdelta,\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"response.reasoning_summary_part.done\") {\n\t\t\t\t\tif (currentItem && currentItem.type === \"reasoning\" && currentBlock?.type === \"thinking\") {\n\t\t\t\t\t\tcurrentItem.summary = currentItem.summary || [];\n\t\t\t\t\t\tconst lastPart = currentItem.summary[currentItem.summary.length - 1];\n\t\t\t\t\t\tif (lastPart) {\n\t\t\t\t\t\t\tcurrentBlock.thinking += \"\\n\\n\";\n\t\t\t\t\t\t\tlastPart.text += \"\\n\\n\";\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"thinking_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\tdelta: \"\\n\\n\",\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"response.content_part.added\") {\n\t\t\t\t\tif (currentItem && currentItem.type === \"message\") {\n\t\t\t\t\t\tcurrentItem.content = currentItem.content || [];\n\t\t\t\t\t\tconst part = (rawEvent as { part?: ResponseOutputMessage[\"content\"][number] }).part;\n\t\t\t\t\t\tif (part && (part.type === \"output_text\" || part.type === \"refusal\")) 
{\n\t\t\t\t\t\t\tcurrentItem.content.push(part);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"response.output_text.delta\") {\n\t\t\t\t\tif (currentItem && currentItem.type === \"message\" && currentBlock?.type === \"text\") {\n\t\t\t\t\t\tconst lastPart = currentItem.content[currentItem.content.length - 1];\n\t\t\t\t\t\tif (lastPart && lastPart.type === \"output_text\") {\n\t\t\t\t\t\t\tconst delta = (rawEvent as { delta?: string }).delta || \"\";\n\t\t\t\t\t\t\tcurrentBlock.text += delta;\n\t\t\t\t\t\t\tlastPart.text += delta;\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"text_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\tdelta,\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"response.refusal.delta\") {\n\t\t\t\t\tif (currentItem && currentItem.type === \"message\" && currentBlock?.type === \"text\") {\n\t\t\t\t\t\tconst lastPart = currentItem.content[currentItem.content.length - 1];\n\t\t\t\t\t\tif (lastPart && lastPart.type === \"refusal\") {\n\t\t\t\t\t\t\tconst delta = (rawEvent as { delta?: string }).delta || \"\";\n\t\t\t\t\t\t\tcurrentBlock.text += delta;\n\t\t\t\t\t\t\tlastPart.refusal += delta;\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"text_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\tdelta,\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"response.function_call_arguments.delta\") {\n\t\t\t\t\tif (currentItem && currentItem.type === \"function_call\" && currentBlock?.type === \"toolCall\") {\n\t\t\t\t\t\tconst delta = (rawEvent as { delta?: string }).delta || \"\";\n\t\t\t\t\t\tcurrentBlock.partialJson += delta;\n\t\t\t\t\t\tcurrentBlock.arguments = parseStreamingJson(currentBlock.partialJson);\n\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\ttype: \"toolcall_delta\",\n\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\tdelta,\n\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t});\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"response.output_item.done\") {\n\t\t\t\t\tconst item = rawEvent.item as ResponseReasoningItem | ResponseOutputMessage | ResponseFunctionToolCall;\n\t\t\t\t\tif (item.type === \"reasoning\" && currentBlock?.type === \"thinking\") {\n\t\t\t\t\t\tcurrentBlock.thinking = item.summary?.map((s) => s.text).join(\"\\n\\n\") || \"\";\n\t\t\t\t\t\tcurrentBlock.thinkingSignature = JSON.stringify(item);\n\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\ttype: \"thinking_end\",\n\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\tcontent: currentBlock.thinking,\n\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t});\n\t\t\t\t\t\tcurrentBlock = null;\n\t\t\t\t\t} else if (item.type === \"message\" && currentBlock?.type === \"text\") {\n\t\t\t\t\t\tcurrentBlock.text = item.content.map((c) => (c.type === \"output_text\" ? 
c.text : c.refusal)).join(\"\");\n\t\t\t\t\t\tcurrentBlock.textSignature = item.id;\n\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\ttype: \"text_end\",\n\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\tcontent: currentBlock.text,\n\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t});\n\t\t\t\t\t\tcurrentBlock = null;\n\t\t\t\t\t} else if (item.type === \"function_call\") {\n\t\t\t\t\t\tconst toolCall: ToolCall = {\n\t\t\t\t\t\t\ttype: \"toolCall\",\n\t\t\t\t\t\t\tid: `${item.call_id}|${item.id}`,\n\t\t\t\t\t\t\tname: item.name,\n\t\t\t\t\t\t\targuments: JSON.parse(item.arguments),\n\t\t\t\t\t\t};\n\t\t\t\t\t\tstream.push({ type: \"toolcall_end\", contentIndex: blockIndex(), toolCall, partial: output });\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"response.completed\" || eventType === \"response.done\") {\n\t\t\t\t\tconst response = (\n\t\t\t\t\t\trawEvent as {\n\t\t\t\t\t\t\tresponse?: {\n\t\t\t\t\t\t\t\tusage?: {\n\t\t\t\t\t\t\t\t\tinput_tokens?: number;\n\t\t\t\t\t\t\t\t\toutput_tokens?: number;\n\t\t\t\t\t\t\t\t\ttotal_tokens?: number;\n\t\t\t\t\t\t\t\t\tinput_tokens_details?: { cached_tokens?: number };\n\t\t\t\t\t\t\t\t};\n\t\t\t\t\t\t\t\tstatus?: string;\n\t\t\t\t\t\t\t};\n\t\t\t\t\t\t}\n\t\t\t\t\t).response;\n\t\t\t\t\tif (response?.usage) {\n\t\t\t\t\t\tconst cachedTokens = response.usage.input_tokens_details?.cached_tokens || 0;\n\t\t\t\t\t\toutput.usage = {\n\t\t\t\t\t\t\tinput: (response.usage.input_tokens || 0) - cachedTokens,\n\t\t\t\t\t\t\toutput: response.usage.output_tokens || 0,\n\t\t\t\t\t\t\tcacheRead: cachedTokens,\n\t\t\t\t\t\t\tcacheWrite: 0,\n\t\t\t\t\t\t\ttotalTokens: response.usage.total_tokens || 0,\n\t\t\t\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t\t\t\t\t};\n\t\t\t\t\t}\n\t\t\t\t\tcalculateCost(model, output.usage);\n\t\t\t\t\toutput.stopReason = mapStopReason(response?.status);\n\t\t\t\t\tif (output.content.some((b) => b.type === \"toolCall\") && output.stopReason === \"stop\") {\n\t\t\t\t\t\toutput.stopReason = \"toolUse\";\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"error\") {\n\t\t\t\t\tconst code = (rawEvent as { code?: string }).code || \"\";\n\t\t\t\t\tconst message = (rawEvent as { message?: string }).message || \"Unknown error\";\n\t\t\t\t\tthrow new Error(code ? `Error Code ${code}: ${message}` : message);\n\t\t\t\t} else if (eventType === \"response.failed\") {\n\t\t\t\t\tthrow new Error(\"Unknown error\");\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (options?.signal?.aborted) {\n\t\t\t\tthrow new Error(\"Request was aborted\");\n\t\t\t}\n\n\t\t\tif (output.stopReason === \"aborted\" || output.stopReason === \"error\") {\n\t\t\t\tthrow new Error(\"An unknown error occurred\");\n\t\t\t}\n\n\t\t\tstream.push({ type: \"done\", reason: output.stopReason, message: output });\n\t\t\tstream.end();\n\t\t} catch (error) {\n\t\t\tfor (const block of output.content) delete (block as { index?: number }).index;\n\t\t\toutput.stopReason = options?.signal?.aborted ? \"aborted\" : \"error\";\n\t\t\toutput.errorMessage = error instanceof Error ? error.message : JSON.stringify(error);\n\t\t\tstream.push({ type: \"error\", reason: output.stopReason, error: output });\n\t\t\tstream.end();\n\t\t}\n\t})();\n\n\treturn stream;\n};\n\nfunction createCodexHeaders(\n\tinitHeaders: Record<string, string> | undefined,\n\taccountId: string,\n\taccessToken: string,\n\tpromptCacheKey?: string,\n): Headers {\n\tconst headers = new Headers(initHeaders ?? 
{});\n\theaders.delete(\"x-api-key\");\n\theaders.set(\"Authorization\", `Bearer ${accessToken}`);\n\theaders.set(OPENAI_HEADERS.ACCOUNT_ID, accountId);\n\theaders.set(OPENAI_HEADERS.BETA, OPENAI_HEADER_VALUES.BETA_RESPONSES);\n\theaders.set(OPENAI_HEADERS.ORIGINATOR, OPENAI_HEADER_VALUES.ORIGINATOR_CODEX);\n\n\tif (promptCacheKey) {\n\t\theaders.set(OPENAI_HEADERS.CONVERSATION_ID, promptCacheKey);\n\t\theaders.set(OPENAI_HEADERS.SESSION_ID, promptCacheKey);\n\t} else {\n\t\theaders.delete(OPENAI_HEADERS.CONVERSATION_ID);\n\t\theaders.delete(OPENAI_HEADERS.SESSION_ID);\n\t}\n\n\theaders.set(\"accept\", \"text/event-stream\");\n\theaders.set(\"content-type\", \"application/json\");\n\treturn headers;\n}\n\nfunction logCodexDebug(message: string, details?: Record<string, unknown>): void {\n\tif (!CODEX_DEBUG) return;\n\tif (details) {\n\t\tconsole.error(`[codex] ${message}`, details);\n\t\treturn;\n\t}\n\tconsole.error(`[codex] ${message}`);\n}\n\nfunction redactHeaders(headers: Headers): Record<string, string> {\n\tconst redacted: Record<string, string> = {};\n\tfor (const [key, value] of headers.entries()) {\n\t\tconst lower = key.toLowerCase();\n\t\tif (lower === \"authorization\") {\n\t\t\tredacted[key] = \"Bearer [redacted]\";\n\t\t\tcontinue;\n\t\t}\n\t\tif (\n\t\t\tlower.includes(\"account\") ||\n\t\t\tlower.includes(\"session\") ||\n\t\t\tlower.includes(\"conversation\") ||\n\t\t\tlower === \"cookie\"\n\t\t) {\n\t\t\tredacted[key] = \"[redacted]\";\n\t\t\tcontinue;\n\t\t}\n\t\tredacted[key] = value;\n\t}\n\treturn redacted;\n}\n\nfunction rewriteUrlForCodex(url: string): string {\n\treturn url.replace(URL_PATHS.RESPONSES, URL_PATHS.CODEX_RESPONSES);\n}\n\ntype JwtPayload = {\n\t[JWT_CLAIM_PATH]?: {\n\t\tchatgpt_account_id?: string;\n\t};\n\t[key: string]: unknown;\n};\n\nfunction decodeJwt(token: string): JwtPayload | null {\n\ttry {\n\t\tconst parts = token.split(\".\");\n\t\tif (parts.length !== 3) return null;\n\t\tconst payload = parts[1] ?? 
\"\";\n\t\tconst decoded = Buffer.from(payload, \"base64\").toString(\"utf-8\");\n\t\treturn JSON.parse(decoded) as JwtPayload;\n\t} catch {\n\t\treturn null;\n\t}\n}\n\nfunction getAccountId(accessToken: string): string {\n\tconst payload = decodeJwt(accessToken);\n\tconst auth = payload?.[JWT_CLAIM_PATH];\n\tconst accountId = auth?.chatgpt_account_id;\n\tif (!accountId) {\n\t\tthrow new Error(\"Failed to extract accountId from token\");\n\t}\n\treturn accountId;\n}\n\nfunction shortHash(str: string): string {\n\tlet h1 = 0xdeadbeef;\n\tlet h2 = 0x41c6ce57;\n\tfor (let i = 0; i < str.length; i++) {\n\t\tconst ch = str.charCodeAt(i);\n\t\th1 = Math.imul(h1 ^ ch, 2654435761);\n\t\th2 = Math.imul(h2 ^ ch, 1597334677);\n\t}\n\th1 = Math.imul(h1 ^ (h1 >>> 16), 2246822507) ^ Math.imul(h2 ^ (h2 >>> 13), 3266489909);\n\th2 = Math.imul(h2 ^ (h2 >>> 16), 2246822507) ^ Math.imul(h1 ^ (h1 >>> 13), 3266489909);\n\treturn (h2 >>> 0).toString(36) + (h1 >>> 0).toString(36);\n}\n\nfunction convertMessages(model: Model<\"openai-codex-responses\">, context: Context): ResponseInput {\n\tconst messages: ResponseInput = [];\n\n\tconst transformedMessages = transformMessages(context.messages, model);\n\n\tlet msgIndex = 0;\n\tfor (const msg of transformedMessages) {\n\t\tif (msg.role === \"user\") {\n\t\t\tif (typeof msg.content === \"string\") {\n\t\t\t\tmessages.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: [{ type: \"input_text\", text: sanitizeSurrogates(msg.content) }],\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\tconst content: ResponseInputContent[] = msg.content.map((item): ResponseInputContent => {\n\t\t\t\t\tif (item.type === \"text\") {\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\ttype: \"input_text\",\n\t\t\t\t\t\t\ttext: sanitizeSurrogates(item.text),\n\t\t\t\t\t\t} satisfies ResponseInputText;\n\t\t\t\t\t}\n\t\t\t\t\treturn {\n\t\t\t\t\t\ttype: \"input_image\",\n\t\t\t\t\t\tdetail: \"auto\",\n\t\t\t\t\t\timage_url: `data:${item.mimeType};base64,${item.data}`,\n\t\t\t\t\t} satisfies ResponseInputImage;\n\t\t\t\t});\n\t\t\t\tconst filteredContent = !model.input.includes(\"image\")\n\t\t\t\t\t? 
content.filter((c) => c.type !== \"input_image\")\n\t\t\t\t\t: content;\n\t\t\t\tif (filteredContent.length === 0) continue;\n\t\t\t\tmessages.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: filteredContent,\n\t\t\t\t});\n\t\t\t}\n\t\t} else if (msg.role === \"assistant\") {\n\t\t\tconst output: ResponseInput = [];\n\n\t\t\tfor (const block of msg.content) {\n\t\t\t\tif (block.type === \"thinking\" && msg.stopReason !== \"error\") {\n\t\t\t\t\tif (block.thinkingSignature) {\n\t\t\t\t\t\tconst reasoningItem = JSON.parse(block.thinkingSignature) as ResponseReasoningItem;\n\t\t\t\t\t\toutput.push(reasoningItem);\n\t\t\t\t\t}\n\t\t\t\t} else if (block.type === \"text\") {\n\t\t\t\t\tconst textBlock = block as TextContent;\n\t\t\t\t\tlet msgId = textBlock.textSignature;\n\t\t\t\t\tif (!msgId) {\n\t\t\t\t\t\tmsgId = `msg_${msgIndex}`;\n\t\t\t\t\t} else if (msgId.length > 64) {\n\t\t\t\t\t\tmsgId = `msg_${shortHash(msgId)}`;\n\t\t\t\t\t}\n\t\t\t\t\toutput.push({\n\t\t\t\t\t\ttype: \"message\",\n\t\t\t\t\t\trole: \"assistant\",\n\t\t\t\t\t\tcontent: [{ type: \"output_text\", text: sanitizeSurrogates(textBlock.text), annotations: [] }],\n\t\t\t\t\t\tstatus: \"completed\",\n\t\t\t\t\t\tid: msgId,\n\t\t\t\t\t} satisfies ResponseOutputMessage);\n\t\t\t\t} else if (block.type === \"toolCall\" && msg.stopReason !== \"error\") {\n\t\t\t\t\tconst toolCall = block as ToolCall;\n\t\t\t\t\toutput.push({\n\t\t\t\t\t\ttype: \"function_call\",\n\t\t\t\t\t\tid: toolCall.id.split(\"|\")[1],\n\t\t\t\t\t\tcall_id: toolCall.id.split(\"|\")[0],\n\t\t\t\t\t\tname: toolCall.name,\n\t\t\t\t\t\targuments: JSON.stringify(toolCall.arguments),\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t}\n\t\t\tif (output.length === 0) continue;\n\t\t\tmessages.push(...output);\n\t\t} else if (msg.role === \"toolResult\") {\n\t\t\tconst textResult = msg.content\n\t\t\t\t.filter((c) => c.type === \"text\")\n\t\t\t\t.map((c) => (c as { text: string }).text)\n\t\t\t\t.join(\"\\n\");\n\t\t\tconst hasImages = msg.content.some((c) => c.type === \"image\");\n\n\t\t\tconst hasText = textResult.length > 0;\n\t\t\tmessages.push({\n\t\t\t\ttype: \"function_call_output\",\n\t\t\t\tcall_id: msg.toolCallId.split(\"|\")[0],\n\t\t\t\toutput: sanitizeSurrogates(hasText ? 
textResult : \"(see attached image)\"),\n\t\t\t});\n\n\t\t\tif (hasImages && model.input.includes(\"image\")) {\n\t\t\t\tconst contentParts: ResponseInputContent[] = [];\n\t\t\t\tcontentParts.push({\n\t\t\t\t\ttype: \"input_text\",\n\t\t\t\t\ttext: \"Attached image(s) from tool result:\",\n\t\t\t\t} satisfies ResponseInputText);\n\n\t\t\t\tfor (const block of msg.content) {\n\t\t\t\t\tif (block.type === \"image\") {\n\t\t\t\t\t\tcontentParts.push({\n\t\t\t\t\t\t\ttype: \"input_image\",\n\t\t\t\t\t\t\tdetail: \"auto\",\n\t\t\t\t\t\t\timage_url: `data:${block.mimeType};base64,${block.data}`,\n\t\t\t\t\t\t} satisfies ResponseInputImage);\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tmessages.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: contentParts,\n\t\t\t\t});\n\t\t\t}\n\t\t}\n\t\tmsgIndex++;\n\t}\n\n\treturn messages;\n}\n\nfunction convertTools(\n\ttools: Tool[],\n): Array<{ type: \"function\"; name: string; description: string; parameters: Record<string, unknown>; strict: null }> {\n\treturn tools.map((tool) => ({\n\t\ttype: \"function\",\n\t\tname: tool.name,\n\t\tdescription: tool.description,\n\t\tparameters: tool.parameters as unknown as Record<string, unknown>,\n\t\tstrict: null,\n\t}));\n}\n\nfunction mapStopReason(status: string | undefined): StopReason {\n\tif (!status) return \"stop\";\n\tswitch (status) {\n\t\tcase \"completed\":\n\t\t\treturn \"stop\";\n\t\tcase \"incomplete\":\n\t\t\treturn \"length\";\n\t\tcase \"failed\":\n\t\tcase \"cancelled\":\n\t\t\treturn \"error\";\n\t\tcase \"in_progress\":\n\t\tcase \"queued\":\n\t\t\treturn \"stop\";\n\t\tdefault:\n\t\t\treturn \"stop\";\n\t}\n}\n"]}
+
{"version":3,"file":"openai-codex-responses.d.ts","sourceRoot":"","sources":["../../src/providers/openai-codex-responses.ts"],"names":[],"mappings":"AAWA,OAAO,KAAK,EAMX,cAAc,EACd,aAAa,EAKb,MAAM,aAAa,CAAC;AAsBrB,MAAM,WAAW,2BAA4B,SAAQ,aAAa;IACjE,eAAe,CAAC,EAAE,MAAM,GAAG,SAAS,GAAG,KAAK,GAAG,QAAQ,GAAG,MAAM,GAAG,OAAO,CAAC;IAC3E,gBAAgB,CAAC,EAAE,MAAM,GAAG,SAAS,GAAG,UAAU,GAAG,KAAK,GAAG,IAAI,GAAG,IAAI,CAAC;IACzE,aAAa,CAAC,EAAE,KAAK,GAAG,QAAQ,GAAG,MAAM,CAAC;IAC1C,OAAO,CAAC,EAAE,MAAM,EAAE,CAAC;IACnB,SAAS,CAAC,EAAE,OAAO,CAAC;CACpB;AAID,eAAO,MAAM,0BAA0B,EAAE,cAAc,CAAC,wBAAwB,CAgU/E,CAAC","sourcesContent":["import type {\n\tResponseFunctionToolCall,\n\tResponseInput,\n\tResponseInputContent,\n\tResponseInputImage,\n\tResponseInputText,\n\tResponseOutputMessage,\n\tResponseReasoningItem,\n} from \"openai/resources/responses/responses.js\";\nimport { calculateCost } from \"../models.js\";\nimport { getEnvApiKey } from \"../stream.js\";\nimport type {\n\tApi,\n\tAssistantMessage,\n\tContext,\n\tModel,\n\tStopReason,\n\tStreamFunction,\n\tStreamOptions,\n\tTextContent,\n\tThinkingContent,\n\tTool,\n\tToolCall,\n} from \"../types.js\";\nimport { AssistantMessageEventStream } from \"../utils/event-stream.js\";\nimport { parseStreamingJson } from \"../utils/json-parse.js\";\nimport { sanitizeSurrogates } from \"../utils/sanitize-unicode.js\";\nimport {\n\tCODEX_BASE_URL,\n\tJWT_CLAIM_PATH,\n\tOPENAI_HEADER_VALUES,\n\tOPENAI_HEADERS,\n\tURL_PATHS,\n} from \"./openai-codex/constants.js\";\nimport { getCodexInstructions } from \"./openai-codex/prompts/codex.js\";\nimport { buildCodexPiBridge } from \"./openai-codex/prompts/pi-codex-bridge.js\";\nimport { buildCodexSystemPrompt } from \"./openai-codex/prompts/system-prompt.js\";\nimport {\n\ttype CodexRequestOptions,\n\ttype RequestBody,\n\ttransformRequestBody,\n} from \"./openai-codex/request-transformer.js\";\nimport { parseCodexError, parseCodexSseStream } from \"./openai-codex/response-handler.js\";\nimport { transformMessages } from \"./transorm-messages.js\";\n\nexport interface OpenAICodexResponsesOptions extends StreamOptions {\n\treasoningEffort?: \"none\" | \"minimal\" | \"low\" | \"medium\" | \"high\" | \"xhigh\";\n\treasoningSummary?: \"auto\" | \"concise\" | \"detailed\" | \"off\" | \"on\" | null;\n\ttextVerbosity?: \"low\" | \"medium\" | \"high\";\n\tinclude?: string[];\n\tcodexMode?: boolean;\n}\n\nconst CODEX_DEBUG = process.env.PI_CODEX_DEBUG === \"1\" || process.env.PI_CODEX_DEBUG === \"true\";\n\nexport const streamOpenAICodexResponses: StreamFunction<\"openai-codex-responses\"> = (\n\tmodel: Model<\"openai-codex-responses\">,\n\tcontext: Context,\n\toptions?: OpenAICodexResponsesOptions,\n): AssistantMessageEventStream => {\n\tconst stream = new AssistantMessageEventStream();\n\n\t(async () => {\n\t\tconst output: AssistantMessage = {\n\t\t\trole: \"assistant\",\n\t\t\tcontent: [],\n\t\t\tapi: \"openai-codex-responses\" as Api,\n\t\t\tprovider: model.provider,\n\t\t\tmodel: model.id,\n\t\t\tusage: {\n\t\t\t\tinput: 0,\n\t\t\t\toutput: 0,\n\t\t\t\tcacheRead: 0,\n\t\t\t\tcacheWrite: 0,\n\t\t\t\ttotalTokens: 0,\n\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t\t},\n\t\t\tstopReason: \"stop\",\n\t\t\ttimestamp: Date.now(),\n\t\t};\n\n\t\ttry {\n\t\t\tconst apiKey = options?.apiKey || getEnvApiKey(model.provider) || \"\";\n\t\t\tif (!apiKey) {\n\t\t\t\tthrow new Error(`No API key for provider: ${model.provider}`);\n\t\t\t}\n\n\t\t\tconst accountId = getAccountId(apiKey);\n\t\t\tconst baseUrl = model.baseUrl || 
CODEX_BASE_URL;\n\t\t\tconst baseWithSlash = baseUrl.endsWith(\"/\") ? baseUrl : `${baseUrl}/`;\n\t\t\tconst url = rewriteUrlForCodex(new URL(URL_PATHS.RESPONSES.slice(1), baseWithSlash).toString());\n\n\t\t\tconst messages = convertMessages(model, context);\n\t\t\tconst params: RequestBody = {\n\t\t\t\tmodel: model.id,\n\t\t\t\tinput: messages,\n\t\t\t\tstream: true,\n\t\t\t\tprompt_cache_key: options?.sessionId,\n\t\t\t};\n\n\t\t\tif (options?.maxTokens) {\n\t\t\t\tparams.max_output_tokens = options.maxTokens;\n\t\t\t}\n\n\t\t\tif (options?.temperature !== undefined) {\n\t\t\t\tparams.temperature = options.temperature;\n\t\t\t}\n\n\t\t\tif (context.tools) {\n\t\t\t\tparams.tools = convertTools(context.tools);\n\t\t\t}\n\n\t\t\tconst codexInstructions = await getCodexInstructions(params.model);\n\t\t\tconst bridgeText = buildCodexPiBridge(context.tools);\n\t\t\tconst systemPrompt = buildCodexSystemPrompt({\n\t\t\t\tcodexInstructions,\n\t\t\t\tbridgeText,\n\t\t\t\tuserSystemPrompt: context.systemPrompt,\n\t\t\t});\n\n\t\t\tparams.instructions = systemPrompt.instructions;\n\n\t\t\tconst codexOptions: CodexRequestOptions = {\n\t\t\t\treasoningEffort: options?.reasoningEffort,\n\t\t\t\treasoningSummary: options?.reasoningSummary ?? undefined,\n\t\t\t\ttextVerbosity: options?.textVerbosity,\n\t\t\t\tinclude: options?.include,\n\t\t\t};\n\n\t\t\tconst transformedBody = await transformRequestBody(params, codexOptions, systemPrompt);\n\n\t\t\tconst reasoningEffort = transformedBody.reasoning?.effort ?? null;\n\t\t\tconst headers = createCodexHeaders(model.headers, accountId, apiKey, options?.sessionId);\n\t\t\tlogCodexDebug(\"codex request\", {\n\t\t\t\turl,\n\t\t\t\tmodel: params.model,\n\t\t\t\treasoningEffort,\n\t\t\t\theaders: redactHeaders(headers),\n\t\t\t});\n\n\t\t\tconst response = await fetch(url, {\n\t\t\t\tmethod: \"POST\",\n\t\t\t\theaders,\n\t\t\t\tbody: JSON.stringify(transformedBody),\n\t\t\t\tsignal: options?.signal,\n\t\t\t});\n\n\t\t\tlogCodexDebug(\"codex response\", {\n\t\t\t\turl: response.url,\n\t\t\t\tstatus: response.status,\n\t\t\t\tstatusText: response.statusText,\n\t\t\t\tcontentType: response.headers.get(\"content-type\") || null,\n\t\t\t\tcfRay: response.headers.get(\"cf-ray\") || null,\n\t\t\t});\n\n\t\t\tif (!response.ok) {\n\t\t\t\tconst info = await parseCodexError(response);\n\t\t\t\tthrow new Error(info.friendlyMessage || info.message);\n\t\t\t}\n\n\t\t\tif (!response.body) {\n\t\t\t\tthrow new Error(\"No response body\");\n\t\t\t}\n\n\t\t\tstream.push({ type: \"start\", partial: output });\n\n\t\t\tlet currentItem: ResponseReasoningItem | ResponseOutputMessage | ResponseFunctionToolCall | null = null;\n\t\t\tlet currentBlock: ThinkingContent | TextContent | (ToolCall & { partialJson: string }) | null = null;\n\t\t\tconst blocks = output.content;\n\t\t\tconst blockIndex = () => blocks.length - 1;\n\n\t\t\tfor await (const rawEvent of parseCodexSseStream(response)) {\n\t\t\t\tconst eventType = typeof rawEvent.type === \"string\" ? 
rawEvent.type : \"\";\n\t\t\t\tif (!eventType) continue;\n\n\t\t\t\tif (eventType === \"response.output_item.added\") {\n\t\t\t\t\tconst item = rawEvent.item as ResponseReasoningItem | ResponseOutputMessage | ResponseFunctionToolCall;\n\t\t\t\t\tif (item.type === \"reasoning\") {\n\t\t\t\t\t\tcurrentItem = item;\n\t\t\t\t\t\tcurrentBlock = { type: \"thinking\", thinking: \"\" };\n\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\tstream.push({ type: \"thinking_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t} else if (item.type === \"message\") {\n\t\t\t\t\t\tcurrentItem = item;\n\t\t\t\t\t\tcurrentBlock = { type: \"text\", text: \"\" };\n\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\tstream.push({ type: \"text_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t} else if (item.type === \"function_call\") {\n\t\t\t\t\t\tcurrentItem = item;\n\t\t\t\t\t\tcurrentBlock = {\n\t\t\t\t\t\t\ttype: \"toolCall\",\n\t\t\t\t\t\t\tid: `${item.call_id}|${item.id}`,\n\t\t\t\t\t\t\tname: item.name,\n\t\t\t\t\t\t\targuments: {},\n\t\t\t\t\t\t\tpartialJson: item.arguments || \"\",\n\t\t\t\t\t\t};\n\t\t\t\t\t\toutput.content.push(currentBlock);\n\t\t\t\t\t\tstream.push({ type: \"toolcall_start\", contentIndex: blockIndex(), partial: output });\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"response.reasoning_summary_part.added\") {\n\t\t\t\t\tif (currentItem && currentItem.type === \"reasoning\") {\n\t\t\t\t\t\tcurrentItem.summary = currentItem.summary || [];\n\t\t\t\t\t\tcurrentItem.summary.push((rawEvent as { part: ResponseReasoningItem[\"summary\"][number] }).part);\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"response.reasoning_summary_text.delta\") {\n\t\t\t\t\tif (currentItem && currentItem.type === \"reasoning\" && currentBlock?.type === \"thinking\") {\n\t\t\t\t\t\tcurrentItem.summary = currentItem.summary || [];\n\t\t\t\t\t\tconst lastPart = currentItem.summary[currentItem.summary.length - 1];\n\t\t\t\t\t\tif (lastPart) {\n\t\t\t\t\t\t\tconst delta = (rawEvent as { delta?: string }).delta || \"\";\n\t\t\t\t\t\t\tcurrentBlock.thinking += delta;\n\t\t\t\t\t\t\tlastPart.text += delta;\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"thinking_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\tdelta,\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"response.reasoning_summary_part.done\") {\n\t\t\t\t\tif (currentItem && currentItem.type === \"reasoning\" && currentBlock?.type === \"thinking\") {\n\t\t\t\t\t\tcurrentItem.summary = currentItem.summary || [];\n\t\t\t\t\t\tconst lastPart = currentItem.summary[currentItem.summary.length - 1];\n\t\t\t\t\t\tif (lastPart) {\n\t\t\t\t\t\t\tcurrentBlock.thinking += \"\\n\\n\";\n\t\t\t\t\t\t\tlastPart.text += \"\\n\\n\";\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"thinking_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\tdelta: \"\\n\\n\",\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"response.content_part.added\") {\n\t\t\t\t\tif (currentItem && currentItem.type === \"message\") {\n\t\t\t\t\t\tcurrentItem.content = currentItem.content || [];\n\t\t\t\t\t\tconst part = (rawEvent as { part?: ResponseOutputMessage[\"content\"][number] }).part;\n\t\t\t\t\t\tif (part && (part.type === \"output_text\" || part.type === \"refusal\")) 
{\n\t\t\t\t\t\t\tcurrentItem.content.push(part);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"response.output_text.delta\") {\n\t\t\t\t\tif (currentItem && currentItem.type === \"message\" && currentBlock?.type === \"text\") {\n\t\t\t\t\t\tconst lastPart = currentItem.content[currentItem.content.length - 1];\n\t\t\t\t\t\tif (lastPart && lastPart.type === \"output_text\") {\n\t\t\t\t\t\t\tconst delta = (rawEvent as { delta?: string }).delta || \"\";\n\t\t\t\t\t\t\tcurrentBlock.text += delta;\n\t\t\t\t\t\t\tlastPart.text += delta;\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"text_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\tdelta,\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"response.refusal.delta\") {\n\t\t\t\t\tif (currentItem && currentItem.type === \"message\" && currentBlock?.type === \"text\") {\n\t\t\t\t\t\tconst lastPart = currentItem.content[currentItem.content.length - 1];\n\t\t\t\t\t\tif (lastPart && lastPart.type === \"refusal\") {\n\t\t\t\t\t\t\tconst delta = (rawEvent as { delta?: string }).delta || \"\";\n\t\t\t\t\t\t\tcurrentBlock.text += delta;\n\t\t\t\t\t\t\tlastPart.refusal += delta;\n\t\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\t\ttype: \"text_delta\",\n\t\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\t\tdelta,\n\t\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"response.function_call_arguments.delta\") {\n\t\t\t\t\tif (currentItem && currentItem.type === \"function_call\" && currentBlock?.type === \"toolCall\") {\n\t\t\t\t\t\tconst delta = (rawEvent as { delta?: string }).delta || \"\";\n\t\t\t\t\t\tcurrentBlock.partialJson += delta;\n\t\t\t\t\t\tcurrentBlock.arguments = parseStreamingJson(currentBlock.partialJson);\n\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\ttype: \"toolcall_delta\",\n\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\tdelta,\n\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t});\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"response.output_item.done\") {\n\t\t\t\t\tconst item = rawEvent.item as ResponseReasoningItem | ResponseOutputMessage | ResponseFunctionToolCall;\n\t\t\t\t\tif (item.type === \"reasoning\" && currentBlock?.type === \"thinking\") {\n\t\t\t\t\t\tcurrentBlock.thinking = item.summary?.map((s) => s.text).join(\"\\n\\n\") || \"\";\n\t\t\t\t\t\tcurrentBlock.thinkingSignature = JSON.stringify(item);\n\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\ttype: \"thinking_end\",\n\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\tcontent: currentBlock.thinking,\n\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t});\n\t\t\t\t\t\tcurrentBlock = null;\n\t\t\t\t\t} else if (item.type === \"message\" && currentBlock?.type === \"text\") {\n\t\t\t\t\t\tcurrentBlock.text = item.content.map((c) => (c.type === \"output_text\" ? 
c.text : c.refusal)).join(\"\");\n\t\t\t\t\t\tcurrentBlock.textSignature = item.id;\n\t\t\t\t\t\tstream.push({\n\t\t\t\t\t\t\ttype: \"text_end\",\n\t\t\t\t\t\t\tcontentIndex: blockIndex(),\n\t\t\t\t\t\t\tcontent: currentBlock.text,\n\t\t\t\t\t\t\tpartial: output,\n\t\t\t\t\t\t});\n\t\t\t\t\t\tcurrentBlock = null;\n\t\t\t\t\t} else if (item.type === \"function_call\") {\n\t\t\t\t\t\tconst toolCall: ToolCall = {\n\t\t\t\t\t\t\ttype: \"toolCall\",\n\t\t\t\t\t\t\tid: `${item.call_id}|${item.id}`,\n\t\t\t\t\t\t\tname: item.name,\n\t\t\t\t\t\t\targuments: JSON.parse(item.arguments),\n\t\t\t\t\t\t};\n\t\t\t\t\t\tstream.push({ type: \"toolcall_end\", contentIndex: blockIndex(), toolCall, partial: output });\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"response.completed\" || eventType === \"response.done\") {\n\t\t\t\t\tconst response = (\n\t\t\t\t\t\trawEvent as {\n\t\t\t\t\t\t\tresponse?: {\n\t\t\t\t\t\t\t\tusage?: {\n\t\t\t\t\t\t\t\t\tinput_tokens?: number;\n\t\t\t\t\t\t\t\t\toutput_tokens?: number;\n\t\t\t\t\t\t\t\t\ttotal_tokens?: number;\n\t\t\t\t\t\t\t\t\tinput_tokens_details?: { cached_tokens?: number };\n\t\t\t\t\t\t\t\t};\n\t\t\t\t\t\t\t\tstatus?: string;\n\t\t\t\t\t\t\t};\n\t\t\t\t\t\t}\n\t\t\t\t\t).response;\n\t\t\t\t\tif (response?.usage) {\n\t\t\t\t\t\tconst cachedTokens = response.usage.input_tokens_details?.cached_tokens || 0;\n\t\t\t\t\t\toutput.usage = {\n\t\t\t\t\t\t\tinput: (response.usage.input_tokens || 0) - cachedTokens,\n\t\t\t\t\t\t\toutput: response.usage.output_tokens || 0,\n\t\t\t\t\t\t\tcacheRead: cachedTokens,\n\t\t\t\t\t\t\tcacheWrite: 0,\n\t\t\t\t\t\t\ttotalTokens: response.usage.total_tokens || 0,\n\t\t\t\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t\t\t\t\t};\n\t\t\t\t\t}\n\t\t\t\t\tcalculateCost(model, output.usage);\n\t\t\t\t\toutput.stopReason = mapStopReason(response?.status);\n\t\t\t\t\tif (output.content.some((b) => b.type === \"toolCall\") && output.stopReason === \"stop\") {\n\t\t\t\t\t\toutput.stopReason = \"toolUse\";\n\t\t\t\t\t}\n\t\t\t\t} else if (eventType === \"error\") {\n\t\t\t\t\tconst code = (rawEvent as { code?: string }).code || \"\";\n\t\t\t\t\tconst message = (rawEvent as { message?: string }).message || \"\";\n\t\t\t\t\tthrow new Error(formatCodexErrorEvent(rawEvent, code, message));\n\t\t\t\t} else if (eventType === \"response.failed\") {\n\t\t\t\t\tthrow new Error(formatCodexFailure(rawEvent) ?? \"Codex response failed\");\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (options?.signal?.aborted) {\n\t\t\t\tthrow new Error(\"Request was aborted\");\n\t\t\t}\n\n\t\t\tif (output.stopReason === \"aborted\" || output.stopReason === \"error\") {\n\t\t\t\tthrow new Error(\"Codex response failed\");\n\t\t\t}\n\n\t\t\tstream.push({ type: \"done\", reason: output.stopReason, message: output });\n\t\t\tstream.end();\n\t\t} catch (error) {\n\t\t\tfor (const block of output.content) delete (block as { index?: number }).index;\n\t\t\toutput.stopReason = options?.signal?.aborted ? \"aborted\" : \"error\";\n\t\t\toutput.errorMessage = error instanceof Error ? error.message : JSON.stringify(error);\n\t\t\tstream.push({ type: \"error\", reason: output.stopReason, error: output });\n\t\t\tstream.end();\n\t\t}\n\t})();\n\n\treturn stream;\n};\n\nfunction createCodexHeaders(\n\tinitHeaders: Record<string, string> | undefined,\n\taccountId: string,\n\taccessToken: string,\n\tpromptCacheKey?: string,\n): Headers {\n\tconst headers = new Headers(initHeaders ?? 
{});\n\theaders.delete(\"x-api-key\");\n\theaders.set(\"Authorization\", `Bearer ${accessToken}`);\n\theaders.set(OPENAI_HEADERS.ACCOUNT_ID, accountId);\n\theaders.set(OPENAI_HEADERS.BETA, OPENAI_HEADER_VALUES.BETA_RESPONSES);\n\theaders.set(OPENAI_HEADERS.ORIGINATOR, OPENAI_HEADER_VALUES.ORIGINATOR_CODEX);\n\n\tif (promptCacheKey) {\n\t\theaders.set(OPENAI_HEADERS.CONVERSATION_ID, promptCacheKey);\n\t\theaders.set(OPENAI_HEADERS.SESSION_ID, promptCacheKey);\n\t} else {\n\t\theaders.delete(OPENAI_HEADERS.CONVERSATION_ID);\n\t\theaders.delete(OPENAI_HEADERS.SESSION_ID);\n\t}\n\n\theaders.set(\"accept\", \"text/event-stream\");\n\theaders.set(\"content-type\", \"application/json\");\n\treturn headers;\n}\n\nfunction logCodexDebug(message: string, details?: Record<string, unknown>): void {\n\tif (!CODEX_DEBUG) return;\n\tif (details) {\n\t\tconsole.error(`[codex] ${message}`, details);\n\t\treturn;\n\t}\n\tconsole.error(`[codex] ${message}`);\n}\n\nfunction redactHeaders(headers: Headers): Record<string, string> {\n\tconst redacted: Record<string, string> = {};\n\tfor (const [key, value] of headers.entries()) {\n\t\tconst lower = key.toLowerCase();\n\t\tif (lower === \"authorization\") {\n\t\t\tredacted[key] = \"Bearer [redacted]\";\n\t\t\tcontinue;\n\t\t}\n\t\tif (\n\t\t\tlower.includes(\"account\") ||\n\t\t\tlower.includes(\"session\") ||\n\t\t\tlower.includes(\"conversation\") ||\n\t\t\tlower === \"cookie\"\n\t\t) {\n\t\t\tredacted[key] = \"[redacted]\";\n\t\t\tcontinue;\n\t\t}\n\t\tredacted[key] = value;\n\t}\n\treturn redacted;\n}\n\nfunction rewriteUrlForCodex(url: string): string {\n\treturn url.replace(URL_PATHS.RESPONSES, URL_PATHS.CODEX_RESPONSES);\n}\n\ntype JwtPayload = {\n\t[JWT_CLAIM_PATH]?: {\n\t\tchatgpt_account_id?: string;\n\t};\n\t[key: string]: unknown;\n};\n\nfunction decodeJwt(token: string): JwtPayload | null {\n\ttry {\n\t\tconst parts = token.split(\".\");\n\t\tif (parts.length !== 3) return null;\n\t\tconst payload = parts[1] ?? 
\"\";\n\t\tconst decoded = Buffer.from(payload, \"base64\").toString(\"utf-8\");\n\t\treturn JSON.parse(decoded) as JwtPayload;\n\t} catch {\n\t\treturn null;\n\t}\n}\n\nfunction getAccountId(accessToken: string): string {\n\tconst payload = decodeJwt(accessToken);\n\tconst auth = payload?.[JWT_CLAIM_PATH];\n\tconst accountId = auth?.chatgpt_account_id;\n\tif (!accountId) {\n\t\tthrow new Error(\"Failed to extract accountId from token\");\n\t}\n\treturn accountId;\n}\n\nfunction shortHash(str: string): string {\n\tlet h1 = 0xdeadbeef;\n\tlet h2 = 0x41c6ce57;\n\tfor (let i = 0; i < str.length; i++) {\n\t\tconst ch = str.charCodeAt(i);\n\t\th1 = Math.imul(h1 ^ ch, 2654435761);\n\t\th2 = Math.imul(h2 ^ ch, 1597334677);\n\t}\n\th1 = Math.imul(h1 ^ (h1 >>> 16), 2246822507) ^ Math.imul(h2 ^ (h2 >>> 13), 3266489909);\n\th2 = Math.imul(h2 ^ (h2 >>> 16), 2246822507) ^ Math.imul(h1 ^ (h1 >>> 13), 3266489909);\n\treturn (h2 >>> 0).toString(36) + (h1 >>> 0).toString(36);\n}\n\nfunction convertMessages(model: Model<\"openai-codex-responses\">, context: Context): ResponseInput {\n\tconst messages: ResponseInput = [];\n\n\tconst transformedMessages = transformMessages(context.messages, model);\n\n\tlet msgIndex = 0;\n\tfor (const msg of transformedMessages) {\n\t\tif (msg.role === \"user\") {\n\t\t\tif (typeof msg.content === \"string\") {\n\t\t\t\tmessages.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: [{ type: \"input_text\", text: sanitizeSurrogates(msg.content) }],\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\tconst content: ResponseInputContent[] = msg.content.map((item): ResponseInputContent => {\n\t\t\t\t\tif (item.type === \"text\") {\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\ttype: \"input_text\",\n\t\t\t\t\t\t\ttext: sanitizeSurrogates(item.text),\n\t\t\t\t\t\t} satisfies ResponseInputText;\n\t\t\t\t\t}\n\t\t\t\t\treturn {\n\t\t\t\t\t\ttype: \"input_image\",\n\t\t\t\t\t\tdetail: \"auto\",\n\t\t\t\t\t\timage_url: `data:${item.mimeType};base64,${item.data}`,\n\t\t\t\t\t} satisfies ResponseInputImage;\n\t\t\t\t});\n\t\t\t\tconst filteredContent = !model.input.includes(\"image\")\n\t\t\t\t\t? 
content.filter((c) => c.type !== \"input_image\")\n\t\t\t\t\t: content;\n\t\t\t\tif (filteredContent.length === 0) continue;\n\t\t\t\tmessages.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: filteredContent,\n\t\t\t\t});\n\t\t\t}\n\t\t} else if (msg.role === \"assistant\") {\n\t\t\tconst output: ResponseInput = [];\n\n\t\t\tfor (const block of msg.content) {\n\t\t\t\tif (block.type === \"thinking\" && msg.stopReason !== \"error\") {\n\t\t\t\t\tif (block.thinkingSignature) {\n\t\t\t\t\t\tconst reasoningItem = JSON.parse(block.thinkingSignature) as ResponseReasoningItem;\n\t\t\t\t\t\toutput.push(reasoningItem);\n\t\t\t\t\t}\n\t\t\t\t} else if (block.type === \"text\") {\n\t\t\t\t\tconst textBlock = block as TextContent;\n\t\t\t\t\tlet msgId = textBlock.textSignature;\n\t\t\t\t\tif (!msgId) {\n\t\t\t\t\t\tmsgId = `msg_${msgIndex}`;\n\t\t\t\t\t} else if (msgId.length > 64) {\n\t\t\t\t\t\tmsgId = `msg_${shortHash(msgId)}`;\n\t\t\t\t\t}\n\t\t\t\t\toutput.push({\n\t\t\t\t\t\ttype: \"message\",\n\t\t\t\t\t\trole: \"assistant\",\n\t\t\t\t\t\tcontent: [{ type: \"output_text\", text: sanitizeSurrogates(textBlock.text), annotations: [] }],\n\t\t\t\t\t\tstatus: \"completed\",\n\t\t\t\t\t\tid: msgId,\n\t\t\t\t\t} satisfies ResponseOutputMessage);\n\t\t\t\t} else if (block.type === \"toolCall\" && msg.stopReason !== \"error\") {\n\t\t\t\t\tconst toolCall = block as ToolCall;\n\t\t\t\t\toutput.push({\n\t\t\t\t\t\ttype: \"function_call\",\n\t\t\t\t\t\tid: toolCall.id.split(\"|\")[1],\n\t\t\t\t\t\tcall_id: toolCall.id.split(\"|\")[0],\n\t\t\t\t\t\tname: toolCall.name,\n\t\t\t\t\t\targuments: JSON.stringify(toolCall.arguments),\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t}\n\t\t\tif (output.length === 0) continue;\n\t\t\tmessages.push(...output);\n\t\t} else if (msg.role === \"toolResult\") {\n\t\t\tconst textResult = msg.content\n\t\t\t\t.filter((c) => c.type === \"text\")\n\t\t\t\t.map((c) => (c as { text: string }).text)\n\t\t\t\t.join(\"\\n\");\n\t\t\tconst hasImages = msg.content.some((c) => c.type === \"image\");\n\n\t\t\tconst hasText = textResult.length > 0;\n\t\t\tmessages.push({\n\t\t\t\ttype: \"function_call_output\",\n\t\t\t\tcall_id: msg.toolCallId.split(\"|\")[0],\n\t\t\t\toutput: sanitizeSurrogates(hasText ? 
textResult : \"(see attached image)\"),\n\t\t\t});\n\n\t\t\tif (hasImages && model.input.includes(\"image\")) {\n\t\t\t\tconst contentParts: ResponseInputContent[] = [];\n\t\t\t\tcontentParts.push({\n\t\t\t\t\ttype: \"input_text\",\n\t\t\t\t\ttext: \"Attached image(s) from tool result:\",\n\t\t\t\t} satisfies ResponseInputText);\n\n\t\t\t\tfor (const block of msg.content) {\n\t\t\t\t\tif (block.type === \"image\") {\n\t\t\t\t\t\tcontentParts.push({\n\t\t\t\t\t\t\ttype: \"input_image\",\n\t\t\t\t\t\t\tdetail: \"auto\",\n\t\t\t\t\t\t\timage_url: `data:${block.mimeType};base64,${block.data}`,\n\t\t\t\t\t\t} satisfies ResponseInputImage);\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tmessages.push({\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent: contentParts,\n\t\t\t\t});\n\t\t\t}\n\t\t}\n\t\tmsgIndex++;\n\t}\n\n\treturn messages;\n}\n\nfunction convertTools(\n\ttools: Tool[],\n): Array<{ type: \"function\"; name: string; description: string; parameters: Record<string, unknown>; strict: null }> {\n\treturn tools.map((tool) => ({\n\t\ttype: \"function\",\n\t\tname: tool.name,\n\t\tdescription: tool.description,\n\t\tparameters: tool.parameters as unknown as Record<string, unknown>,\n\t\tstrict: null,\n\t}));\n}\n\nfunction mapStopReason(status: string | undefined): StopReason {\n\tif (!status) return \"stop\";\n\tswitch (status) {\n\t\tcase \"completed\":\n\t\t\treturn \"stop\";\n\t\tcase \"incomplete\":\n\t\t\treturn \"length\";\n\t\tcase \"failed\":\n\t\tcase \"cancelled\":\n\t\t\treturn \"error\";\n\t\tcase \"in_progress\":\n\t\tcase \"queued\":\n\t\t\treturn \"stop\";\n\t\tdefault:\n\t\t\treturn \"stop\";\n\t}\n}\n\nfunction asRecord(value: unknown): Record<string, unknown> | null {\n\tif (value && typeof value === \"object\") {\n\t\treturn value as Record<string, unknown>;\n\t}\n\treturn null;\n}\n\nfunction getString(value: unknown): string | undefined {\n\treturn typeof value === \"string\" ? value : undefined;\n}\n\nfunction truncate(text: string, limit: number): string {\n\tif (text.length <= limit) return text;\n\treturn `${text.slice(0, limit)}...[truncated ${text.length - limit}]`;\n}\n\nfunction formatCodexFailure(rawEvent: Record<string, unknown>): string | null {\n\tconst response = asRecord(rawEvent.response);\n\tconst error = asRecord(rawEvent.error) ?? (response ? asRecord(response.error) : null);\n\n\tconst message = getString(error?.message) ?? getString(rawEvent.message) ?? getString(response?.message);\n\tconst code = getString(error?.code) ?? getString(error?.type) ?? getString(rawEvent.code);\n\tconst status = getString(response?.status) ?? getString(rawEvent.status);\n\n\tconst meta: string[] = [];\n\tif (code) meta.push(`code=${code}`);\n\tif (status) meta.push(`status=${status}`);\n\n\tif (message) {\n\t\tconst metaText = meta.length ? 
` (${meta.join(\", \")})` : \"\";\n\t\treturn `Codex response failed: ${message}${metaText}`;\n\t}\n\n\tif (meta.length) {\n\t\treturn `Codex response failed (${meta.join(\", \")})`;\n\t}\n\n\ttry {\n\t\treturn `Codex response failed: ${truncate(JSON.stringify(rawEvent), 800)}`;\n\t} catch {\n\t\treturn \"Codex response failed\";\n\t}\n}\n\nfunction formatCodexErrorEvent(rawEvent: Record<string, unknown>, code: string, message: string): string {\n\tconst detail = formatCodexFailure(rawEvent);\n\tif (detail) {\n\t\treturn detail.replace(\"response failed\", \"error event\");\n\t}\n\n\tconst meta: string[] = [];\n\tif (code) meta.push(`code=${code}`);\n\tif (message) meta.push(`message=${message}`);\n\n\tif (meta.length > 0) {\n\t\treturn `Codex error event (${meta.join(\", \")})`;\n\t}\n\n\ttry {\n\t\treturn `Codex error event: ${truncate(JSON.stringify(rawEvent), 800)}`;\n\t} catch {\n\t\treturn \"Codex error event\";\n\t}\n}\n"]}
|