@mariozechner/pi-ai 0.37.2 → 0.37.3

Files changed (27)
  1. package/README.md +61 -12
  2. package/dist/providers/openai-codex/prompts/codex.d.ts +0 -1
  3. package/dist/providers/openai-codex/prompts/codex.d.ts.map +1 -1
  4. package/dist/providers/openai-codex/prompts/codex.js +1 -42
  5. package/dist/providers/openai-codex/prompts/codex.js.map +1 -1
  6. package/dist/providers/openai-codex/prompts/pi-codex-bridge.d.ts +2 -1
  7. package/dist/providers/openai-codex/prompts/pi-codex-bridge.d.ts.map +1 -1
  8. package/dist/providers/openai-codex/prompts/pi-codex-bridge.js +42 -42
  9. package/dist/providers/openai-codex/prompts/pi-codex-bridge.js.map +1 -1
  10. package/dist/providers/openai-codex/prompts/system-prompt.d.ts +10 -0
  11. package/dist/providers/openai-codex/prompts/system-prompt.d.ts.map +1 -0
  12. package/dist/providers/openai-codex/prompts/system-prompt.js +15 -0
  13. package/dist/providers/openai-codex/prompts/system-prompt.js.map +1 -0
  14. package/dist/providers/openai-codex/request-transformer.d.ts +5 -1
  15. package/dist/providers/openai-codex/request-transformer.d.ts.map +1 -1
  16. package/dist/providers/openai-codex/request-transformer.js +9 -41
  17. package/dist/providers/openai-codex/request-transformer.js.map +1 -1
  18. package/dist/providers/openai-codex-responses.d.ts.map +1 -1
  19. package/dist/providers/openai-codex-responses.js +13 -2
  20. package/dist/providers/openai-codex-responses.js.map +1 -1
  21. package/dist/stream.d.ts.map +1 -1
  22. package/dist/stream.js +1 -0
  23. package/dist/stream.js.map +1 -1
  24. package/dist/types.d.ts +6 -0
  25. package/dist/types.d.ts.map +1 -1
  26. package/dist/types.js.map +1 -1
  27. package/package.json +1 -1
package/README.md CHANGED
@@ -4,9 +4,50 @@ Unified LLM API with automatic model discovery, provider configuration, token an
 
  **Note**: This library only includes models that support tool calling (function calling), as this is essential for agentic workflows.
 
+ ## Table of Contents
+
+ - [Supported Providers](#supported-providers)
+ - [Installation](#installation)
+ - [Quick Start](#quick-start)
+ - [Tools](#tools)
+ - [Defining Tools](#defining-tools)
+ - [Handling Tool Calls](#handling-tool-calls)
+ - [Streaming Tool Calls with Partial JSON](#streaming-tool-calls-with-partial-json)
+ - [Validating Tool Arguments](#validating-tool-arguments)
+ - [Complete Event Reference](#complete-event-reference)
+ - [Image Input](#image-input)
+ - [Thinking/Reasoning](#thinkingreasoning)
+ - [Unified Interface](#unified-interface-streamsimplecompletesimple)
+ - [Provider-Specific Options](#provider-specific-options-streamcomplete)
+ - [Streaming Thinking Content](#streaming-thinking-content)
+ - [Stop Reasons](#stop-reasons)
+ - [Error Handling](#error-handling)
+ - [Aborting Requests](#aborting-requests)
+ - [Continuing After Abort](#continuing-after-abort)
+ - [APIs, Models, and Providers](#apis-models-and-providers)
+ - [Providers and Models](#providers-and-models)
+ - [Querying Providers and Models](#querying-providers-and-models)
+ - [Custom Models](#custom-models)
+ - [OpenAI Compatibility Settings](#openai-compatibility-settings)
+ - [Type Safety](#type-safety)
+ - [Cross-Provider Handoffs](#cross-provider-handoffs)
+ - [Context Serialization](#context-serialization)
+ - [Browser Usage](#browser-usage)
+ - [Environment Variables](#environment-variables-nodejs-only)
+ - [Checking Environment Variables](#checking-environment-variables)
+ - [OAuth Providers](#oauth-providers)
+ - [Vertex AI (ADC)](#vertex-ai-adc)
+ - [CLI Login](#cli-login)
+ - [Programmatic OAuth](#programmatic-oauth)
+ - [Login Flow Example](#login-flow-example)
+ - [Using OAuth Tokens](#using-oauth-tokens)
+ - [Provider Notes](#provider-notes)
+ - [License](#license)
+
  ## Supported Providers
 
  - **OpenAI**
+ - **OpenAI Codex** (ChatGPT Plus/Pro subscription, requires OAuth, see below)
  - **Anthropic**
  - **Google**
  - **Vertex AI** (Gemini via Vertex AI)
@@ -16,6 +57,8 @@ Unified LLM API with automatic model discovery, provider configuration, token an
  - **xAI**
  - **OpenRouter**
  - **GitHub Copilot** (requires OAuth, see below)
+ - **Google Gemini CLI** (requires OAuth, see below)
+ - **Antigravity** (requires OAuth, see below)
  - **Any OpenAI-compatible API**: Ollama, vLLM, LM Studio, etc.
 
  ## Installation
@@ -806,17 +849,19 @@ const response = await complete(model, {
 
  In Node.js environments, you can set environment variables to avoid passing API keys:
 
- ```bash
- OPENAI_API_KEY=sk-...
- ANTHROPIC_API_KEY=sk-ant-...
- GEMINI_API_KEY=...
- MISTRAL_API_KEY=...
- GROQ_API_KEY=gsk_...
- CEREBRAS_API_KEY=csk-...
- XAI_API_KEY=xai-...
- ZAI_API_KEY=...
- OPENROUTER_API_KEY=sk-or-...
- ```
+ | Provider | Environment Variable(s) |
+ |----------|------------------------|
+ | OpenAI | `OPENAI_API_KEY` |
+ | Anthropic | `ANTHROPIC_API_KEY` or `ANTHROPIC_OAUTH_TOKEN` |
+ | Google | `GEMINI_API_KEY` |
+ | Vertex AI | `GOOGLE_CLOUD_PROJECT` (or `GCLOUD_PROJECT`) + `GOOGLE_CLOUD_LOCATION` + ADC |
+ | Mistral | `MISTRAL_API_KEY` |
+ | Groq | `GROQ_API_KEY` |
+ | Cerebras | `CEREBRAS_API_KEY` |
+ | xAI | `XAI_API_KEY` |
+ | OpenRouter | `OPENROUTER_API_KEY` |
+ | zAI | `ZAI_API_KEY` |
+ | GitHub Copilot | `COPILOT_GITHUB_TOKEN` or `GH_TOKEN` or `GITHUB_TOKEN` |
 
  When set, the library automatically uses these keys:
 
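The next hunk's header shows the package's own lookup helper (`const key = getEnvApiKey('openai')`). As a minimal sketch of how the table above is consumed, assuming `getEnvApiKey` is exported from the package root as that snippet suggests:

```typescript
// Minimal sketch, assuming getEnvApiKey is exported from the package root
// (the README's own snippet uses it as `getEnvApiKey('openai')`).
import { getEnvApiKey } from "@mariozechner/pi-ai";

const key = getEnvApiKey("openai"); // resolves OPENAI_API_KEY per the table above
if (!key) {
	throw new Error("OPENAI_API_KEY is not set; export it or pass apiKey explicitly");
}
```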
@@ -845,6 +890,7 @@ const key = getEnvApiKey('openai'); // checks OPENAI_API_KEY
  Several providers require OAuth authentication instead of static API keys:
 
  - **Anthropic** (Claude Pro/Max subscription)
+ - **OpenAI Codex** (ChatGPT Plus/Pro subscription, access to GPT-5.x Codex models)
  - **GitHub Copilot** (Copilot subscription)
  - **Google Gemini CLI** (Free Gemini 2.0/2.5 via Google Cloud Code Assist)
  - **Antigravity** (Free Gemini 3, Claude, GPT-OSS via Google Cloud)
@@ -873,6 +919,7 @@ The library provides login and token refresh functions. Credential storage is th
  import {
  // Login functions (return credentials, do not store)
  loginAnthropic,
+ loginOpenAICodex,
  loginGitHubCopilot,
  loginGeminiCli,
  loginAntigravity,
@@ -882,7 +929,7 @@ import {
  getOAuthApiKey, // (provider, credentialsMap) => { newCredentials, apiKey } | null
 
  // Types
- type OAuthProvider, // 'anthropic' | 'github-copilot' | 'google-gemini-cli' | 'google-antigravity'
+ type OAuthProvider, // 'anthropic' | 'openai-codex' | 'github-copilot' | 'google-gemini-cli' | 'google-antigravity'
  type OAuthCredentials,
  } from '@mariozechner/pi-ai';
  ```
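A hedged sketch of the programmatic OAuth flow built from the shapes documented in this hunk: login functions return credentials that the caller stores, and `getOAuthApiKey(provider, credentialsMap)` yields `{ newCredentials, apiKey } | null`. The argument list of `loginOpenAICodex` is not shown in this diff, so the no-argument call below is an assumption.

```typescript
import { loginOpenAICodex, getOAuthApiKey, type OAuthCredentials } from "@mariozechner/pi-ai";

// Credential storage is the caller's responsibility; a plain map keyed by
// provider id is assumed here.
const store: Record<string, OAuthCredentials> = {};
store["openai-codex"] = await loginOpenAICodex(); // argument list assumed

const result = await getOAuthApiKey("openai-codex", store); // awaited in case it refreshes tokens
if (result) {
	store["openai-codex"] = result.newCredentials; // persist refreshed credentials
	const apiKey = result.apiKey; // pass to stream()/complete()
}
```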
@@ -937,6 +984,8 @@ const response = await complete(model, {
 
  ### Provider Notes
 
+ **OpenAI Codex**: Requires a ChatGPT Plus or Pro subscription. Provides access to GPT-5.x Codex models with extended context windows and reasoning capabilities. The library automatically handles session-based prompt caching when `sessionId` is provided in stream options.
+
  **GitHub Copilot**: If you get "The requested model is not supported" error, enable the model manually in VS Code: open Copilot Chat, click the model selector, select the model (warning icon), and click "Enable".
 
  **Google Gemini CLI / Antigravity**: These use Google Cloud OAuth. The `apiKey` returned by `getOAuthApiKey()` is a JSON string containing both the token and project ID, which the library handles automatically.
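A sketch of the session-based prompt caching mentioned in the OpenAI Codex note, reusing the `complete(model, { ... })` call shape visible in this diff's hunk headers; the `getModel` lookup and the model/session ids are illustrative.

```typescript
import { complete, getModel } from "@mariozechner/pi-ai";

const model = getModel("openai-codex", "gpt-5.2-codex"); // illustrative ids
const response = await complete(model, {
	messages: [{ role: "user", content: "Summarize src/stream.ts" }],
	// A stable sessionId lets the provider reuse the cached prompt prefix
	// across turns, per the note above.
	sessionId: "session-1234",
});
```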
package/dist/providers/openai-codex/prompts/codex.d.ts CHANGED
@@ -7,5 +7,4 @@ export type CacheMetadata = {
  };
  export declare function getModelFamily(normalizedModel: string): ModelFamily;
  export declare function getCodexInstructions(normalizedModel?: string): Promise<string>;
- export declare const TOOL_REMAP_MESSAGE = "<user_instructions priority=\"0\">\n<environment_override priority=\"0\">\nYOU ARE IN A DIFFERENT ENVIRONMENT. These instructions override ALL previous tool references.\n</environment_override>\n\n<tool_replacements priority=\"0\">\n<critical_rule priority=\"0\">\n\u274C APPLY_PATCH DOES NOT EXIST \u2192 \u2705 USE \"edit\" INSTEAD\n- NEVER use: apply_patch, applyPatch\n- ALWAYS use: edit tool for ALL file modifications\n</critical_rule>\n\n<critical_rule priority=\"0\">\n\u274C UPDATE_PLAN DOES NOT EXIST\n- NEVER use: update_plan, updatePlan, read_plan, readPlan, todowrite, todoread\n- There is no plan tool in this environment\n</critical_rule>\n</tool_replacements>\n\n<available_tools priority=\"0\">\nFile Operations:\n \u2022 read - Read file contents\n \u2022 edit - Modify files with exact find/replace\n \u2022 write - Create or overwrite files\n\nSearch/Discovery:\n \u2022 grep - Search file contents for patterns (read-only)\n \u2022 find - Find files by glob pattern (read-only)\n \u2022 ls - List directory contents (read-only)\n\nExecution:\n \u2022 bash - Run shell commands\n</available_tools>\n\n<verification_checklist priority=\"0\">\nBefore file modifications:\n1. Am I using \"edit\" NOT \"apply_patch\"?\n2. Am I avoiding plan tools entirely?\n3. Am I using only the tools listed above?\n</verification_checklist>\n</user_instructions>";
  //# sourceMappingURL=codex.d.ts.map
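For orientation, a sketch of how the two remaining declarations behave, reconstructed from the provider source embedded in the source map below; the deep import path is hypothetical, since these helpers are internal to the Codex provider.

```typescript
// Hypothetical deep import; these helpers are internal to the provider.
import { getModelFamily, getCodexInstructions } from "@mariozechner/pi-ai/dist/providers/openai-codex/prompts/codex.js";

getModelFamily("gpt-5.2-codex");     // "gpt-5.2-codex"
getModelFamily("gpt-5.1-codex-max"); // "codex-max"
getModelFamily("gpt-5.2");           // "gpt-5.2"
getModelFamily("something-else");    // falls back to "gpt-5.1"

// Downloads the matching prompt file from the openai/codex repo and caches
// it on disk, falling back to the cached or bundled copy on failure.
const instructions = await getCodexInstructions("gpt-5.2-codex");
```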
package/dist/providers/openai-codex/prompts/codex.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"codex.d.ts","sourceRoot":"","sources":["../../../../src/providers/openai-codex/prompts/codex.ts"],"names":[],"mappings":"AAqBA,MAAM,MAAM,WAAW,GAAG,eAAe,GAAG,WAAW,GAAG,OAAO,GAAG,SAAS,GAAG,SAAS,CAAC;AAkB1F,MAAM,MAAM,aAAa,GAAG;IAC3B,IAAI,EAAE,MAAM,GAAG,IAAI,CAAC;IACpB,GAAG,EAAE,MAAM,CAAC;IACZ,WAAW,EAAE,MAAM,CAAC;IACpB,GAAG,EAAE,MAAM,CAAC;CACZ,CAAC;AAEF,wBAAgB,cAAc,CAAC,eAAe,EAAE,MAAM,GAAG,WAAW,CAcnE;AAsCD,wBAAsB,oBAAoB,CAAC,eAAe,SAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,CAsF7F;AAED,eAAO,MAAM,kBAAkB,y2CAwCV,CAAC","sourcesContent":["import { existsSync, mkdirSync, readFileSync, writeFileSync } from \"node:fs\";\nimport { homedir } from \"node:os\";\nimport { dirname, join } from \"node:path\";\nimport { fileURLToPath } from \"node:url\";\n\nconst GITHUB_API_RELEASES = \"https://api.github.com/repos/openai/codex/releases/latest\";\nconst GITHUB_HTML_RELEASES = \"https://github.com/openai/codex/releases/latest\";\n\nconst DEFAULT_AGENT_DIR = join(homedir(), \".pi\", \"agent\");\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = dirname(__filename);\nconst FALLBACK_PROMPT_PATH = join(__dirname, \"codex-instructions.md\");\n\nfunction getAgentDir(): string {\n\treturn process.env.PI_CODING_AGENT_DIR || DEFAULT_AGENT_DIR;\n}\n\nfunction getCacheDir(): string {\n\treturn join(getAgentDir(), \"cache\", \"openai-codex\");\n}\n\nexport type ModelFamily = \"gpt-5.2-codex\" | \"codex-max\" | \"codex\" | \"gpt-5.2\" | \"gpt-5.1\";\n\nconst PROMPT_FILES: Record<ModelFamily, string> = {\n\t\"gpt-5.2-codex\": \"gpt-5.2-codex_prompt.md\",\n\t\"codex-max\": \"gpt-5.1-codex-max_prompt.md\",\n\tcodex: \"gpt_5_codex_prompt.md\",\n\t\"gpt-5.2\": \"gpt_5_2_prompt.md\",\n\t\"gpt-5.1\": \"gpt_5_1_prompt.md\",\n};\n\nconst CACHE_FILES: Record<ModelFamily, string> = {\n\t\"gpt-5.2-codex\": \"gpt-5.2-codex-instructions.md\",\n\t\"codex-max\": \"codex-max-instructions.md\",\n\tcodex: \"codex-instructions.md\",\n\t\"gpt-5.2\": \"gpt-5.2-instructions.md\",\n\t\"gpt-5.1\": \"gpt-5.1-instructions.md\",\n};\n\nexport type CacheMetadata = {\n\tetag: string | null;\n\ttag: string;\n\tlastChecked: number;\n\turl: string;\n};\n\nexport function getModelFamily(normalizedModel: string): ModelFamily {\n\tif (normalizedModel.includes(\"gpt-5.2-codex\") || normalizedModel.includes(\"gpt 5.2 codex\")) {\n\t\treturn \"gpt-5.2-codex\";\n\t}\n\tif (normalizedModel.includes(\"codex-max\")) {\n\t\treturn \"codex-max\";\n\t}\n\tif (normalizedModel.includes(\"codex\") || normalizedModel.startsWith(\"codex-\")) {\n\t\treturn \"codex\";\n\t}\n\tif (normalizedModel.includes(\"gpt-5.2\")) {\n\t\treturn \"gpt-5.2\";\n\t}\n\treturn \"gpt-5.1\";\n}\n\nasync function getLatestReleaseTag(): Promise<string> {\n\ttry {\n\t\tconst response = await fetch(GITHUB_API_RELEASES);\n\t\tif (response.ok) {\n\t\t\tconst data = (await response.json()) as { tag_name?: string };\n\t\t\tif (data.tag_name) {\n\t\t\t\treturn data.tag_name;\n\t\t\t}\n\t\t}\n\t} catch {\n\t\t// fallback\n\t}\n\n\tconst htmlResponse = await fetch(GITHUB_HTML_RELEASES);\n\tif (!htmlResponse.ok) {\n\t\tthrow new Error(`Failed to fetch latest release: ${htmlResponse.status}`);\n\t}\n\n\tconst finalUrl = htmlResponse.url;\n\tif (finalUrl) {\n\t\tconst parts = finalUrl.split(\"/tag/\");\n\t\tconst last = parts[parts.length - 1];\n\t\tif (last && !last.includes(\"/\")) {\n\t\t\treturn last;\n\t\t}\n\t}\n\n\tconst html = await htmlResponse.text();\n\tconst match = html.match(/\\/openai\\/codex\\/releases\\/tag\\/([^\"]+)/);\n\tif (match?.[1]) {\n\t\treturn 
match[1];\n\t}\n\n\tthrow new Error(\"Failed to determine latest release tag from GitHub\");\n}\n\nexport async function getCodexInstructions(normalizedModel = \"gpt-5.1-codex\"): Promise<string> {\n\tconst modelFamily = getModelFamily(normalizedModel);\n\tconst promptFile = PROMPT_FILES[modelFamily];\n\tconst cacheDir = getCacheDir();\n\tconst cacheFile = join(cacheDir, CACHE_FILES[modelFamily]);\n\tconst cacheMetaFile = join(cacheDir, `${CACHE_FILES[modelFamily].replace(\".md\", \"-meta.json\")}`);\n\n\ttry {\n\t\tlet cachedETag: string | null = null;\n\t\tlet cachedTag: string | null = null;\n\t\tlet cachedTimestamp: number | null = null;\n\n\t\tif (existsSync(cacheMetaFile)) {\n\t\t\tconst metadata = JSON.parse(readFileSync(cacheMetaFile, \"utf8\")) as CacheMetadata;\n\t\t\tcachedETag = metadata.etag;\n\t\t\tcachedTag = metadata.tag;\n\t\t\tcachedTimestamp = metadata.lastChecked;\n\t\t}\n\n\t\tconst CACHE_TTL_MS = 15 * 60 * 1000;\n\t\tif (cachedTimestamp && Date.now() - cachedTimestamp < CACHE_TTL_MS && existsSync(cacheFile)) {\n\t\t\treturn readFileSync(cacheFile, \"utf8\");\n\t\t}\n\n\t\tconst latestTag = await getLatestReleaseTag();\n\t\tconst instructionsUrl = `https://raw.githubusercontent.com/openai/codex/${latestTag}/codex-rs/core/${promptFile}`;\n\n\t\tif (cachedTag !== latestTag) {\n\t\t\tcachedETag = null;\n\t\t}\n\n\t\tconst headers: Record<string, string> = {};\n\t\tif (cachedETag) {\n\t\t\theaders[\"If-None-Match\"] = cachedETag;\n\t\t}\n\n\t\tconst response = await fetch(instructionsUrl, { headers });\n\n\t\tif (response.status === 304) {\n\t\t\tif (existsSync(cacheFile)) {\n\t\t\t\treturn readFileSync(cacheFile, \"utf8\");\n\t\t\t}\n\t\t}\n\n\t\tif (response.ok) {\n\t\t\tconst instructions = await response.text();\n\t\t\tconst newETag = response.headers.get(\"etag\");\n\n\t\t\tif (!existsSync(cacheDir)) {\n\t\t\t\tmkdirSync(cacheDir, { recursive: true });\n\t\t\t}\n\n\t\t\twriteFileSync(cacheFile, instructions, \"utf8\");\n\t\t\twriteFileSync(\n\t\t\t\tcacheMetaFile,\n\t\t\t\tJSON.stringify({\n\t\t\t\t\tetag: newETag,\n\t\t\t\t\ttag: latestTag,\n\t\t\t\t\tlastChecked: Date.now(),\n\t\t\t\t\turl: instructionsUrl,\n\t\t\t\t} satisfies CacheMetadata),\n\t\t\t\t\"utf8\",\n\t\t\t);\n\n\t\t\treturn instructions;\n\t\t}\n\n\t\tthrow new Error(`HTTP ${response.status}`);\n\t} catch (error) {\n\t\tconsole.error(\n\t\t\t`[openai-codex] Failed to fetch ${modelFamily} instructions from GitHub:`,\n\t\t\terror instanceof Error ? error.message : String(error),\n\t\t);\n\n\t\tif (existsSync(cacheFile)) {\n\t\t\tconsole.error(`[openai-codex] Using cached ${modelFamily} instructions`);\n\t\t\treturn readFileSync(cacheFile, \"utf8\");\n\t\t}\n\n\t\tif (existsSync(FALLBACK_PROMPT_PATH)) {\n\t\t\tconsole.error(`[openai-codex] Falling back to bundled instructions for ${modelFamily}`);\n\t\t\treturn readFileSync(FALLBACK_PROMPT_PATH, \"utf8\");\n\t\t}\n\n\t\tthrow new Error(`No cached Codex instructions available for ${modelFamily}`);\n\t}\n}\n\nexport const TOOL_REMAP_MESSAGE = `<user_instructions priority=\"0\">\n<environment_override priority=\"0\">\nYOU ARE IN A DIFFERENT ENVIRONMENT. 
These instructions override ALL previous tool references.\n</environment_override>\n\n<tool_replacements priority=\"0\">\n<critical_rule priority=\"0\">\n❌ APPLY_PATCH DOES NOT EXIST → ✅ USE \"edit\" INSTEAD\n- NEVER use: apply_patch, applyPatch\n- ALWAYS use: edit tool for ALL file modifications\n</critical_rule>\n\n<critical_rule priority=\"0\">\n❌ UPDATE_PLAN DOES NOT EXIST\n- NEVER use: update_plan, updatePlan, read_plan, readPlan, todowrite, todoread\n- There is no plan tool in this environment\n</critical_rule>\n</tool_replacements>\n\n<available_tools priority=\"0\">\nFile Operations:\n • read - Read file contents\n • edit - Modify files with exact find/replace\n • write - Create or overwrite files\n\nSearch/Discovery:\n • grep - Search file contents for patterns (read-only)\n • find - Find files by glob pattern (read-only)\n • ls - List directory contents (read-only)\n\nExecution:\n • bash - Run shell commands\n</available_tools>\n\n<verification_checklist priority=\"0\">\nBefore file modifications:\n1. Am I using \"edit\" NOT \"apply_patch\"?\n2. Am I avoiding plan tools entirely?\n3. Am I using only the tools listed above?\n</verification_checklist>\n</user_instructions>`;\n"]}
+ {"version":3,"file":"codex.d.ts","sourceRoot":"","sources":["../../../../src/providers/openai-codex/prompts/codex.ts"],"names":[],"mappings":"AAqBA,MAAM,MAAM,WAAW,GAAG,eAAe,GAAG,WAAW,GAAG,OAAO,GAAG,SAAS,GAAG,SAAS,CAAC;AAkB1F,MAAM,MAAM,aAAa,GAAG;IAC3B,IAAI,EAAE,MAAM,GAAG,IAAI,CAAC;IACpB,GAAG,EAAE,MAAM,CAAC;IACZ,WAAW,EAAE,MAAM,CAAC;IACpB,GAAG,EAAE,MAAM,CAAC;CACZ,CAAC;AAEF,wBAAgB,cAAc,CAAC,eAAe,EAAE,MAAM,GAAG,WAAW,CAcnE;AAsCD,wBAAsB,oBAAoB,CAAC,eAAe,SAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,CAsF7F","sourcesContent":["import { existsSync, mkdirSync, readFileSync, writeFileSync } from \"node:fs\";\nimport { homedir } from \"node:os\";\nimport { dirname, join } from \"node:path\";\nimport { fileURLToPath } from \"node:url\";\n\nconst GITHUB_API_RELEASES = \"https://api.github.com/repos/openai/codex/releases/latest\";\nconst GITHUB_HTML_RELEASES = \"https://github.com/openai/codex/releases/latest\";\n\nconst DEFAULT_AGENT_DIR = join(homedir(), \".pi\", \"agent\");\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = dirname(__filename);\nconst FALLBACK_PROMPT_PATH = join(__dirname, \"codex-instructions.md\");\n\nfunction getAgentDir(): string {\n\treturn process.env.PI_CODING_AGENT_DIR || DEFAULT_AGENT_DIR;\n}\n\nfunction getCacheDir(): string {\n\treturn join(getAgentDir(), \"cache\", \"openai-codex\");\n}\n\nexport type ModelFamily = \"gpt-5.2-codex\" | \"codex-max\" | \"codex\" | \"gpt-5.2\" | \"gpt-5.1\";\n\nconst PROMPT_FILES: Record<ModelFamily, string> = {\n\t\"gpt-5.2-codex\": \"gpt-5.2-codex_prompt.md\",\n\t\"codex-max\": \"gpt-5.1-codex-max_prompt.md\",\n\tcodex: \"gpt_5_codex_prompt.md\",\n\t\"gpt-5.2\": \"gpt_5_2_prompt.md\",\n\t\"gpt-5.1\": \"gpt_5_1_prompt.md\",\n};\n\nconst CACHE_FILES: Record<ModelFamily, string> = {\n\t\"gpt-5.2-codex\": \"gpt-5.2-codex-instructions.md\",\n\t\"codex-max\": \"codex-max-instructions.md\",\n\tcodex: \"codex-instructions.md\",\n\t\"gpt-5.2\": \"gpt-5.2-instructions.md\",\n\t\"gpt-5.1\": \"gpt-5.1-instructions.md\",\n};\n\nexport type CacheMetadata = {\n\tetag: string | null;\n\ttag: string;\n\tlastChecked: number;\n\turl: string;\n};\n\nexport function getModelFamily(normalizedModel: string): ModelFamily {\n\tif (normalizedModel.includes(\"gpt-5.2-codex\") || normalizedModel.includes(\"gpt 5.2 codex\")) {\n\t\treturn \"gpt-5.2-codex\";\n\t}\n\tif (normalizedModel.includes(\"codex-max\")) {\n\t\treturn \"codex-max\";\n\t}\n\tif (normalizedModel.includes(\"codex\") || normalizedModel.startsWith(\"codex-\")) {\n\t\treturn \"codex\";\n\t}\n\tif (normalizedModel.includes(\"gpt-5.2\")) {\n\t\treturn \"gpt-5.2\";\n\t}\n\treturn \"gpt-5.1\";\n}\n\nasync function getLatestReleaseTag(): Promise<string> {\n\ttry {\n\t\tconst response = await fetch(GITHUB_API_RELEASES);\n\t\tif (response.ok) {\n\t\t\tconst data = (await response.json()) as { tag_name?: string };\n\t\t\tif (data.tag_name) {\n\t\t\t\treturn data.tag_name;\n\t\t\t}\n\t\t}\n\t} catch {\n\t\t// fallback\n\t}\n\n\tconst htmlResponse = await fetch(GITHUB_HTML_RELEASES);\n\tif (!htmlResponse.ok) {\n\t\tthrow new Error(`Failed to fetch latest release: ${htmlResponse.status}`);\n\t}\n\n\tconst finalUrl = htmlResponse.url;\n\tif (finalUrl) {\n\t\tconst parts = finalUrl.split(\"/tag/\");\n\t\tconst last = parts[parts.length - 1];\n\t\tif (last && !last.includes(\"/\")) {\n\t\t\treturn last;\n\t\t}\n\t}\n\n\tconst html = await htmlResponse.text();\n\tconst match = html.match(/\\/openai\\/codex\\/releases\\/tag\\/([^\"]+)/);\n\tif (match?.[1]) {\n\t\treturn match[1];\n\t}\n\n\tthrow new 
Error(\"Failed to determine latest release tag from GitHub\");\n}\n\nexport async function getCodexInstructions(normalizedModel = \"gpt-5.1-codex\"): Promise<string> {\n\tconst modelFamily = getModelFamily(normalizedModel);\n\tconst promptFile = PROMPT_FILES[modelFamily];\n\tconst cacheDir = getCacheDir();\n\tconst cacheFile = join(cacheDir, CACHE_FILES[modelFamily]);\n\tconst cacheMetaFile = join(cacheDir, `${CACHE_FILES[modelFamily].replace(\".md\", \"-meta.json\")}`);\n\n\ttry {\n\t\tlet cachedETag: string | null = null;\n\t\tlet cachedTag: string | null = null;\n\t\tlet cachedTimestamp: number | null = null;\n\n\t\tif (existsSync(cacheMetaFile)) {\n\t\t\tconst metadata = JSON.parse(readFileSync(cacheMetaFile, \"utf8\")) as CacheMetadata;\n\t\t\tcachedETag = metadata.etag;\n\t\t\tcachedTag = metadata.tag;\n\t\t\tcachedTimestamp = metadata.lastChecked;\n\t\t}\n\n\t\tconst CACHE_TTL_MS = 24 * 60 * 60 * 1000;\n\t\tif (cachedTimestamp && Date.now() - cachedTimestamp < CACHE_TTL_MS && existsSync(cacheFile)) {\n\t\t\treturn readFileSync(cacheFile, \"utf8\");\n\t\t}\n\n\t\tconst latestTag = await getLatestReleaseTag();\n\t\tconst instructionsUrl = `https://raw.githubusercontent.com/openai/codex/${latestTag}/codex-rs/core/${promptFile}`;\n\n\t\tif (cachedTag !== latestTag) {\n\t\t\tcachedETag = null;\n\t\t}\n\n\t\tconst headers: Record<string, string> = {};\n\t\tif (cachedETag) {\n\t\t\theaders[\"If-None-Match\"] = cachedETag;\n\t\t}\n\n\t\tconst response = await fetch(instructionsUrl, { headers });\n\n\t\tif (response.status === 304) {\n\t\t\tif (existsSync(cacheFile)) {\n\t\t\t\treturn readFileSync(cacheFile, \"utf8\");\n\t\t\t}\n\t\t}\n\n\t\tif (response.ok) {\n\t\t\tconst instructions = await response.text();\n\t\t\tconst newETag = response.headers.get(\"etag\");\n\n\t\t\tif (!existsSync(cacheDir)) {\n\t\t\t\tmkdirSync(cacheDir, { recursive: true });\n\t\t\t}\n\n\t\t\twriteFileSync(cacheFile, instructions, \"utf8\");\n\t\t\twriteFileSync(\n\t\t\t\tcacheMetaFile,\n\t\t\t\tJSON.stringify({\n\t\t\t\t\tetag: newETag,\n\t\t\t\t\ttag: latestTag,\n\t\t\t\t\tlastChecked: Date.now(),\n\t\t\t\t\turl: instructionsUrl,\n\t\t\t\t} satisfies CacheMetadata),\n\t\t\t\t\"utf8\",\n\t\t\t);\n\n\t\t\treturn instructions;\n\t\t}\n\n\t\tthrow new Error(`HTTP ${response.status}`);\n\t} catch (error) {\n\t\tconsole.error(\n\t\t\t`[openai-codex] Failed to fetch ${modelFamily} instructions from GitHub:`,\n\t\t\terror instanceof Error ? error.message : String(error),\n\t\t);\n\n\t\tif (existsSync(cacheFile)) {\n\t\t\tconsole.error(`[openai-codex] Using cached ${modelFamily} instructions`);\n\t\t\treturn readFileSync(cacheFile, \"utf8\");\n\t\t}\n\n\t\tif (existsSync(FALLBACK_PROMPT_PATH)) {\n\t\t\tconsole.error(`[openai-codex] Falling back to bundled instructions for ${modelFamily}`);\n\t\t\treturn readFileSync(FALLBACK_PROMPT_PATH, \"utf8\");\n\t\t}\n\n\t\tthrow new Error(`No cached Codex instructions available for ${modelFamily}`);\n\t}\n}\n"]}
package/dist/providers/openai-codex/prompts/codex.js CHANGED
@@ -91,7 +91,7 @@ export async function getCodexInstructions(normalizedModel = "gpt-5.1-codex") {
  cachedTag = metadata.tag;
  cachedTimestamp = metadata.lastChecked;
  }
- const CACHE_TTL_MS = 15 * 60 * 1000;
+ const CACHE_TTL_MS = 24 * 60 * 60 * 1000;
  if (cachedTimestamp && Date.now() - cachedTimestamp < CACHE_TTL_MS && existsSync(cacheFile)) {
  return readFileSync(cacheFile, "utf8");
  }
@@ -140,45 +140,4 @@ export async function getCodexInstructions(normalizedModel = "gpt-5.1-codex") {
  throw new Error(`No cached Codex instructions available for ${modelFamily}`);
  }
  }
- export const TOOL_REMAP_MESSAGE = `<user_instructions priority="0">
- <environment_override priority="0">
- YOU ARE IN A DIFFERENT ENVIRONMENT. These instructions override ALL previous tool references.
- </environment_override>
-
- <tool_replacements priority="0">
- <critical_rule priority="0">
- ❌ APPLY_PATCH DOES NOT EXIST → ✅ USE "edit" INSTEAD
- - NEVER use: apply_patch, applyPatch
- - ALWAYS use: edit tool for ALL file modifications
- </critical_rule>
-
- <critical_rule priority="0">
- ❌ UPDATE_PLAN DOES NOT EXIST
- - NEVER use: update_plan, updatePlan, read_plan, readPlan, todowrite, todoread
- - There is no plan tool in this environment
- </critical_rule>
- </tool_replacements>
-
- <available_tools priority="0">
- File Operations:
- • read - Read file contents
- • edit - Modify files with exact find/replace
- • write - Create or overwrite files
-
- Search/Discovery:
- • grep - Search file contents for patterns (read-only)
- • find - Find files by glob pattern (read-only)
- • ls - List directory contents (read-only)
-
- Execution:
- • bash - Run shell commands
- </available_tools>
-
- <verification_checklist priority="0">
- Before file modifications:
- 1. Am I using "edit" NOT "apply_patch"?
- 2. Am I avoiding plan tools entirely?
- 3. Am I using only the tools listed above?
- </verification_checklist>
- </user_instructions>`;
  //# sourceMappingURL=codex.js.map
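The first hunk above raises the instruction-cache TTL from 15 minutes to 24 hours (24 × 60 × 60 × 1000 = 86,400,000 ms). A sketch of the caching pattern the function uses, not the package's exact code: within the TTL the cached file is returned with no network call; after that, a conditional request with `If-None-Match` lets GitHub answer 304 Not Modified.

```typescript
const CACHE_TTL_MS = 24 * 60 * 60 * 1000; // 24 h, previously 15 * 60 * 1000 (15 min)

type Cached = { body: string; etag: string | null; lastChecked: number };

async function fetchWithCache(url: string, cached: Cached | null): Promise<string> {
	// Fresh enough: serve the cached copy without touching the network.
	if (cached && Date.now() - cached.lastChecked < CACHE_TTL_MS) {
		return cached.body;
	}
	// Stale: revalidate cheaply with the stored ETag.
	const headers: Record<string, string> = {};
	if (cached?.etag) headers["If-None-Match"] = cached.etag;
	const res = await fetch(url, { headers });
	if (res.status === 304 && cached) return cached.body; // unchanged upstream
	if (!res.ok) throw new Error(`HTTP ${res.status}`);
	return await res.text();
}
```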
package/dist/providers/openai-codex/prompts/codex.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"codex.js","sourceRoot":"","sources":["../../../../src/providers/openai-codex/prompts/codex.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,SAAS,EAAE,YAAY,EAAE,aAAa,EAAE,MAAM,SAAS,CAAC;AAC7E,OAAO,EAAE,OAAO,EAAE,MAAM,SAAS,CAAC;AAClC,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AAC1C,OAAO,EAAE,aAAa,EAAE,MAAM,UAAU,CAAC;AAEzC,MAAM,mBAAmB,GAAG,2DAA2D,CAAC;AACxF,MAAM,oBAAoB,GAAG,iDAAiD,CAAC;AAE/E,MAAM,iBAAiB,GAAG,IAAI,CAAC,OAAO,EAAE,EAAE,KAAK,EAAE,OAAO,CAAC,CAAC;AAC1D,MAAM,UAAU,GAAG,aAAa,CAAC,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC;AAClD,MAAM,SAAS,GAAG,OAAO,CAAC,UAAU,CAAC,CAAC;AACtC,MAAM,oBAAoB,GAAG,IAAI,CAAC,SAAS,EAAE,uBAAuB,CAAC,CAAC;AAEtE,SAAS,WAAW,GAAW;IAC9B,OAAO,OAAO,CAAC,GAAG,CAAC,mBAAmB,IAAI,iBAAiB,CAAC;AAAA,CAC5D;AAED,SAAS,WAAW,GAAW;IAC9B,OAAO,IAAI,CAAC,WAAW,EAAE,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;AAAA,CACpD;AAID,MAAM,YAAY,GAAgC;IACjD,eAAe,EAAE,yBAAyB;IAC1C,WAAW,EAAE,6BAA6B;IAC1C,KAAK,EAAE,uBAAuB;IAC9B,SAAS,EAAE,mBAAmB;IAC9B,SAAS,EAAE,mBAAmB;CAC9B,CAAC;AAEF,MAAM,WAAW,GAAgC;IAChD,eAAe,EAAE,+BAA+B;IAChD,WAAW,EAAE,2BAA2B;IACxC,KAAK,EAAE,uBAAuB;IAC9B,SAAS,EAAE,yBAAyB;IACpC,SAAS,EAAE,yBAAyB;CACpC,CAAC;AASF,MAAM,UAAU,cAAc,CAAC,eAAuB,EAAe;IACpE,IAAI,eAAe,CAAC,QAAQ,CAAC,eAAe,CAAC,IAAI,eAAe,CAAC,QAAQ,CAAC,eAAe,CAAC,EAAE,CAAC;QAC5F,OAAO,eAAe,CAAC;IACxB,CAAC;IACD,IAAI,eAAe,CAAC,QAAQ,CAAC,WAAW,CAAC,EAAE,CAAC;QAC3C,OAAO,WAAW,CAAC;IACpB,CAAC;IACD,IAAI,eAAe,CAAC,QAAQ,CAAC,OAAO,CAAC,IAAI,eAAe,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE,CAAC;QAC/E,OAAO,OAAO,CAAC;IAChB,CAAC;IACD,IAAI,eAAe,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC;QACzC,OAAO,SAAS,CAAC;IAClB,CAAC;IACD,OAAO,SAAS,CAAC;AAAA,CACjB;AAED,KAAK,UAAU,mBAAmB,GAAoB;IACrD,IAAI,CAAC;QACJ,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,mBAAmB,CAAC,CAAC;QAClD,IAAI,QAAQ,CAAC,EAAE,EAAE,CAAC;YACjB,MAAM,IAAI,GAAG,CAAC,MAAM,QAAQ,CAAC,IAAI,EAAE,CAA0B,CAAC;YAC9D,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;gBACnB,OAAO,IAAI,CAAC,QAAQ,CAAC;YACtB,CAAC;QACF,CAAC;IACF,CAAC;IAAC,MAAM,CAAC;QACR,WAAW;IACZ,CAAC;IAED,MAAM,YAAY,GAAG,MAAM,KAAK,CAAC,oBAAoB,CAAC,CAAC;IACvD,IAAI,CAAC,YAAY,CAAC,EAAE,EAAE,CAAC;QACtB,MAAM,IAAI,KAAK,CAAC,mCAAmC,YAAY,CAAC,MAAM,EAAE,CAAC,CAAC;IAC3E,CAAC;IAED,MAAM,QAAQ,GAAG,YAAY,CAAC,GAAG,CAAC;IAClC,IAAI,QAAQ,EAAE,CAAC;QACd,MAAM,KAAK,GAAG,QAAQ,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACtC,MAAM,IAAI,GAAG,KAAK,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;QACrC,IAAI,IAAI,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,CAAC;YACjC,OAAO,IAAI,CAAC;QACb,CAAC;IACF,CAAC;IAED,MAAM,IAAI,GAAG,MAAM,YAAY,CAAC,IAAI,EAAE,CAAC;IACvC,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,yCAAyC,CAAC,CAAC;IACpE,IAAI,KAAK,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;QAChB,OAAO,KAAK,CAAC,CAAC,CAAC,CAAC;IACjB,CAAC;IAED,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;AAAA,CACtE;AAED,MAAM,CAAC,KAAK,UAAU,oBAAoB,CAAC,eAAe,GAAG,eAAe,EAAmB;IAC9F,MAAM,WAAW,GAAG,cAAc,CAAC,eAAe,CAAC,CAAC;IACpD,MAAM,UAAU,GAAG,YAAY,CAAC,WAAW,CAAC,CAAC;IAC7C,MAAM,QAAQ,GAAG,WAAW,EAAE,CAAC;IAC/B,MAAM,SAAS,GAAG,IAAI,CAAC,QAAQ,EAAE,WAAW,CAAC,WAAW,CAAC,CAAC,CAAC;IAC3D,MAAM,aAAa,GAAG,IAAI,CAAC,QAAQ,EAAE,GAAG,WAAW,CAAC,WAAW,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,YAAY,CAAC,EAAE,CAAC,CAAC;IAEjG,IAAI,CAAC;QACJ,IAAI,UAAU,GAAkB,IAAI,CAAC;QACrC,IAAI,SAAS,GAAkB,IAAI,CAAC;QACpC,IAAI,eAAe,GAAkB,IAAI,CAAC;QAE1C,IAAI,UAAU,CAAC,aAAa,CAAC,EAAE,CAAC;YAC/B,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,YAAY,CAAC,aAAa,EAAE,MAAM,CAAC,CAAkB,CAAC;YAClF,UAAU,GAAG,QAAQ,CAAC,IAAI,CAAC;YAC3B,SAAS,GAAG,QAAQ,CAAC,GAAG,CAAC;YACzB,eAAe,GAAG,QAAQ,CAAC,WAAW,CAAC;QACxC,CAAC;QAED,MAAM,YAAY,GAAG,EAAE,GAAG,EAAE,GAAG,IAAI,CAAC;QACpC,IAAI,eAAe,IAAI,IAAI,CAAC,GAAG,EAAE,GAAG,eAAe,GAAG,YAAY,IAAI,UAAU,CAAC,SAAS,CAAC,EAAE,CAAC;YAC7F,OAAO,YAAY,CAAC,SAAS,EAAE,MAAM,CAAC,CAAC;
QACxC,CAAC;QAED,MAAM,SAAS,GAAG,MAAM,mBAAmB,EAAE,CAAC;QAC9C,MAAM,eAAe,GAAG,kDAAkD,SAAS,kBAAkB,UAAU,EAAE,CAAC;QAElH,IAAI,SAAS,KAAK,SAAS,EAAE,CAAC;YAC7B,UAAU,GAAG,IAAI,CAAC;QACnB,CAAC;QAED,MAAM,OAAO,GAA2B,EAAE,CAAC;QAC3C,IAAI,UAAU,EAAE,CAAC;YAChB,OAAO,CAAC,eAAe,CAAC,GAAG,UAAU,CAAC;QACvC,CAAC;QAED,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,eAAe,EAAE,EAAE,OAAO,EAAE,CAAC,CAAC;QAE3D,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE,CAAC;YAC7B,IAAI,UAAU,CAAC,SAAS,CAAC,EAAE,CAAC;gBAC3B,OAAO,YAAY,CAAC,SAAS,EAAE,MAAM,CAAC,CAAC;YACxC,CAAC;QACF,CAAC;QAED,IAAI,QAAQ,CAAC,EAAE,EAAE,CAAC;YACjB,MAAM,YAAY,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC;YAC3C,MAAM,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;YAE7C,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE,CAAC;gBAC3B,SAAS,CAAC,QAAQ,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;YAC1C,CAAC;YAED,aAAa,CAAC,SAAS,EAAE,YAAY,EAAE,MAAM,CAAC,CAAC;YAC/C,aAAa,CACZ,aAAa,EACb,IAAI,CAAC,SAAS,CAAC;gBACd,IAAI,EAAE,OAAO;gBACb,GAAG,EAAE,SAAS;gBACd,WAAW,EAAE,IAAI,CAAC,GAAG,EAAE;gBACvB,GAAG,EAAE,eAAe;aACI,CAAC,EAC1B,MAAM,CACN,CAAC;YAEF,OAAO,YAAY,CAAC;QACrB,CAAC;QAED,MAAM,IAAI,KAAK,CAAC,QAAQ,QAAQ,CAAC,MAAM,EAAE,CAAC,CAAC;IAC5C,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QAChB,OAAO,CAAC,KAAK,CACZ,kCAAkC,WAAW,4BAA4B,EACzE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CACtD,CAAC;QAEF,IAAI,UAAU,CAAC,SAAS,CAAC,EAAE,CAAC;YAC3B,OAAO,CAAC,KAAK,CAAC,+BAA+B,WAAW,eAAe,CAAC,CAAC;YACzE,OAAO,YAAY,CAAC,SAAS,EAAE,MAAM,CAAC,CAAC;QACxC,CAAC;QAED,IAAI,UAAU,CAAC,oBAAoB,CAAC,EAAE,CAAC;YACtC,OAAO,CAAC,KAAK,CAAC,2DAA2D,WAAW,EAAE,CAAC,CAAC;YACxF,OAAO,YAAY,CAAC,oBAAoB,EAAE,MAAM,CAAC,CAAC;QACnD,CAAC;QAED,MAAM,IAAI,KAAK,CAAC,8CAA8C,WAAW,EAAE,CAAC,CAAC;IAC9E,CAAC;AAAA,CACD;AAED,MAAM,CAAC,MAAM,kBAAkB,GAAG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;qBAwCb,CAAC","sourcesContent":["import { existsSync, mkdirSync, readFileSync, writeFileSync } from \"node:fs\";\nimport { homedir } from \"node:os\";\nimport { dirname, join } from \"node:path\";\nimport { fileURLToPath } from \"node:url\";\n\nconst GITHUB_API_RELEASES = \"https://api.github.com/repos/openai/codex/releases/latest\";\nconst GITHUB_HTML_RELEASES = \"https://github.com/openai/codex/releases/latest\";\n\nconst DEFAULT_AGENT_DIR = join(homedir(), \".pi\", \"agent\");\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = dirname(__filename);\nconst FALLBACK_PROMPT_PATH = join(__dirname, \"codex-instructions.md\");\n\nfunction getAgentDir(): string {\n\treturn process.env.PI_CODING_AGENT_DIR || DEFAULT_AGENT_DIR;\n}\n\nfunction getCacheDir(): string {\n\treturn join(getAgentDir(), \"cache\", \"openai-codex\");\n}\n\nexport type ModelFamily = \"gpt-5.2-codex\" | \"codex-max\" | \"codex\" | \"gpt-5.2\" | \"gpt-5.1\";\n\nconst PROMPT_FILES: Record<ModelFamily, string> = {\n\t\"gpt-5.2-codex\": \"gpt-5.2-codex_prompt.md\",\n\t\"codex-max\": \"gpt-5.1-codex-max_prompt.md\",\n\tcodex: \"gpt_5_codex_prompt.md\",\n\t\"gpt-5.2\": \"gpt_5_2_prompt.md\",\n\t\"gpt-5.1\": \"gpt_5_1_prompt.md\",\n};\n\nconst CACHE_FILES: Record<ModelFamily, string> = {\n\t\"gpt-5.2-codex\": \"gpt-5.2-codex-instructions.md\",\n\t\"codex-max\": \"codex-max-instructions.md\",\n\tcodex: \"codex-instructions.md\",\n\t\"gpt-5.2\": \"gpt-5.2-instructions.md\",\n\t\"gpt-5.1\": \"gpt-5.1-instructions.md\",\n};\n\nexport type CacheMetadata = {\n\tetag: string | null;\n\ttag: string;\n\tlastChecked: number;\n\turl: string;\n};\n\nexport function getModelFamily(normalizedModel: string): ModelFamily {\n\tif (normalizedModel.includes(\"gpt-5.2-codex\") || normalizedModel.includes(\"gpt 5.2 codex\")) 
{\n\t\treturn \"gpt-5.2-codex\";\n\t}\n\tif (normalizedModel.includes(\"codex-max\")) {\n\t\treturn \"codex-max\";\n\t}\n\tif (normalizedModel.includes(\"codex\") || normalizedModel.startsWith(\"codex-\")) {\n\t\treturn \"codex\";\n\t}\n\tif (normalizedModel.includes(\"gpt-5.2\")) {\n\t\treturn \"gpt-5.2\";\n\t}\n\treturn \"gpt-5.1\";\n}\n\nasync function getLatestReleaseTag(): Promise<string> {\n\ttry {\n\t\tconst response = await fetch(GITHUB_API_RELEASES);\n\t\tif (response.ok) {\n\t\t\tconst data = (await response.json()) as { tag_name?: string };\n\t\t\tif (data.tag_name) {\n\t\t\t\treturn data.tag_name;\n\t\t\t}\n\t\t}\n\t} catch {\n\t\t// fallback\n\t}\n\n\tconst htmlResponse = await fetch(GITHUB_HTML_RELEASES);\n\tif (!htmlResponse.ok) {\n\t\tthrow new Error(`Failed to fetch latest release: ${htmlResponse.status}`);\n\t}\n\n\tconst finalUrl = htmlResponse.url;\n\tif (finalUrl) {\n\t\tconst parts = finalUrl.split(\"/tag/\");\n\t\tconst last = parts[parts.length - 1];\n\t\tif (last && !last.includes(\"/\")) {\n\t\t\treturn last;\n\t\t}\n\t}\n\n\tconst html = await htmlResponse.text();\n\tconst match = html.match(/\\/openai\\/codex\\/releases\\/tag\\/([^\"]+)/);\n\tif (match?.[1]) {\n\t\treturn match[1];\n\t}\n\n\tthrow new Error(\"Failed to determine latest release tag from GitHub\");\n}\n\nexport async function getCodexInstructions(normalizedModel = \"gpt-5.1-codex\"): Promise<string> {\n\tconst modelFamily = getModelFamily(normalizedModel);\n\tconst promptFile = PROMPT_FILES[modelFamily];\n\tconst cacheDir = getCacheDir();\n\tconst cacheFile = join(cacheDir, CACHE_FILES[modelFamily]);\n\tconst cacheMetaFile = join(cacheDir, `${CACHE_FILES[modelFamily].replace(\".md\", \"-meta.json\")}`);\n\n\ttry {\n\t\tlet cachedETag: string | null = null;\n\t\tlet cachedTag: string | null = null;\n\t\tlet cachedTimestamp: number | null = null;\n\n\t\tif (existsSync(cacheMetaFile)) {\n\t\t\tconst metadata = JSON.parse(readFileSync(cacheMetaFile, \"utf8\")) as CacheMetadata;\n\t\t\tcachedETag = metadata.etag;\n\t\t\tcachedTag = metadata.tag;\n\t\t\tcachedTimestamp = metadata.lastChecked;\n\t\t}\n\n\t\tconst CACHE_TTL_MS = 15 * 60 * 1000;\n\t\tif (cachedTimestamp && Date.now() - cachedTimestamp < CACHE_TTL_MS && existsSync(cacheFile)) {\n\t\t\treturn readFileSync(cacheFile, \"utf8\");\n\t\t}\n\n\t\tconst latestTag = await getLatestReleaseTag();\n\t\tconst instructionsUrl = `https://raw.githubusercontent.com/openai/codex/${latestTag}/codex-rs/core/${promptFile}`;\n\n\t\tif (cachedTag !== latestTag) {\n\t\t\tcachedETag = null;\n\t\t}\n\n\t\tconst headers: Record<string, string> = {};\n\t\tif (cachedETag) {\n\t\t\theaders[\"If-None-Match\"] = cachedETag;\n\t\t}\n\n\t\tconst response = await fetch(instructionsUrl, { headers });\n\n\t\tif (response.status === 304) {\n\t\t\tif (existsSync(cacheFile)) {\n\t\t\t\treturn readFileSync(cacheFile, \"utf8\");\n\t\t\t}\n\t\t}\n\n\t\tif (response.ok) {\n\t\t\tconst instructions = await response.text();\n\t\t\tconst newETag = response.headers.get(\"etag\");\n\n\t\t\tif (!existsSync(cacheDir)) {\n\t\t\t\tmkdirSync(cacheDir, { recursive: true });\n\t\t\t}\n\n\t\t\twriteFileSync(cacheFile, instructions, \"utf8\");\n\t\t\twriteFileSync(\n\t\t\t\tcacheMetaFile,\n\t\t\t\tJSON.stringify({\n\t\t\t\t\tetag: newETag,\n\t\t\t\t\ttag: latestTag,\n\t\t\t\t\tlastChecked: Date.now(),\n\t\t\t\t\turl: instructionsUrl,\n\t\t\t\t} satisfies CacheMetadata),\n\t\t\t\t\"utf8\",\n\t\t\t);\n\n\t\t\treturn instructions;\n\t\t}\n\n\t\tthrow new Error(`HTTP ${response.status}`);\n\t} catch 
(error) {\n\t\tconsole.error(\n\t\t\t`[openai-codex] Failed to fetch ${modelFamily} instructions from GitHub:`,\n\t\t\terror instanceof Error ? error.message : String(error),\n\t\t);\n\n\t\tif (existsSync(cacheFile)) {\n\t\t\tconsole.error(`[openai-codex] Using cached ${modelFamily} instructions`);\n\t\t\treturn readFileSync(cacheFile, \"utf8\");\n\t\t}\n\n\t\tif (existsSync(FALLBACK_PROMPT_PATH)) {\n\t\t\tconsole.error(`[openai-codex] Falling back to bundled instructions for ${modelFamily}`);\n\t\t\treturn readFileSync(FALLBACK_PROMPT_PATH, \"utf8\");\n\t\t}\n\n\t\tthrow new Error(`No cached Codex instructions available for ${modelFamily}`);\n\t}\n}\n\nexport const TOOL_REMAP_MESSAGE = `<user_instructions priority=\"0\">\n<environment_override priority=\"0\">\nYOU ARE IN A DIFFERENT ENVIRONMENT. These instructions override ALL previous tool references.\n</environment_override>\n\n<tool_replacements priority=\"0\">\n<critical_rule priority=\"0\">\n❌ APPLY_PATCH DOES NOT EXIST → ✅ USE \"edit\" INSTEAD\n- NEVER use: apply_patch, applyPatch\n- ALWAYS use: edit tool for ALL file modifications\n</critical_rule>\n\n<critical_rule priority=\"0\">\n❌ UPDATE_PLAN DOES NOT EXIST\n- NEVER use: update_plan, updatePlan, read_plan, readPlan, todowrite, todoread\n- There is no plan tool in this environment\n</critical_rule>\n</tool_replacements>\n\n<available_tools priority=\"0\">\nFile Operations:\n • read - Read file contents\n • edit - Modify files with exact find/replace\n • write - Create or overwrite files\n\nSearch/Discovery:\n • grep - Search file contents for patterns (read-only)\n • find - Find files by glob pattern (read-only)\n • ls - List directory contents (read-only)\n\nExecution:\n • bash - Run shell commands\n</available_tools>\n\n<verification_checklist priority=\"0\">\nBefore file modifications:\n1. Am I using \"edit\" NOT \"apply_patch\"?\n2. Am I avoiding plan tools entirely?\n3. Am I using only the tools listed above?\n</verification_checklist>\n</user_instructions>`;\n"]}
+ {"version":3,"file":"codex.js","sourceRoot":"","sources":["../../../../src/providers/openai-codex/prompts/codex.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,SAAS,EAAE,YAAY,EAAE,aAAa,EAAE,MAAM,SAAS,CAAC;AAC7E,OAAO,EAAE,OAAO,EAAE,MAAM,SAAS,CAAC;AAClC,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AAC1C,OAAO,EAAE,aAAa,EAAE,MAAM,UAAU,CAAC;AAEzC,MAAM,mBAAmB,GAAG,2DAA2D,CAAC;AACxF,MAAM,oBAAoB,GAAG,iDAAiD,CAAC;AAE/E,MAAM,iBAAiB,GAAG,IAAI,CAAC,OAAO,EAAE,EAAE,KAAK,EAAE,OAAO,CAAC,CAAC;AAC1D,MAAM,UAAU,GAAG,aAAa,CAAC,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC;AAClD,MAAM,SAAS,GAAG,OAAO,CAAC,UAAU,CAAC,CAAC;AACtC,MAAM,oBAAoB,GAAG,IAAI,CAAC,SAAS,EAAE,uBAAuB,CAAC,CAAC;AAEtE,SAAS,WAAW,GAAW;IAC9B,OAAO,OAAO,CAAC,GAAG,CAAC,mBAAmB,IAAI,iBAAiB,CAAC;AAAA,CAC5D;AAED,SAAS,WAAW,GAAW;IAC9B,OAAO,IAAI,CAAC,WAAW,EAAE,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;AAAA,CACpD;AAID,MAAM,YAAY,GAAgC;IACjD,eAAe,EAAE,yBAAyB;IAC1C,WAAW,EAAE,6BAA6B;IAC1C,KAAK,EAAE,uBAAuB;IAC9B,SAAS,EAAE,mBAAmB;IAC9B,SAAS,EAAE,mBAAmB;CAC9B,CAAC;AAEF,MAAM,WAAW,GAAgC;IAChD,eAAe,EAAE,+BAA+B;IAChD,WAAW,EAAE,2BAA2B;IACxC,KAAK,EAAE,uBAAuB;IAC9B,SAAS,EAAE,yBAAyB;IACpC,SAAS,EAAE,yBAAyB;CACpC,CAAC;AASF,MAAM,UAAU,cAAc,CAAC,eAAuB,EAAe;IACpE,IAAI,eAAe,CAAC,QAAQ,CAAC,eAAe,CAAC,IAAI,eAAe,CAAC,QAAQ,CAAC,eAAe,CAAC,EAAE,CAAC;QAC5F,OAAO,eAAe,CAAC;IACxB,CAAC;IACD,IAAI,eAAe,CAAC,QAAQ,CAAC,WAAW,CAAC,EAAE,CAAC;QAC3C,OAAO,WAAW,CAAC;IACpB,CAAC;IACD,IAAI,eAAe,CAAC,QAAQ,CAAC,OAAO,CAAC,IAAI,eAAe,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE,CAAC;QAC/E,OAAO,OAAO,CAAC;IAChB,CAAC;IACD,IAAI,eAAe,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC;QACzC,OAAO,SAAS,CAAC;IAClB,CAAC;IACD,OAAO,SAAS,CAAC;AAAA,CACjB;AAED,KAAK,UAAU,mBAAmB,GAAoB;IACrD,IAAI,CAAC;QACJ,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,mBAAmB,CAAC,CAAC;QAClD,IAAI,QAAQ,CAAC,EAAE,EAAE,CAAC;YACjB,MAAM,IAAI,GAAG,CAAC,MAAM,QAAQ,CAAC,IAAI,EAAE,CAA0B,CAAC;YAC9D,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;gBACnB,OAAO,IAAI,CAAC,QAAQ,CAAC;YACtB,CAAC;QACF,CAAC;IACF,CAAC;IAAC,MAAM,CAAC;QACR,WAAW;IACZ,CAAC;IAED,MAAM,YAAY,GAAG,MAAM,KAAK,CAAC,oBAAoB,CAAC,CAAC;IACvD,IAAI,CAAC,YAAY,CAAC,EAAE,EAAE,CAAC;QACtB,MAAM,IAAI,KAAK,CAAC,mCAAmC,YAAY,CAAC,MAAM,EAAE,CAAC,CAAC;IAC3E,CAAC;IAED,MAAM,QAAQ,GAAG,YAAY,CAAC,GAAG,CAAC;IAClC,IAAI,QAAQ,EAAE,CAAC;QACd,MAAM,KAAK,GAAG,QAAQ,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACtC,MAAM,IAAI,GAAG,KAAK,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;QACrC,IAAI,IAAI,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,CAAC;YACjC,OAAO,IAAI,CAAC;QACb,CAAC;IACF,CAAC;IAED,MAAM,IAAI,GAAG,MAAM,YAAY,CAAC,IAAI,EAAE,CAAC;IACvC,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,yCAAyC,CAAC,CAAC;IACpE,IAAI,KAAK,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;QAChB,OAAO,KAAK,CAAC,CAAC,CAAC,CAAC;IACjB,CAAC;IAED,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;AAAA,CACtE;AAED,MAAM,CAAC,KAAK,UAAU,oBAAoB,CAAC,eAAe,GAAG,eAAe,EAAmB;IAC9F,MAAM,WAAW,GAAG,cAAc,CAAC,eAAe,CAAC,CAAC;IACpD,MAAM,UAAU,GAAG,YAAY,CAAC,WAAW,CAAC,CAAC;IAC7C,MAAM,QAAQ,GAAG,WAAW,EAAE,CAAC;IAC/B,MAAM,SAAS,GAAG,IAAI,CAAC,QAAQ,EAAE,WAAW,CAAC,WAAW,CAAC,CAAC,CAAC;IAC3D,MAAM,aAAa,GAAG,IAAI,CAAC,QAAQ,EAAE,GAAG,WAAW,CAAC,WAAW,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,YAAY,CAAC,EAAE,CAAC,CAAC;IAEjG,IAAI,CAAC;QACJ,IAAI,UAAU,GAAkB,IAAI,CAAC;QACrC,IAAI,SAAS,GAAkB,IAAI,CAAC;QACpC,IAAI,eAAe,GAAkB,IAAI,CAAC;QAE1C,IAAI,UAAU,CAAC,aAAa,CAAC,EAAE,CAAC;YAC/B,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,YAAY,CAAC,aAAa,EAAE,MAAM,CAAC,CAAkB,CAAC;YAClF,UAAU,GAAG,QAAQ,CAAC,IAAI,CAAC;YAC3B,SAAS,GAAG,QAAQ,CAAC,GAAG,CAAC;YACzB,eAAe,GAAG,QAAQ,CAAC,WAAW,CAAC;QACxC,CAAC;QAED,MAAM,YAAY,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,IAAI,CAAC;QACzC,IAAI,eAAe,IAAI,IAAI,CAAC,GAAG,EAAE,GAAG,eAAe,GAAG,YAAY,IAAI,UAAU,CAAC,SAAS,CAAC,EAAE,CAAC;YAC7F,OAAO,YAAY,CAAC,SAAS,EAAE,MAAM,
CAAC,CAAC;QACxC,CAAC;QAED,MAAM,SAAS,GAAG,MAAM,mBAAmB,EAAE,CAAC;QAC9C,MAAM,eAAe,GAAG,kDAAkD,SAAS,kBAAkB,UAAU,EAAE,CAAC;QAElH,IAAI,SAAS,KAAK,SAAS,EAAE,CAAC;YAC7B,UAAU,GAAG,IAAI,CAAC;QACnB,CAAC;QAED,MAAM,OAAO,GAA2B,EAAE,CAAC;QAC3C,IAAI,UAAU,EAAE,CAAC;YAChB,OAAO,CAAC,eAAe,CAAC,GAAG,UAAU,CAAC;QACvC,CAAC;QAED,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,eAAe,EAAE,EAAE,OAAO,EAAE,CAAC,CAAC;QAE3D,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE,CAAC;YAC7B,IAAI,UAAU,CAAC,SAAS,CAAC,EAAE,CAAC;gBAC3B,OAAO,YAAY,CAAC,SAAS,EAAE,MAAM,CAAC,CAAC;YACxC,CAAC;QACF,CAAC;QAED,IAAI,QAAQ,CAAC,EAAE,EAAE,CAAC;YACjB,MAAM,YAAY,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC;YAC3C,MAAM,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;YAE7C,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE,CAAC;gBAC3B,SAAS,CAAC,QAAQ,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;YAC1C,CAAC;YAED,aAAa,CAAC,SAAS,EAAE,YAAY,EAAE,MAAM,CAAC,CAAC;YAC/C,aAAa,CACZ,aAAa,EACb,IAAI,CAAC,SAAS,CAAC;gBACd,IAAI,EAAE,OAAO;gBACb,GAAG,EAAE,SAAS;gBACd,WAAW,EAAE,IAAI,CAAC,GAAG,EAAE;gBACvB,GAAG,EAAE,eAAe;aACI,CAAC,EAC1B,MAAM,CACN,CAAC;YAEF,OAAO,YAAY,CAAC;QACrB,CAAC;QAED,MAAM,IAAI,KAAK,CAAC,QAAQ,QAAQ,CAAC,MAAM,EAAE,CAAC,CAAC;IAC5C,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QAChB,OAAO,CAAC,KAAK,CACZ,kCAAkC,WAAW,4BAA4B,EACzE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CACtD,CAAC;QAEF,IAAI,UAAU,CAAC,SAAS,CAAC,EAAE,CAAC;YAC3B,OAAO,CAAC,KAAK,CAAC,+BAA+B,WAAW,eAAe,CAAC,CAAC;YACzE,OAAO,YAAY,CAAC,SAAS,EAAE,MAAM,CAAC,CAAC;QACxC,CAAC;QAED,IAAI,UAAU,CAAC,oBAAoB,CAAC,EAAE,CAAC;YACtC,OAAO,CAAC,KAAK,CAAC,2DAA2D,WAAW,EAAE,CAAC,CAAC;YACxF,OAAO,YAAY,CAAC,oBAAoB,EAAE,MAAM,CAAC,CAAC;QACnD,CAAC;QAED,MAAM,IAAI,KAAK,CAAC,8CAA8C,WAAW,EAAE,CAAC,CAAC;IAC9E,CAAC;AAAA,CACD","sourcesContent":["import { existsSync, mkdirSync, readFileSync, writeFileSync } from \"node:fs\";\nimport { homedir } from \"node:os\";\nimport { dirname, join } from \"node:path\";\nimport { fileURLToPath } from \"node:url\";\n\nconst GITHUB_API_RELEASES = \"https://api.github.com/repos/openai/codex/releases/latest\";\nconst GITHUB_HTML_RELEASES = \"https://github.com/openai/codex/releases/latest\";\n\nconst DEFAULT_AGENT_DIR = join(homedir(), \".pi\", \"agent\");\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = dirname(__filename);\nconst FALLBACK_PROMPT_PATH = join(__dirname, \"codex-instructions.md\");\n\nfunction getAgentDir(): string {\n\treturn process.env.PI_CODING_AGENT_DIR || DEFAULT_AGENT_DIR;\n}\n\nfunction getCacheDir(): string {\n\treturn join(getAgentDir(), \"cache\", \"openai-codex\");\n}\n\nexport type ModelFamily = \"gpt-5.2-codex\" | \"codex-max\" | \"codex\" | \"gpt-5.2\" | \"gpt-5.1\";\n\nconst PROMPT_FILES: Record<ModelFamily, string> = {\n\t\"gpt-5.2-codex\": \"gpt-5.2-codex_prompt.md\",\n\t\"codex-max\": \"gpt-5.1-codex-max_prompt.md\",\n\tcodex: \"gpt_5_codex_prompt.md\",\n\t\"gpt-5.2\": \"gpt_5_2_prompt.md\",\n\t\"gpt-5.1\": \"gpt_5_1_prompt.md\",\n};\n\nconst CACHE_FILES: Record<ModelFamily, string> = {\n\t\"gpt-5.2-codex\": \"gpt-5.2-codex-instructions.md\",\n\t\"codex-max\": \"codex-max-instructions.md\",\n\tcodex: \"codex-instructions.md\",\n\t\"gpt-5.2\": \"gpt-5.2-instructions.md\",\n\t\"gpt-5.1\": \"gpt-5.1-instructions.md\",\n};\n\nexport type CacheMetadata = {\n\tetag: string | null;\n\ttag: string;\n\tlastChecked: number;\n\turl: string;\n};\n\nexport function getModelFamily(normalizedModel: string): ModelFamily {\n\tif (normalizedModel.includes(\"gpt-5.2-codex\") || normalizedModel.includes(\"gpt 5.2 codex\")) {\n\t\treturn \"gpt-5.2-codex\";\n\t}\n\tif 
(normalizedModel.includes(\"codex-max\")) {\n\t\treturn \"codex-max\";\n\t}\n\tif (normalizedModel.includes(\"codex\") || normalizedModel.startsWith(\"codex-\")) {\n\t\treturn \"codex\";\n\t}\n\tif (normalizedModel.includes(\"gpt-5.2\")) {\n\t\treturn \"gpt-5.2\";\n\t}\n\treturn \"gpt-5.1\";\n}\n\nasync function getLatestReleaseTag(): Promise<string> {\n\ttry {\n\t\tconst response = await fetch(GITHUB_API_RELEASES);\n\t\tif (response.ok) {\n\t\t\tconst data = (await response.json()) as { tag_name?: string };\n\t\t\tif (data.tag_name) {\n\t\t\t\treturn data.tag_name;\n\t\t\t}\n\t\t}\n\t} catch {\n\t\t// fallback\n\t}\n\n\tconst htmlResponse = await fetch(GITHUB_HTML_RELEASES);\n\tif (!htmlResponse.ok) {\n\t\tthrow new Error(`Failed to fetch latest release: ${htmlResponse.status}`);\n\t}\n\n\tconst finalUrl = htmlResponse.url;\n\tif (finalUrl) {\n\t\tconst parts = finalUrl.split(\"/tag/\");\n\t\tconst last = parts[parts.length - 1];\n\t\tif (last && !last.includes(\"/\")) {\n\t\t\treturn last;\n\t\t}\n\t}\n\n\tconst html = await htmlResponse.text();\n\tconst match = html.match(/\\/openai\\/codex\\/releases\\/tag\\/([^\"]+)/);\n\tif (match?.[1]) {\n\t\treturn match[1];\n\t}\n\n\tthrow new Error(\"Failed to determine latest release tag from GitHub\");\n}\n\nexport async function getCodexInstructions(normalizedModel = \"gpt-5.1-codex\"): Promise<string> {\n\tconst modelFamily = getModelFamily(normalizedModel);\n\tconst promptFile = PROMPT_FILES[modelFamily];\n\tconst cacheDir = getCacheDir();\n\tconst cacheFile = join(cacheDir, CACHE_FILES[modelFamily]);\n\tconst cacheMetaFile = join(cacheDir, `${CACHE_FILES[modelFamily].replace(\".md\", \"-meta.json\")}`);\n\n\ttry {\n\t\tlet cachedETag: string | null = null;\n\t\tlet cachedTag: string | null = null;\n\t\tlet cachedTimestamp: number | null = null;\n\n\t\tif (existsSync(cacheMetaFile)) {\n\t\t\tconst metadata = JSON.parse(readFileSync(cacheMetaFile, \"utf8\")) as CacheMetadata;\n\t\t\tcachedETag = metadata.etag;\n\t\t\tcachedTag = metadata.tag;\n\t\t\tcachedTimestamp = metadata.lastChecked;\n\t\t}\n\n\t\tconst CACHE_TTL_MS = 24 * 60 * 60 * 1000;\n\t\tif (cachedTimestamp && Date.now() - cachedTimestamp < CACHE_TTL_MS && existsSync(cacheFile)) {\n\t\t\treturn readFileSync(cacheFile, \"utf8\");\n\t\t}\n\n\t\tconst latestTag = await getLatestReleaseTag();\n\t\tconst instructionsUrl = `https://raw.githubusercontent.com/openai/codex/${latestTag}/codex-rs/core/${promptFile}`;\n\n\t\tif (cachedTag !== latestTag) {\n\t\t\tcachedETag = null;\n\t\t}\n\n\t\tconst headers: Record<string, string> = {};\n\t\tif (cachedETag) {\n\t\t\theaders[\"If-None-Match\"] = cachedETag;\n\t\t}\n\n\t\tconst response = await fetch(instructionsUrl, { headers });\n\n\t\tif (response.status === 304) {\n\t\t\tif (existsSync(cacheFile)) {\n\t\t\t\treturn readFileSync(cacheFile, \"utf8\");\n\t\t\t}\n\t\t}\n\n\t\tif (response.ok) {\n\t\t\tconst instructions = await response.text();\n\t\t\tconst newETag = response.headers.get(\"etag\");\n\n\t\t\tif (!existsSync(cacheDir)) {\n\t\t\t\tmkdirSync(cacheDir, { recursive: true });\n\t\t\t}\n\n\t\t\twriteFileSync(cacheFile, instructions, \"utf8\");\n\t\t\twriteFileSync(\n\t\t\t\tcacheMetaFile,\n\t\t\t\tJSON.stringify({\n\t\t\t\t\tetag: newETag,\n\t\t\t\t\ttag: latestTag,\n\t\t\t\t\tlastChecked: Date.now(),\n\t\t\t\t\turl: instructionsUrl,\n\t\t\t\t} satisfies CacheMetadata),\n\t\t\t\t\"utf8\",\n\t\t\t);\n\n\t\t\treturn instructions;\n\t\t}\n\n\t\tthrow new Error(`HTTP ${response.status}`);\n\t} catch (error) 
{\n\t\tconsole.error(\n\t\t\t`[openai-codex] Failed to fetch ${modelFamily} instructions from GitHub:`,\n\t\t\terror instanceof Error ? error.message : String(error),\n\t\t);\n\n\t\tif (existsSync(cacheFile)) {\n\t\t\tconsole.error(`[openai-codex] Using cached ${modelFamily} instructions`);\n\t\t\treturn readFileSync(cacheFile, \"utf8\");\n\t\t}\n\n\t\tif (existsSync(FALLBACK_PROMPT_PATH)) {\n\t\t\tconsole.error(`[openai-codex] Falling back to bundled instructions for ${modelFamily}`);\n\t\t\treturn readFileSync(FALLBACK_PROMPT_PATH, \"utf8\");\n\t\t}\n\n\t\tthrow new Error(`No cached Codex instructions available for ${modelFamily}`);\n\t}\n}\n"]}
package/dist/providers/openai-codex/prompts/pi-codex-bridge.d.ts CHANGED
@@ -2,5 +2,6 @@
  * Codex-Pi bridge prompt
  * Aligns Codex CLI expectations with Pi's toolset.
  */
- export declare const CODEX_PI_BRIDGE = "# Codex Running in Pi\n\nYou are running Codex through pi, a terminal coding assistant. The tools and rules differ from Codex CLI.\n\n## CRITICAL: Tool Replacements\n\n<critical_rule priority=\"0\">\n\u274C APPLY_PATCH DOES NOT EXIST \u2192 \u2705 USE \"edit\" INSTEAD\n- NEVER use: apply_patch, applyPatch\n- ALWAYS use: edit for ALL file modifications\n</critical_rule>\n\n<critical_rule priority=\"0\">\n\u274C UPDATE_PLAN DOES NOT EXIST\n- NEVER use: update_plan, updatePlan, read_plan, readPlan, todowrite, todoread\n- There is no plan tool in this environment\n</critical_rule>\n\n## Available Tools (pi)\n\n- read - Read file contents\n- bash - Execute bash commands\n- edit - Modify files with exact find/replace (requires prior read)\n- write - Create or overwrite files\n- grep - Search file contents (read-only)\n- find - Find files by glob pattern (read-only)\n- ls - List directory contents (read-only)\n\n## Usage Rules\n\n- Read before edit; use read instead of cat/sed for file contents\n- Use edit for surgical changes; write only for new files or complete rewrites\n- Prefer grep/find/ls over bash for discovery\n- Be concise and show file paths clearly when working with files\n\n## Verification Checklist\n\n1. Using edit, not apply_patch\n2. No plan tools used\n3. Only the tools listed above are called\n\nBelow are additional system instruction you MUST follow when responding:\n";
+ import type { Tool } from "../../../types.js";
+ export declare function buildCodexPiBridge(tools?: Tool[]): string;
  //# sourceMappingURL=pi-codex-bridge.d.ts.map
package/dist/providers/openai-codex/prompts/pi-codex-bridge.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"pi-codex-bridge.d.ts","sourceRoot":"","sources":["../../../../src/providers/openai-codex/prompts/pi-codex-bridge.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,eAAO,MAAM,eAAe,u4CA0C3B,CAAC","sourcesContent":["/**\n * Codex-Pi bridge prompt\n * Aligns Codex CLI expectations with Pi's toolset.\n */\n\nexport const CODEX_PI_BRIDGE = `# Codex Running in Pi\n\nYou are running Codex through pi, a terminal coding assistant. The tools and rules differ from Codex CLI.\n\n## CRITICAL: Tool Replacements\n\n<critical_rule priority=\"0\">\n APPLY_PATCH DOES NOT EXIST USE \"edit\" INSTEAD\n- NEVER use: apply_patch, applyPatch\n- ALWAYS use: edit for ALL file modifications\n</critical_rule>\n\n<critical_rule priority=\"0\">\n UPDATE_PLAN DOES NOT EXIST\n- NEVER use: update_plan, updatePlan, read_plan, readPlan, todowrite, todoread\n- There is no plan tool in this environment\n</critical_rule>\n\n## Available Tools (pi)\n\n- read - Read file contents\n- bash - Execute bash commands\n- edit - Modify files with exact find/replace (requires prior read)\n- write - Create or overwrite files\n- grep - Search file contents (read-only)\n- find - Find files by glob pattern (read-only)\n- ls - List directory contents (read-only)\n\n## Usage Rules\n\n- Read before edit; use read instead of cat/sed for file contents\n- Use edit for surgical changes; write only for new files or complete rewrites\n- Prefer grep/find/ls over bash for discovery\n- Be concise and show file paths clearly when working with files\n\n## Verification Checklist\n\n1. Using edit, not apply_patch\n2. No plan tools used\n3. Only the tools listed above are called\n\nBelow are additional system instruction you MUST follow when responding:\n`;\n"]}
+ {"version":3,"file":"pi-codex-bridge.d.ts","sourceRoot":"","sources":["../../../../src/providers/openai-codex/prompts/pi-codex-bridge.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,KAAK,EAAE,IAAI,EAAE,MAAM,mBAAmB,CAAC;AA+B9C,wBAAgB,kBAAkB,CAAC,KAAK,CAAC,EAAE,IAAI,EAAE,GAAG,MAAM,CAkBzD","sourcesContent":["/**\n * Codex-Pi bridge prompt\n * Aligns Codex CLI expectations with Pi's toolset.\n */\n\nimport type { Tool } from \"../../../types.js\";\n\nfunction formatToolList(tools?: Tool[]): string {\n\tif (!tools || tools.length === 0) {\n\t\treturn \"- (none)\";\n\t}\n\n\tconst normalized = tools\n\t\t.map((tool) => {\n\t\t\tconst name = tool.name.trim();\n\t\t\tif (!name) return null;\n\t\t\tconst description = (tool.description || \"Custom tool\").replace(/\\s*\\n\\s*/g, \" \").trim();\n\t\t\treturn { name, description };\n\t\t})\n\t\t.filter((tool): tool is { name: string; description: string } => tool !== null);\n\n\tif (normalized.length === 0) {\n\t\treturn \"- (none)\";\n\t}\n\n\tconst maxNameLength = normalized.reduce((max, tool) => Math.max(max, tool.name.length), 0);\n\tconst padWidth = Math.max(6, maxNameLength + 1);\n\n\treturn normalized\n\t\t.map((tool) => {\n\t\t\tconst paddedName = tool.name.padEnd(padWidth);\n\t\t\treturn `- ${paddedName}- ${tool.description}`;\n\t\t})\n\t\t.join(\"\\n\");\n}\n\nexport function buildCodexPiBridge(tools?: Tool[]): string {\n\tconst toolsList = formatToolList(tools);\n\n\treturn `# Codex Environment Bridge\n\n<environment_override priority=\"0\">\nIGNORE ALL PREVIOUS INSTRUCTIONS ABOVE THIS MESSAGE.\nDo not assume any tools are available unless listed below.\n</environment_override>\n\nThe next system instructions that follow this message are authoritative and must be obeyed, even if they conflict with earlier instructions.\n\n## Available Tools\n\n${toolsList}\n\nOnly use the tools listed above. Do not reference or call any other tools.\n`;\n}\n"]}
package/dist/providers/openai-codex/prompts/pi-codex-bridge.js CHANGED
@@ -2,47 +2,47 @@
  * Codex-Pi bridge prompt
  * Aligns Codex CLI expectations with Pi's toolset.
  */
- export const CODEX_PI_BRIDGE = `# Codex Running in Pi
-
- You are running Codex through pi, a terminal coding assistant. The tools and rules differ from Codex CLI.
-
- ## CRITICAL: Tool Replacements
-
- <critical_rule priority="0">
- APPLY_PATCH DOES NOT EXIST → ✅ USE "edit" INSTEAD
- - NEVER use: apply_patch, applyPatch
- - ALWAYS use: edit for ALL file modifications
- </critical_rule>
-
- <critical_rule priority="0">
- UPDATE_PLAN DOES NOT EXIST
- - NEVER use: update_plan, updatePlan, read_plan, readPlan, todowrite, todoread
- - There is no plan tool in this environment
- </critical_rule>
-
- ## Available Tools (pi)
-
- - read - Read file contents
- - bash - Execute bash commands
- - edit - Modify files with exact find/replace (requires prior read)
- - write - Create or overwrite files
- - grep - Search file contents (read-only)
- - find - Find files by glob pattern (read-only)
- - ls - List directory contents (read-only)
-
- ## Usage Rules
-
- - Read before edit; use read instead of cat/sed for file contents
- - Use edit for surgical changes; write only for new files or complete rewrites
- - Prefer grep/find/ls over bash for discovery
- - Be concise and show file paths clearly when working with files
-
- ## Verification Checklist
-
- 1. Using edit, not apply_patch
- 2. No plan tools used
- 3. Only the tools listed above are called
-
- Below are additional system instruction you MUST follow when responding:
+ function formatToolList(tools) {
+ if (!tools || tools.length === 0) {
+ return "- (none)";
+ }
+ const normalized = tools
+ .map((tool) => {
+ const name = tool.name.trim();
+ if (!name)
+ return null;
+ const description = (tool.description || "Custom tool").replace(/\s*\n\s*/g, " ").trim();
+ return { name, description };
+ })
+ .filter((tool) => tool !== null);
+ if (normalized.length === 0) {
+ return "- (none)";
+ }
+ const maxNameLength = normalized.reduce((max, tool) => Math.max(max, tool.name.length), 0);
+ const padWidth = Math.max(6, maxNameLength + 1);
+ return normalized
+ .map((tool) => {
+ const paddedName = tool.name.padEnd(padWidth);
+ return `- ${paddedName}- ${tool.description}`;
+ })
+ .join("\n");
+ }
+ export function buildCodexPiBridge(tools) {
+ const toolsList = formatToolList(tools);
+ return `# Codex Environment Bridge
+
+ <environment_override priority="0">
+ IGNORE ALL PREVIOUS INSTRUCTIONS ABOVE THIS MESSAGE.
+ Do not assume any tools are available unless listed below.
+ </environment_override>
+
+ The next system instructions that follow this message are authoritative and must be obeyed, even if they conflict with earlier instructions.
+
+ ## Available Tools
+
+ ${toolsList}
+
+ Only use the tools listed above. Do not reference or call any other tools.
  `;
+ }
  //# sourceMappingURL=pi-codex-bridge.js.map
@@ -1 +1 @@
- {"version":3,"file":"pi-codex-bridge.js","sourceRoot":"","sources":["../../../../src/providers/openai-codex/prompts/pi-codex-bridge.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,MAAM,CAAC,MAAM,eAAe,GAAG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA0C9B,CAAC","sourcesContent":["/**\n * Codex-Pi bridge prompt\n * Aligns Codex CLI expectations with Pi's toolset.\n */\n\nexport const CODEX_PI_BRIDGE = `# Codex Running in Pi\n\nYou are running Codex through pi, a terminal coding assistant. The tools and rules differ from Codex CLI.\n\n## CRITICAL: Tool Replacements\n\n<critical_rule priority=\"0\">\n APPLY_PATCH DOES NOT EXIST USE \"edit\" INSTEAD\n- NEVER use: apply_patch, applyPatch\n- ALWAYS use: edit for ALL file modifications\n</critical_rule>\n\n<critical_rule priority=\"0\">\n UPDATE_PLAN DOES NOT EXIST\n- NEVER use: update_plan, updatePlan, read_plan, readPlan, todowrite, todoread\n- There is no plan tool in this environment\n</critical_rule>\n\n## Available Tools (pi)\n\n- read - Read file contents\n- bash - Execute bash commands\n- edit - Modify files with exact find/replace (requires prior read)\n- write - Create or overwrite files\n- grep - Search file contents (read-only)\n- find - Find files by glob pattern (read-only)\n- ls - List directory contents (read-only)\n\n## Usage Rules\n\n- Read before edit; use read instead of cat/sed for file contents\n- Use edit for surgical changes; write only for new files or complete rewrites\n- Prefer grep/find/ls over bash for discovery\n- Be concise and show file paths clearly when working with files\n\n## Verification Checklist\n\n1. Using edit, not apply_patch\n2. No plan tools used\n3. Only the tools listed above are called\n\nBelow are additional system instruction you MUST follow when responding:\n`;\n"]}
+ {"version":3,"file":"pi-codex-bridge.js","sourceRoot":"","sources":["../../../../src/providers/openai-codex/prompts/pi-codex-bridge.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAIH,SAAS,cAAc,CAAC,KAAc,EAAU;IAC/C,IAAI,CAAC,KAAK,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QAClC,OAAO,UAAU,CAAC;IACnB,CAAC;IAED,MAAM,UAAU,GAAG,KAAK;SACtB,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC;QACd,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC;QAC9B,IAAI,CAAC,IAAI;YAAE,OAAO,IAAI,CAAC;QACvB,MAAM,WAAW,GAAG,CAAC,IAAI,CAAC,WAAW,IAAI,aAAa,CAAC,CAAC,OAAO,CAAC,WAAW,EAAE,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC;QACzF,OAAO,EAAE,IAAI,EAAE,WAAW,EAAE,CAAC;IAAA,CAC7B,CAAC;SACD,MAAM,CAAC,CAAC,IAAI,EAAiD,EAAE,CAAC,IAAI,KAAK,IAAI,CAAC,CAAC;IAEjF,IAAI,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QAC7B,OAAO,UAAU,CAAC;IACnB,CAAC;IAED,MAAM,aAAa,GAAG,UAAU,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC;IAC3F,MAAM,QAAQ,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,aAAa,GAAG,CAAC,CAAC,CAAC;IAEhD,OAAO,UAAU;SACf,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC;QACd,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;QAC9C,OAAO,KAAK,UAAU,KAAK,IAAI,CAAC,WAAW,EAAE,CAAC;IAAA,CAC9C,CAAC;SACD,IAAI,CAAC,IAAI,CAAC,CAAC;AAAA,CACb;AAED,MAAM,UAAU,kBAAkB,CAAC,KAAc,EAAU;IAC1D,MAAM,SAAS,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC;IAExC,OAAO;;;;;;;;;;;EAWN,SAAS;;;CAGV,CAAC;AAAA,CACD","sourcesContent":["/**\n * Codex-Pi bridge prompt\n * Aligns Codex CLI expectations with Pi's toolset.\n */\n\nimport type { Tool } from \"../../../types.js\";\n\nfunction formatToolList(tools?: Tool[]): string {\n\tif (!tools || tools.length === 0) {\n\t\treturn \"- (none)\";\n\t}\n\n\tconst normalized = tools\n\t\t.map((tool) => {\n\t\t\tconst name = tool.name.trim();\n\t\t\tif (!name) return null;\n\t\t\tconst description = (tool.description || \"Custom tool\").replace(/\\s*\\n\\s*/g, \" \").trim();\n\t\t\treturn { name, description };\n\t\t})\n\t\t.filter((tool): tool is { name: string; description: string } => tool !== null);\n\n\tif (normalized.length === 0) {\n\t\treturn \"- (none)\";\n\t}\n\n\tconst maxNameLength = normalized.reduce((max, tool) => Math.max(max, tool.name.length), 0);\n\tconst padWidth = Math.max(6, maxNameLength + 1);\n\n\treturn normalized\n\t\t.map((tool) => {\n\t\t\tconst paddedName = tool.name.padEnd(padWidth);\n\t\t\treturn `- ${paddedName}- ${tool.description}`;\n\t\t})\n\t\t.join(\"\\n\");\n}\n\nexport function buildCodexPiBridge(tools?: Tool[]): string {\n\tconst toolsList = formatToolList(tools);\n\n\treturn `# Codex Environment Bridge\n\n<environment_override priority=\"0\">\nIGNORE ALL PREVIOUS INSTRUCTIONS ABOVE THIS MESSAGE.\nDo not assume any tools are available unless listed below.\n</environment_override>\n\nThe next system instructions that follow this message are authoritative and must be obeyed, even if they conflict with earlier instructions.\n\n## Available Tools\n\n${toolsList}\n\nOnly use the tools listed above. Do not reference or call any other tools.\n`;\n}\n"]}
@@ -0,0 +1,10 @@
+ export interface CodexSystemPrompt {
+     instructions: string;
+     developerMessages: string[];
+ }
+ export declare function buildCodexSystemPrompt(args: {
+     codexInstructions: string;
+     bridgeText: string;
+     userSystemPrompt?: string;
+ }): CodexSystemPrompt;
+ //# sourceMappingURL=system-prompt.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"system-prompt.d.ts","sourceRoot":"","sources":["../../../../src/providers/openai-codex/prompts/system-prompt.ts"],"names":[],"mappings":"AAAA,MAAM,WAAW,iBAAiB;IACjC,YAAY,EAAE,MAAM,CAAC;IACrB,iBAAiB,EAAE,MAAM,EAAE,CAAC;CAC5B;AAED,wBAAgB,sBAAsB,CAAC,IAAI,EAAE;IAC5C,iBAAiB,EAAE,MAAM,CAAC;IAC1B,UAAU,EAAE,MAAM,CAAC;IACnB,gBAAgB,CAAC,EAAE,MAAM,CAAC;CAC1B,GAAG,iBAAiB,CAgBpB","sourcesContent":["export interface CodexSystemPrompt {\n\tinstructions: string;\n\tdeveloperMessages: string[];\n}\n\nexport function buildCodexSystemPrompt(args: {\n\tcodexInstructions: string;\n\tbridgeText: string;\n\tuserSystemPrompt?: string;\n}): CodexSystemPrompt {\n\tconst { codexInstructions, bridgeText, userSystemPrompt } = args;\n\tconst developerMessages: string[] = [];\n\n\tif (bridgeText.trim().length > 0) {\n\t\tdeveloperMessages.push(bridgeText.trim());\n\t}\n\n\tif (userSystemPrompt && userSystemPrompt.trim().length > 0) {\n\t\tdeveloperMessages.push(userSystemPrompt.trim());\n\t}\n\n\treturn {\n\t\tinstructions: codexInstructions.trim(),\n\t\tdeveloperMessages,\n\t};\n}\n"]}
@@ -0,0 +1,15 @@
+ export function buildCodexSystemPrompt(args) {
+     const { codexInstructions, bridgeText, userSystemPrompt } = args;
+     const developerMessages = [];
+     if (bridgeText.trim().length > 0) {
+         developerMessages.push(bridgeText.trim());
+     }
+     if (userSystemPrompt && userSystemPrompt.trim().length > 0) {
+         developerMessages.push(userSystemPrompt.trim());
+     }
+     return {
+         instructions: codexInstructions.trim(),
+         developerMessages,
+     };
+ }
+ //# sourceMappingURL=system-prompt.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"system-prompt.js","sourceRoot":"","sources":["../../../../src/providers/openai-codex/prompts/system-prompt.ts"],"names":[],"mappings":"AAKA,MAAM,UAAU,sBAAsB,CAAC,IAItC,EAAqB;IACrB,MAAM,EAAE,iBAAiB,EAAE,UAAU,EAAE,gBAAgB,EAAE,GAAG,IAAI,CAAC;IACjE,MAAM,iBAAiB,GAAa,EAAE,CAAC;IAEvC,IAAI,UAAU,CAAC,IAAI,EAAE,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAClC,iBAAiB,CAAC,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,CAAC,CAAC;IAC3C,CAAC;IAED,IAAI,gBAAgB,IAAI,gBAAgB,CAAC,IAAI,EAAE,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC5D,iBAAiB,CAAC,IAAI,CAAC,gBAAgB,CAAC,IAAI,EAAE,CAAC,CAAC;IACjD,CAAC;IAED,OAAO;QACN,YAAY,EAAE,iBAAiB,CAAC,IAAI,EAAE;QACtC,iBAAiB;KACjB,CAAC;AAAA,CACF","sourcesContent":["export interface CodexSystemPrompt {\n\tinstructions: string;\n\tdeveloperMessages: string[];\n}\n\nexport function buildCodexSystemPrompt(args: {\n\tcodexInstructions: string;\n\tbridgeText: string;\n\tuserSystemPrompt?: string;\n}): CodexSystemPrompt {\n\tconst { codexInstructions, bridgeText, userSystemPrompt } = args;\n\tconst developerMessages: string[] = [];\n\n\tif (bridgeText.trim().length > 0) {\n\t\tdeveloperMessages.push(bridgeText.trim());\n\t}\n\n\tif (userSystemPrompt && userSystemPrompt.trim().length > 0) {\n\t\tdeveloperMessages.push(userSystemPrompt.trim());\n\t}\n\n\treturn {\n\t\tinstructions: codexInstructions.trim(),\n\t\tdeveloperMessages,\n\t};\n}\n"]}
@@ -32,10 +32,14 @@ export interface RequestBody {
      };
      include?: string[];
      prompt_cache_key?: string;
+     prompt_cache_retention?: "in_memory" | "24h";
      max_output_tokens?: number;
      max_completion_tokens?: number;
      [key: string]: unknown;
  }
  export declare function normalizeModel(model: string | undefined): string;
- export declare function transformRequestBody(body: RequestBody, codexInstructions: string, options?: CodexRequestOptions, codexMode?: boolean, systemPrompt?: string): Promise<RequestBody>;
+ export declare function transformRequestBody(body: RequestBody, options?: CodexRequestOptions, prompt?: {
+     instructions: string;
+     developerMessages: string[];
+ }): Promise<RequestBody>;
  //# sourceMappingURL=request-transformer.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"request-transformer.d.ts","sourceRoot":"","sources":["../../../src/providers/openai-codex/request-transformer.ts"],"names":[],"mappings":"AAGA,MAAM,WAAW,eAAe;IAC/B,MAAM,EAAE,MAAM,GAAG,SAAS,GAAG,KAAK,GAAG,QAAQ,GAAG,MAAM,GAAG,OAAO,CAAC;IACjE,OAAO,EAAE,MAAM,GAAG,SAAS,GAAG,UAAU,GAAG,KAAK,GAAG,IAAI,CAAC;CACxD;AAED,MAAM,WAAW,mBAAmB;IACnC,eAAe,CAAC,EAAE,eAAe,CAAC,QAAQ,CAAC,CAAC;IAC5C,gBAAgB,CAAC,EAAE,eAAe,CAAC,SAAS,CAAC,GAAG,IAAI,CAAC;IACrD,aAAa,CAAC,EAAE,KAAK,GAAG,QAAQ,GAAG,MAAM,CAAC;IAC1C,OAAO,CAAC,EAAE,MAAM,EAAE,CAAC;CACnB;AAED,MAAM,WAAW,SAAS;IACzB,EAAE,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IACnB,IAAI,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IACrB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,OAAO,CAAC,EAAE,OAAO,CAAC;IAClB,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IACxB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,MAAM,CAAC,EAAE,OAAO,CAAC;IACjB,SAAS,CAAC,EAAE,MAAM,CAAC;CACnB;AAED,MAAM,WAAW,WAAW;IAC3B,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,CAAC,EAAE,OAAO,CAAC;IAChB,MAAM,CAAC,EAAE,OAAO,CAAC;IACjB,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,KAAK,CAAC,EAAE,SAAS,EAAE,CAAC;IACpB,KAAK,CAAC,EAAE,OAAO,CAAC;IAChB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,OAAO,CAAC,eAAe,CAAC,CAAC;IACrC,IAAI,CAAC,EAAE;QACN,SAAS,CAAC,EAAE,KAAK,GAAG,QAAQ,GAAG,MAAM,CAAC;KACtC,CAAC;IACF,OAAO,CAAC,EAAE,MAAM,EAAE,CAAC;IACnB,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B,qBAAqB,CAAC,EAAE,MAAM,CAAC;IAC/B,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC;CACvB;AAiDD,wBAAgB,cAAc,CAAC,KAAK,EAAE,MAAM,GAAG,SAAS,GAAG,MAAM,CA0ChE;AAsHD,wBAAsB,oBAAoB,CACzC,IAAI,EAAE,WAAW,EACjB,iBAAiB,EAAE,MAAM,EACzB,OAAO,GAAE,mBAAwB,EACjC,SAAS,UAAO,EAChB,YAAY,CAAC,EAAE,MAAM,GACnB,OAAO,CAAC,WAAW,CAAC,CA2EtB","sourcesContent":["import { TOOL_REMAP_MESSAGE } from \"./prompts/codex.js\";\nimport { CODEX_PI_BRIDGE } from \"./prompts/pi-codex-bridge.js\";\n\nexport interface ReasoningConfig {\n\teffort: \"none\" | \"minimal\" | \"low\" | \"medium\" | \"high\" | \"xhigh\";\n\tsummary: \"auto\" | \"concise\" | \"detailed\" | \"off\" | \"on\";\n}\n\nexport interface CodexRequestOptions {\n\treasoningEffort?: ReasoningConfig[\"effort\"];\n\treasoningSummary?: ReasoningConfig[\"summary\"] | null;\n\ttextVerbosity?: \"low\" | \"medium\" | \"high\";\n\tinclude?: string[];\n}\n\nexport interface InputItem {\n\tid?: string | null;\n\ttype?: string | null;\n\trole?: string;\n\tcontent?: unknown;\n\tcall_id?: string | null;\n\tname?: string;\n\toutput?: unknown;\n\targuments?: string;\n}\n\nexport interface RequestBody {\n\tmodel: string;\n\tstore?: boolean;\n\tstream?: boolean;\n\tinstructions?: string;\n\tinput?: InputItem[];\n\ttools?: unknown;\n\ttemperature?: number;\n\treasoning?: Partial<ReasoningConfig>;\n\ttext?: {\n\t\tverbosity?: \"low\" | \"medium\" | \"high\";\n\t};\n\tinclude?: string[];\n\tprompt_cache_key?: string;\n\tmax_output_tokens?: number;\n\tmax_completion_tokens?: number;\n\t[key: string]: unknown;\n}\n\nconst MODEL_MAP: Record<string, string> = {\n\t\"gpt-5.1-codex\": \"gpt-5.1-codex\",\n\t\"gpt-5.1-codex-low\": \"gpt-5.1-codex\",\n\t\"gpt-5.1-codex-medium\": \"gpt-5.1-codex\",\n\t\"gpt-5.1-codex-high\": \"gpt-5.1-codex\",\n\t\"gpt-5.1-codex-max\": \"gpt-5.1-codex-max\",\n\t\"gpt-5.1-codex-max-low\": \"gpt-5.1-codex-max\",\n\t\"gpt-5.1-codex-max-medium\": \"gpt-5.1-codex-max\",\n\t\"gpt-5.1-codex-max-high\": \"gpt-5.1-codex-max\",\n\t\"gpt-5.1-codex-max-xhigh\": \"gpt-5.1-codex-max\",\n\t\"gpt-5.2\": \"gpt-5.2\",\n\t\"gpt-5.2-none\": \"gpt-5.2\",\n\t\"gpt-5.2-low\": \"gpt-5.2\",\n\t\"gpt-5.2-medium\": \"gpt-5.2\",\n\t\"gpt-5.2-high\": \"gpt-5.2\",\n\t\"gpt-5.2-xhigh\": 
\"gpt-5.2\",\n\t\"gpt-5.2-codex\": \"gpt-5.2-codex\",\n\t\"gpt-5.2-codex-low\": \"gpt-5.2-codex\",\n\t\"gpt-5.2-codex-medium\": \"gpt-5.2-codex\",\n\t\"gpt-5.2-codex-high\": \"gpt-5.2-codex\",\n\t\"gpt-5.2-codex-xhigh\": \"gpt-5.2-codex\",\n\t\"gpt-5.1-codex-mini\": \"gpt-5.1-codex-mini\",\n\t\"gpt-5.1-codex-mini-medium\": \"gpt-5.1-codex-mini\",\n\t\"gpt-5.1-codex-mini-high\": \"gpt-5.1-codex-mini\",\n\t\"gpt-5.1\": \"gpt-5.1\",\n\t\"gpt-5.1-none\": \"gpt-5.1\",\n\t\"gpt-5.1-low\": \"gpt-5.1\",\n\t\"gpt-5.1-medium\": \"gpt-5.1\",\n\t\"gpt-5.1-high\": \"gpt-5.1\",\n\t\"gpt-5.1-chat-latest\": \"gpt-5.1\",\n\t\"gpt-5-codex\": \"gpt-5.1-codex\",\n\t\"codex-mini-latest\": \"gpt-5.1-codex-mini\",\n\t\"gpt-5-codex-mini\": \"gpt-5.1-codex-mini\",\n\t\"gpt-5-codex-mini-medium\": \"gpt-5.1-codex-mini\",\n\t\"gpt-5-codex-mini-high\": \"gpt-5.1-codex-mini\",\n\t\"gpt-5\": \"gpt-5.1\",\n\t\"gpt-5-mini\": \"gpt-5.1\",\n\t\"gpt-5-nano\": \"gpt-5.1\",\n};\n\nfunction getNormalizedModel(modelId: string): string | undefined {\n\tif (MODEL_MAP[modelId]) return MODEL_MAP[modelId];\n\tconst lowerModelId = modelId.toLowerCase();\n\tconst match = Object.keys(MODEL_MAP).find((key) => key.toLowerCase() === lowerModelId);\n\treturn match ? MODEL_MAP[match] : undefined;\n}\n\nexport function normalizeModel(model: string | undefined): string {\n\tif (!model) return \"gpt-5.1\";\n\n\tconst modelId = model.includes(\"/\") ? model.split(\"/\").pop()! : model;\n\tconst mappedModel = getNormalizedModel(modelId);\n\tif (mappedModel) return mappedModel;\n\n\tconst normalized = modelId.toLowerCase();\n\n\tif (normalized.includes(\"gpt-5.2-codex\") || normalized.includes(\"gpt 5.2 codex\")) {\n\t\treturn \"gpt-5.2-codex\";\n\t}\n\tif (normalized.includes(\"gpt-5.2\") || normalized.includes(\"gpt 5.2\")) {\n\t\treturn \"gpt-5.2\";\n\t}\n\tif (normalized.includes(\"gpt-5.1-codex-max\") || normalized.includes(\"gpt 5.1 codex max\")) {\n\t\treturn \"gpt-5.1-codex-max\";\n\t}\n\tif (normalized.includes(\"gpt-5.1-codex-mini\") || normalized.includes(\"gpt 5.1 codex mini\")) {\n\t\treturn \"gpt-5.1-codex-mini\";\n\t}\n\tif (\n\t\tnormalized.includes(\"codex-mini-latest\") ||\n\t\tnormalized.includes(\"gpt-5-codex-mini\") ||\n\t\tnormalized.includes(\"gpt 5 codex mini\")\n\t) {\n\t\treturn \"codex-mini-latest\";\n\t}\n\tif (normalized.includes(\"gpt-5.1-codex\") || normalized.includes(\"gpt 5.1 codex\")) {\n\t\treturn \"gpt-5.1-codex\";\n\t}\n\tif (normalized.includes(\"gpt-5.1\") || normalized.includes(\"gpt 5.1\")) {\n\t\treturn \"gpt-5.1\";\n\t}\n\tif (normalized.includes(\"codex\")) {\n\t\treturn \"gpt-5.1-codex\";\n\t}\n\tif (normalized.includes(\"gpt-5\") || normalized.includes(\"gpt 5\")) {\n\t\treturn \"gpt-5.1\";\n\t}\n\n\treturn \"gpt-5.1\";\n}\n\nfunction getReasoningConfig(modelName: string | undefined, options: CodexRequestOptions = {}): ReasoningConfig {\n\tconst normalizedName = modelName?.toLowerCase() ?? 
\"\";\n\n\tconst isGpt52Codex = normalizedName.includes(\"gpt-5.2-codex\") || normalizedName.includes(\"gpt 5.2 codex\");\n\tconst isGpt52General = (normalizedName.includes(\"gpt-5.2\") || normalizedName.includes(\"gpt 5.2\")) && !isGpt52Codex;\n\tconst isCodexMax = normalizedName.includes(\"codex-max\") || normalizedName.includes(\"codex max\");\n\tconst isCodexMini =\n\t\tnormalizedName.includes(\"codex-mini\") ||\n\t\tnormalizedName.includes(\"codex mini\") ||\n\t\tnormalizedName.includes(\"codex_mini\") ||\n\t\tnormalizedName.includes(\"codex-mini-latest\");\n\tconst isCodex = normalizedName.includes(\"codex\") && !isCodexMini;\n\tconst isLightweight = !isCodexMini && (normalizedName.includes(\"nano\") || normalizedName.includes(\"mini\"));\n\tconst isGpt51General =\n\t\t(normalizedName.includes(\"gpt-5.1\") || normalizedName.includes(\"gpt 5.1\")) &&\n\t\t!isCodex &&\n\t\t!isCodexMax &&\n\t\t!isCodexMini;\n\n\tconst supportsXhigh = isGpt52General || isGpt52Codex || isCodexMax;\n\tconst supportsNone = isGpt52General || isGpt51General;\n\n\tconst defaultEffort: ReasoningConfig[\"effort\"] = isCodexMini\n\t\t? \"medium\"\n\t\t: supportsXhigh\n\t\t\t? \"high\"\n\t\t\t: isLightweight\n\t\t\t\t? \"minimal\"\n\t\t\t\t: \"medium\";\n\n\tlet effort = options.reasoningEffort || defaultEffort;\n\n\tif (isCodexMini) {\n\t\tif (effort === \"minimal\" || effort === \"low\" || effort === \"none\") {\n\t\t\teffort = \"medium\";\n\t\t}\n\t\tif (effort === \"xhigh\") {\n\t\t\teffort = \"high\";\n\t\t}\n\t\tif (effort !== \"high\" && effort !== \"medium\") {\n\t\t\teffort = \"medium\";\n\t\t}\n\t}\n\n\tif (!supportsXhigh && effort === \"xhigh\") {\n\t\teffort = \"high\";\n\t}\n\n\tif (!supportsNone && effort === \"none\") {\n\t\teffort = \"low\";\n\t}\n\n\tif (isCodex && effort === \"minimal\") {\n\t\teffort = \"low\";\n\t}\n\n\treturn {\n\t\teffort,\n\t\tsummary: options.reasoningSummary ?? \"auto\",\n\t};\n}\n\nfunction filterInput(input: InputItem[] | undefined): InputItem[] | undefined {\n\tif (!Array.isArray(input)) return input;\n\n\treturn input\n\t\t.filter((item) => item.type !== \"item_reference\")\n\t\t.map((item) => {\n\t\t\tif (item.id != null) {\n\t\t\t\tconst { id: _id, ...rest } = item;\n\t\t\t\treturn rest as InputItem;\n\t\t\t}\n\t\t\treturn item;\n\t\t});\n}\n\nfunction addCodexBridgeMessage(\n\tinput: InputItem[] | undefined,\n\thasTools: boolean,\n\tsystemPrompt?: string,\n): InputItem[] | undefined {\n\tif (!hasTools || !Array.isArray(input)) return input;\n\n\tconst bridgeText = systemPrompt ? 
`${CODEX_PI_BRIDGE}\\n\\n${systemPrompt}` : CODEX_PI_BRIDGE;\n\n\tconst bridgeMessage: InputItem = {\n\t\ttype: \"message\",\n\t\trole: \"developer\",\n\t\tcontent: [\n\t\t\t{\n\t\t\t\ttype: \"input_text\",\n\t\t\t\ttext: bridgeText,\n\t\t\t},\n\t\t],\n\t};\n\n\treturn [bridgeMessage, ...input];\n}\n\nfunction addToolRemapMessage(input: InputItem[] | undefined, hasTools: boolean): InputItem[] | undefined {\n\tif (!hasTools || !Array.isArray(input)) return input;\n\n\tconst toolRemapMessage: InputItem = {\n\t\ttype: \"message\",\n\t\trole: \"developer\",\n\t\tcontent: [\n\t\t\t{\n\t\t\t\ttype: \"input_text\",\n\t\t\t\ttext: TOOL_REMAP_MESSAGE,\n\t\t\t},\n\t\t],\n\t};\n\n\treturn [toolRemapMessage, ...input];\n}\n\nexport async function transformRequestBody(\n\tbody: RequestBody,\n\tcodexInstructions: string,\n\toptions: CodexRequestOptions = {},\n\tcodexMode = true,\n\tsystemPrompt?: string,\n): Promise<RequestBody> {\n\tconst normalizedModel = normalizeModel(body.model);\n\n\tbody.model = normalizedModel;\n\tbody.store = false;\n\tbody.stream = true;\n\tbody.instructions = codexInstructions;\n\n\tif (body.input && Array.isArray(body.input)) {\n\t\tbody.input = filterInput(body.input);\n\n\t\tif (codexMode) {\n\t\t\tbody.input = addCodexBridgeMessage(body.input, !!body.tools, systemPrompt);\n\t\t} else {\n\t\t\tbody.input = addToolRemapMessage(body.input, !!body.tools);\n\t\t}\n\n\t\tif (body.input) {\n\t\t\tconst functionCallIds = new Set(\n\t\t\t\tbody.input\n\t\t\t\t\t.filter((item) => item.type === \"function_call\" && typeof item.call_id === \"string\")\n\t\t\t\t\t.map((item) => item.call_id as string),\n\t\t\t);\n\n\t\t\tbody.input = body.input.map((item) => {\n\t\t\t\tif (item.type === \"function_call_output\" && typeof item.call_id === \"string\") {\n\t\t\t\t\tconst callId = item.call_id as string;\n\t\t\t\t\tif (!functionCallIds.has(callId)) {\n\t\t\t\t\t\tconst itemRecord = item as unknown as Record<string, unknown>;\n\t\t\t\t\t\tconst toolName = typeof itemRecord.name === \"string\" ? itemRecord.name : \"tool\";\n\t\t\t\t\t\tlet text = \"\";\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tconst output = itemRecord.output;\n\t\t\t\t\t\t\ttext = typeof output === \"string\" ? output : JSON.stringify(output);\n\t\t\t\t\t\t} catch {\n\t\t\t\t\t\t\ttext = String(itemRecord.output ?? \"\");\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (text.length > 16000) {\n\t\t\t\t\t\t\ttext = `${text.slice(0, 16000)}\\n...[truncated]`;\n\t\t\t\t\t\t}\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\ttype: \"message\",\n\t\t\t\t\t\t\trole: \"assistant\",\n\t\t\t\t\t\t\tcontent: `[Previous ${toolName} result; call_id=${callId}]: ${text}`,\n\t\t\t\t\t\t} as InputItem;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\treturn item;\n\t\t\t});\n\t\t}\n\t}\n\n\tif (options.reasoningEffort !== undefined) {\n\t\tconst reasoningConfig = getReasoningConfig(normalizedModel, options);\n\t\tbody.reasoning = {\n\t\t\t...body.reasoning,\n\t\t\t...reasoningConfig,\n\t\t};\n\t} else {\n\t\tdelete body.reasoning;\n\t}\n\n\tbody.text = {\n\t\t...body.text,\n\t\tverbosity: options.textVerbosity || \"medium\",\n\t};\n\n\tconst include = Array.isArray(options.include) ? [...options.include] : [];\n\tinclude.push(\"reasoning.encrypted_content\");\n\tbody.include = Array.from(new Set(include));\n\n\tdelete body.max_output_tokens;\n\tdelete body.max_completion_tokens;\n\n\treturn body;\n}\n"]}
+ {"version":3,"file":"request-transformer.d.ts","sourceRoot":"","sources":["../../../src/providers/openai-codex/request-transformer.ts"],"names":[],"mappings":"AAAA,MAAM,WAAW,eAAe;IAC/B,MAAM,EAAE,MAAM,GAAG,SAAS,GAAG,KAAK,GAAG,QAAQ,GAAG,MAAM,GAAG,OAAO,CAAC;IACjE,OAAO,EAAE,MAAM,GAAG,SAAS,GAAG,UAAU,GAAG,KAAK,GAAG,IAAI,CAAC;CACxD;AAED,MAAM,WAAW,mBAAmB;IACnC,eAAe,CAAC,EAAE,eAAe,CAAC,QAAQ,CAAC,CAAC;IAC5C,gBAAgB,CAAC,EAAE,eAAe,CAAC,SAAS,CAAC,GAAG,IAAI,CAAC;IACrD,aAAa,CAAC,EAAE,KAAK,GAAG,QAAQ,GAAG,MAAM,CAAC;IAC1C,OAAO,CAAC,EAAE,MAAM,EAAE,CAAC;CACnB;AAED,MAAM,WAAW,SAAS;IACzB,EAAE,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IACnB,IAAI,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IACrB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,OAAO,CAAC,EAAE,OAAO,CAAC;IAClB,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IACxB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,MAAM,CAAC,EAAE,OAAO,CAAC;IACjB,SAAS,CAAC,EAAE,MAAM,CAAC;CACnB;AAED,MAAM,WAAW,WAAW;IAC3B,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,CAAC,EAAE,OAAO,CAAC;IAChB,MAAM,CAAC,EAAE,OAAO,CAAC;IACjB,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,KAAK,CAAC,EAAE,SAAS,EAAE,CAAC;IACpB,KAAK,CAAC,EAAE,OAAO,CAAC;IAChB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,SAAS,CAAC,EAAE,OAAO,CAAC,eAAe,CAAC,CAAC;IACrC,IAAI,CAAC,EAAE;QACN,SAAS,CAAC,EAAE,KAAK,GAAG,QAAQ,GAAG,MAAM,CAAC;KACtC,CAAC;IACF,OAAO,CAAC,EAAE,MAAM,EAAE,CAAC;IACnB,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,sBAAsB,CAAC,EAAE,WAAW,GAAG,KAAK,CAAC;IAC7C,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B,qBAAqB,CAAC,EAAE,MAAM,CAAC;IAC/B,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC;CACvB;AAiDD,wBAAgB,cAAc,CAAC,KAAK,EAAE,MAAM,GAAG,SAAS,GAAG,MAAM,CA0ChE;AA8ED,wBAAsB,oBAAoB,CACzC,IAAI,EAAE,WAAW,EACjB,OAAO,GAAE,mBAAwB,EACjC,MAAM,CAAC,EAAE;IAAE,YAAY,EAAE,MAAM,CAAC;IAAC,iBAAiB,EAAE,MAAM,EAAE,CAAA;CAAE,GAC5D,OAAO,CAAC,WAAW,CAAC,CAgFtB","sourcesContent":["export interface ReasoningConfig {\n\teffort: \"none\" | \"minimal\" | \"low\" | \"medium\" | \"high\" | \"xhigh\";\n\tsummary: \"auto\" | \"concise\" | \"detailed\" | \"off\" | \"on\";\n}\n\nexport interface CodexRequestOptions {\n\treasoningEffort?: ReasoningConfig[\"effort\"];\n\treasoningSummary?: ReasoningConfig[\"summary\"] | null;\n\ttextVerbosity?: \"low\" | \"medium\" | \"high\";\n\tinclude?: string[];\n}\n\nexport interface InputItem {\n\tid?: string | null;\n\ttype?: string | null;\n\trole?: string;\n\tcontent?: unknown;\n\tcall_id?: string | null;\n\tname?: string;\n\toutput?: unknown;\n\targuments?: string;\n}\n\nexport interface RequestBody {\n\tmodel: string;\n\tstore?: boolean;\n\tstream?: boolean;\n\tinstructions?: string;\n\tinput?: InputItem[];\n\ttools?: unknown;\n\ttemperature?: number;\n\treasoning?: Partial<ReasoningConfig>;\n\ttext?: {\n\t\tverbosity?: \"low\" | \"medium\" | \"high\";\n\t};\n\tinclude?: string[];\n\tprompt_cache_key?: string;\n\tprompt_cache_retention?: \"in_memory\" | \"24h\";\n\tmax_output_tokens?: number;\n\tmax_completion_tokens?: number;\n\t[key: string]: unknown;\n}\n\nconst MODEL_MAP: Record<string, string> = {\n\t\"gpt-5.1-codex\": \"gpt-5.1-codex\",\n\t\"gpt-5.1-codex-low\": \"gpt-5.1-codex\",\n\t\"gpt-5.1-codex-medium\": \"gpt-5.1-codex\",\n\t\"gpt-5.1-codex-high\": \"gpt-5.1-codex\",\n\t\"gpt-5.1-codex-max\": \"gpt-5.1-codex-max\",\n\t\"gpt-5.1-codex-max-low\": \"gpt-5.1-codex-max\",\n\t\"gpt-5.1-codex-max-medium\": \"gpt-5.1-codex-max\",\n\t\"gpt-5.1-codex-max-high\": \"gpt-5.1-codex-max\",\n\t\"gpt-5.1-codex-max-xhigh\": \"gpt-5.1-codex-max\",\n\t\"gpt-5.2\": \"gpt-5.2\",\n\t\"gpt-5.2-none\": \"gpt-5.2\",\n\t\"gpt-5.2-low\": \"gpt-5.2\",\n\t\"gpt-5.2-medium\": \"gpt-5.2\",\n\t\"gpt-5.2-high\": \"gpt-5.2\",\n\t\"gpt-5.2-xhigh\": \"gpt-5.2\",\n\t\"gpt-5.2-codex\": 
\"gpt-5.2-codex\",\n\t\"gpt-5.2-codex-low\": \"gpt-5.2-codex\",\n\t\"gpt-5.2-codex-medium\": \"gpt-5.2-codex\",\n\t\"gpt-5.2-codex-high\": \"gpt-5.2-codex\",\n\t\"gpt-5.2-codex-xhigh\": \"gpt-5.2-codex\",\n\t\"gpt-5.1-codex-mini\": \"gpt-5.1-codex-mini\",\n\t\"gpt-5.1-codex-mini-medium\": \"gpt-5.1-codex-mini\",\n\t\"gpt-5.1-codex-mini-high\": \"gpt-5.1-codex-mini\",\n\t\"gpt-5.1\": \"gpt-5.1\",\n\t\"gpt-5.1-none\": \"gpt-5.1\",\n\t\"gpt-5.1-low\": \"gpt-5.1\",\n\t\"gpt-5.1-medium\": \"gpt-5.1\",\n\t\"gpt-5.1-high\": \"gpt-5.1\",\n\t\"gpt-5.1-chat-latest\": \"gpt-5.1\",\n\t\"gpt-5-codex\": \"gpt-5.1-codex\",\n\t\"codex-mini-latest\": \"gpt-5.1-codex-mini\",\n\t\"gpt-5-codex-mini\": \"gpt-5.1-codex-mini\",\n\t\"gpt-5-codex-mini-medium\": \"gpt-5.1-codex-mini\",\n\t\"gpt-5-codex-mini-high\": \"gpt-5.1-codex-mini\",\n\t\"gpt-5\": \"gpt-5.1\",\n\t\"gpt-5-mini\": \"gpt-5.1\",\n\t\"gpt-5-nano\": \"gpt-5.1\",\n};\n\nfunction getNormalizedModel(modelId: string): string | undefined {\n\tif (MODEL_MAP[modelId]) return MODEL_MAP[modelId];\n\tconst lowerModelId = modelId.toLowerCase();\n\tconst match = Object.keys(MODEL_MAP).find((key) => key.toLowerCase() === lowerModelId);\n\treturn match ? MODEL_MAP[match] : undefined;\n}\n\nexport function normalizeModel(model: string | undefined): string {\n\tif (!model) return \"gpt-5.1\";\n\n\tconst modelId = model.includes(\"/\") ? model.split(\"/\").pop()! : model;\n\tconst mappedModel = getNormalizedModel(modelId);\n\tif (mappedModel) return mappedModel;\n\n\tconst normalized = modelId.toLowerCase();\n\n\tif (normalized.includes(\"gpt-5.2-codex\") || normalized.includes(\"gpt 5.2 codex\")) {\n\t\treturn \"gpt-5.2-codex\";\n\t}\n\tif (normalized.includes(\"gpt-5.2\") || normalized.includes(\"gpt 5.2\")) {\n\t\treturn \"gpt-5.2\";\n\t}\n\tif (normalized.includes(\"gpt-5.1-codex-max\") || normalized.includes(\"gpt 5.1 codex max\")) {\n\t\treturn \"gpt-5.1-codex-max\";\n\t}\n\tif (normalized.includes(\"gpt-5.1-codex-mini\") || normalized.includes(\"gpt 5.1 codex mini\")) {\n\t\treturn \"gpt-5.1-codex-mini\";\n\t}\n\tif (\n\t\tnormalized.includes(\"codex-mini-latest\") ||\n\t\tnormalized.includes(\"gpt-5-codex-mini\") ||\n\t\tnormalized.includes(\"gpt 5 codex mini\")\n\t) {\n\t\treturn \"codex-mini-latest\";\n\t}\n\tif (normalized.includes(\"gpt-5.1-codex\") || normalized.includes(\"gpt 5.1 codex\")) {\n\t\treturn \"gpt-5.1-codex\";\n\t}\n\tif (normalized.includes(\"gpt-5.1\") || normalized.includes(\"gpt 5.1\")) {\n\t\treturn \"gpt-5.1\";\n\t}\n\tif (normalized.includes(\"codex\")) {\n\t\treturn \"gpt-5.1-codex\";\n\t}\n\tif (normalized.includes(\"gpt-5\") || normalized.includes(\"gpt 5\")) {\n\t\treturn \"gpt-5.1\";\n\t}\n\n\treturn \"gpt-5.1\";\n}\n\nfunction getReasoningConfig(modelName: string | undefined, options: CodexRequestOptions = {}): ReasoningConfig {\n\tconst normalizedName = modelName?.toLowerCase() ?? 
\"\";\n\n\tconst isGpt52Codex = normalizedName.includes(\"gpt-5.2-codex\") || normalizedName.includes(\"gpt 5.2 codex\");\n\tconst isGpt52General = (normalizedName.includes(\"gpt-5.2\") || normalizedName.includes(\"gpt 5.2\")) && !isGpt52Codex;\n\tconst isCodexMax = normalizedName.includes(\"codex-max\") || normalizedName.includes(\"codex max\");\n\tconst isCodexMini =\n\t\tnormalizedName.includes(\"codex-mini\") ||\n\t\tnormalizedName.includes(\"codex mini\") ||\n\t\tnormalizedName.includes(\"codex_mini\") ||\n\t\tnormalizedName.includes(\"codex-mini-latest\");\n\tconst isCodex = normalizedName.includes(\"codex\") && !isCodexMini;\n\tconst isLightweight = !isCodexMini && (normalizedName.includes(\"nano\") || normalizedName.includes(\"mini\"));\n\tconst isGpt51General =\n\t\t(normalizedName.includes(\"gpt-5.1\") || normalizedName.includes(\"gpt 5.1\")) &&\n\t\t!isCodex &&\n\t\t!isCodexMax &&\n\t\t!isCodexMini;\n\n\tconst supportsXhigh = isGpt52General || isGpt52Codex || isCodexMax;\n\tconst supportsNone = isGpt52General || isGpt51General;\n\n\tconst defaultEffort: ReasoningConfig[\"effort\"] = isCodexMini\n\t\t? \"medium\"\n\t\t: supportsXhigh\n\t\t\t? \"high\"\n\t\t\t: isLightweight\n\t\t\t\t? \"minimal\"\n\t\t\t\t: \"medium\";\n\n\tlet effort = options.reasoningEffort || defaultEffort;\n\n\tif (isCodexMini) {\n\t\tif (effort === \"minimal\" || effort === \"low\" || effort === \"none\") {\n\t\t\teffort = \"medium\";\n\t\t}\n\t\tif (effort === \"xhigh\") {\n\t\t\teffort = \"high\";\n\t\t}\n\t\tif (effort !== \"high\" && effort !== \"medium\") {\n\t\t\teffort = \"medium\";\n\t\t}\n\t}\n\n\tif (!supportsXhigh && effort === \"xhigh\") {\n\t\teffort = \"high\";\n\t}\n\n\tif (!supportsNone && effort === \"none\") {\n\t\teffort = \"low\";\n\t}\n\n\tif (isCodex && effort === \"minimal\") {\n\t\teffort = \"low\";\n\t}\n\n\treturn {\n\t\teffort,\n\t\tsummary: options.reasoningSummary ?? \"auto\",\n\t};\n}\n\nfunction filterInput(input: InputItem[] | undefined): InputItem[] | undefined {\n\tif (!Array.isArray(input)) return input;\n\n\treturn input\n\t\t.filter((item) => item.type !== \"item_reference\")\n\t\t.map((item) => {\n\t\t\tif (item.id != null) {\n\t\t\t\tconst { id: _id, ...rest } = item;\n\t\t\t\treturn rest as InputItem;\n\t\t\t}\n\t\t\treturn item;\n\t\t});\n}\n\nexport async function transformRequestBody(\n\tbody: RequestBody,\n\toptions: CodexRequestOptions = {},\n\tprompt?: { instructions: string; developerMessages: string[] },\n): Promise<RequestBody> {\n\tconst normalizedModel = normalizeModel(body.model);\n\n\tbody.model = normalizedModel;\n\tbody.store = false;\n\tbody.stream = true;\n\n\tif (body.input && Array.isArray(body.input)) {\n\t\tbody.input = filterInput(body.input);\n\n\t\tif (body.input) {\n\t\t\tconst functionCallIds = new Set(\n\t\t\t\tbody.input\n\t\t\t\t\t.filter((item) => item.type === \"function_call\" && typeof item.call_id === \"string\")\n\t\t\t\t\t.map((item) => item.call_id as string),\n\t\t\t);\n\n\t\t\tbody.input = body.input.map((item) => {\n\t\t\t\tif (item.type === \"function_call_output\" && typeof item.call_id === \"string\") {\n\t\t\t\t\tconst callId = item.call_id as string;\n\t\t\t\t\tif (!functionCallIds.has(callId)) {\n\t\t\t\t\t\tconst itemRecord = item as unknown as Record<string, unknown>;\n\t\t\t\t\t\tconst toolName = typeof itemRecord.name === \"string\" ? itemRecord.name : \"tool\";\n\t\t\t\t\t\tlet text = \"\";\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tconst output = itemRecord.output;\n\t\t\t\t\t\t\ttext = typeof output === \"string\" ? 
output : JSON.stringify(output);\n\t\t\t\t\t\t} catch {\n\t\t\t\t\t\t\ttext = String(itemRecord.output ?? \"\");\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif (text.length > 16000) {\n\t\t\t\t\t\t\ttext = `${text.slice(0, 16000)}\\n...[truncated]`;\n\t\t\t\t\t\t}\n\t\t\t\t\t\treturn {\n\t\t\t\t\t\t\ttype: \"message\",\n\t\t\t\t\t\t\trole: \"assistant\",\n\t\t\t\t\t\t\tcontent: `[Previous ${toolName} result; call_id=${callId}]: ${text}`,\n\t\t\t\t\t\t} as InputItem;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\treturn item;\n\t\t\t});\n\t\t}\n\t}\n\n\tif (prompt?.developerMessages && prompt.developerMessages.length > 0 && Array.isArray(body.input)) {\n\t\tconst developerMessages = prompt.developerMessages.map(\n\t\t\t(text) =>\n\t\t\t\t({\n\t\t\t\t\ttype: \"message\",\n\t\t\t\t\trole: \"developer\",\n\t\t\t\t\tcontent: [{ type: \"input_text\", text }],\n\t\t\t\t}) as InputItem,\n\t\t);\n\t\tbody.input = [...developerMessages, ...body.input];\n\t}\n\n\tif (options.reasoningEffort !== undefined) {\n\t\tconst reasoningConfig = getReasoningConfig(normalizedModel, options);\n\t\tbody.reasoning = {\n\t\t\t...body.reasoning,\n\t\t\t...reasoningConfig,\n\t\t};\n\t} else {\n\t\tdelete body.reasoning;\n\t}\n\n\tbody.text = {\n\t\t...body.text,\n\t\tverbosity: options.textVerbosity || \"medium\",\n\t};\n\n\tconst include = Array.isArray(options.include) ? [...options.include] : [];\n\tinclude.push(\"reasoning.encrypted_content\");\n\tbody.include = Array.from(new Set(include));\n\n\tdelete body.max_output_tokens;\n\tdelete body.max_completion_tokens;\n\n\treturn body;\n}\n"]}
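The two prompt builders explain the `transformRequestBody` signature change above: instead of threading `codexInstructions`, `codexMode`, and a raw `systemPrompt` string through the transformer, callers now hand it a pre-assembled `{ instructions, developerMessages }` object, and the transformer prepends each developer message to `body.input` as a developer-role message. (Judging by the compiled source in the map, only `developerMessages` is consumed inside this function; `instructions` is presumably applied by the caller.) The hunk also adds the optional `prompt_cache_retention?: "in_memory" | "24h"` field to `RequestBody`. A hedged sketch of a call, with placeholder model id, options, and prompt strings:

```ts
import { transformRequestBody } from "@mariozechner/pi-ai/dist/providers/openai-codex/request-transformer.js";

// Typically produced by buildCodexSystemPrompt; inlined here for illustration.
const prompt = {
	instructions: "...Codex base instructions...",
	developerMessages: ["...bridge text...", "...user system prompt..."],
};

const body = await transformRequestBody(
	{ model: "gpt-5-codex", input: [] },               // normalized to "gpt-5.1-codex"
	{ reasoningEffort: "high", textVerbosity: "low" }, // reasoning is only set when an effort is passed
	prompt,
);

// Per the embedded source, the transformer forces body.store = false and
// body.stream = true, prepends prompt.developerMessages to body.input as
// developer messages, adds "reasoning.encrypted_content" to body.include,
// and strips max_output_tokens / max_completion_tokens.
```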