codex-auth-helper 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,207 @@
1
+ # Byte-compiled / optimized / DLL files
2
+ __pycache__/
3
+ *.py[codz]
4
+ *$py.class
5
+
6
+ # C extensions
7
+ *.so
8
+
9
+ # Distribution / packaging
10
+ .Python
11
+ build/
12
+ develop-eggs/
13
+ dist/
14
+ downloads/
15
+ eggs/
16
+ .eggs/
17
+ lib/
18
+ lib64/
19
+ parts/
20
+ sdist/
21
+ var/
22
+ wheels/
23
+ share/python-wheels/
24
+ *.egg-info/
25
+ .installed.cfg
26
+ *.egg
27
+ MANIFEST
28
+
29
+ # PyInstaller
30
+ # Usually these files are written by a python script from a template
31
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
32
+ *.manifest
33
+ *.spec
34
+
35
+ # Installer logs
36
+ pip-log.txt
37
+ pip-delete-this-directory.txt
38
+
39
+ # Unit test / coverage reports
40
+ htmlcov/
41
+ .tox/
42
+ .nox/
43
+ .coverage
44
+ .coverage.*
45
+ .cache
46
+ nosetests.xml
47
+ coverage.xml
48
+ *.cover
49
+ *.py.cover
50
+ .hypothesis/
51
+ .pytest_cache/
52
+ cover/
53
+
54
+ # Translations
55
+ *.mo
56
+ *.pot
57
+
58
+ # Django stuff:
59
+ *.log
60
+ local_settings.py
61
+ db.sqlite3
62
+ db.sqlite3-journal
63
+
64
+ # Flask stuff:
65
+ instance/
66
+ .webassets-cache
67
+
68
+ # Scrapy stuff:
69
+ .scrapy
70
+
71
+ # Sphinx documentation
72
+ docs/_build/
73
+
74
+ # PyBuilder
75
+ .pybuilder/
76
+ target/
77
+
78
+ # Jupyter Notebook
79
+ .ipynb_checkpoints
80
+
81
+ # IPython
82
+ profile_default/
83
+ ipython_config.py
84
+
85
+ # pyenv
86
+ # For a library or package, you might want to ignore these files since the code is
87
+ # intended to run in multiple environments; otherwise, check them in:
88
+ # .python-version
89
+
90
+ # pipenv
91
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
93
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
94
+ # install all needed dependencies.
95
+ #Pipfile.lock
96
+
97
+ # UV
98
+ # Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
99
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
100
+ # commonly ignored for libraries.
101
+ #uv.lock
102
+
103
+ # poetry
104
+ # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
105
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
106
+ # commonly ignored for libraries.
107
+ # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
108
+ #poetry.lock
109
+ #poetry.toml
110
+
111
+ # pdm
112
+ # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
113
+ # pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
114
+ # https://pdm-project.org/en/latest/usage/project/#working-with-version-control
115
+ #pdm.lock
116
+ #pdm.toml
117
+ .pdm-python
118
+ .pdm-build/
119
+
120
+ # pixi
121
+ # Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
122
+ #pixi.lock
123
+ # Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
124
+ # in the .venv directory. It is recommended not to include this directory in version control.
125
+ .pixi
126
+
127
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
128
+ __pypackages__/
129
+
130
+ # Celery stuff
131
+ celerybeat-schedule
132
+ celerybeat.pid
133
+
134
+ # SageMath parsed files
135
+ *.sage.py
136
+
137
+ # Environments
138
+ .env
139
+ .envrc
140
+ .venv
141
+ env/
142
+ venv/
143
+ ENV/
144
+ env.bak/
145
+ venv.bak/
146
+
147
+ # Spyder project settings
148
+ .spyderproject
149
+ .spyproject
150
+
151
+ # Rope project settings
152
+ .ropeproject
153
+
154
+ # mkdocs documentation
155
+ /site
156
+
157
+ # mypy
158
+ .mypy_cache/
159
+ .dmypy.json
160
+ dmypy.json
161
+
162
+ # Pyre type checker
163
+ .pyre/
164
+
165
+ # pytype static type analyzer
166
+ .pytype/
167
+
168
+ # Cython debug symbols
169
+ cython_debug/
170
+
171
+ # PyCharm
172
+ # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
173
+ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
174
+ # and can be added to the global gitignore or merged into this file. For a more nuclear
175
+ # option (not recommended) you can uncomment the following to ignore the entire idea folder.
176
+ #.idea/
177
+
178
+ # Abstra
179
+ # Abstra is an AI-powered process automation framework.
180
+ # Ignore directories containing user credentials, local state, and settings.
181
+ # Learn more at https://abstra.io/docs
182
+ .abstra/
183
+
184
+ # Visual Studio Code
185
+ # Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
186
+ # that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
187
+ # and can be added to the global gitignore or merged into this file. However, if you prefer,
188
+ # you could uncomment the following to ignore the entire vscode folder
189
+ # .vscode/
190
+
191
+ # Ruff stuff:
192
+ .ruff_cache/
193
+
194
+ # PyPI configuration file
195
+ .pypirc
196
+
197
+ # Cursor
198
+ # Cursor is an AI-powered code editor. `.cursorignore` specifies files/directories to
199
+ # exclude from AI features like autocomplete and code analysis. Recommended for sensitive data
200
+ # refer to https://docs.cursor.com/context/ignore-files
201
+ .cursorignore
202
+ .cursorindexingignore
203
+
204
+ # Marimo
205
+ marimo/_static/
206
+ marimo/_lsp/
207
+ __marimo__/
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 vCode
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1,156 @@
1
+ Metadata-Version: 2.4
2
+ Name: codex-auth-helper
3
+ Version: 0.1.0
4
+ Summary: Codex auth helpers for pydantic-ai OpenAI Responses models.
5
+ Project-URL: Homepage, https://github.com/vcoderun/codex-auth-helper
6
+ Project-URL: Issues, https://github.com/vcoderun/codex-auth-helper/issues
7
+ Project-URL: Repository, https://github.com/vcoderun/codex-auth-helper
8
+ License: MIT
9
+ License-File: LICENSE
10
+ Requires-Python: >=3.11
11
+ Requires-Dist: httpx>=0.28.1
12
+ Requires-Dist: openai>=1.109.1
13
+ Requires-Dist: pydantic-ai-slim>=1.73.0
14
+ Requires-Dist: typing-extensions>=4.12.0
15
+ Description-Content-Type: text/markdown
16
+
17
+ # codex-auth-helper
18
+
19
+ `codex-auth-helper` turns an existing local Codex auth session into a
20
+ `pydantic-ai` model.
21
+
22
+ It reads `~/.codex/auth.json`, refreshes access tokens when needed, builds a
23
+ custom `AsyncOpenAI` client for the Codex Responses endpoint, and returns a
24
+ ready-to-use `CodexResponsesModel`.
25
+
26
+ ## What It Does
27
+
28
+ - Reads tokens from `~/.codex/auth.json`
29
+ - Derives `ChatGPT-Account-Id` from the auth file or token claims
30
+ - Refreshes expired access tokens with `https://auth.openai.com/oauth/token`
31
+ - Writes refreshed tokens back to the auth file
32
+ - Builds an OpenAI-compatible client pointed at `https://chatgpt.com/backend-api/codex`
33
+ - Returns a `pydantic-ai` responses model that already applies the Codex backend requirements
34
+
35
+ The helper enforces two backend-specific behaviors for you:
36
+
37
+ - `openai_store=False`
38
+ - streamed responses even when `pydantic-ai` calls the non-streamed `request()` path
39
+
40
+ ## What It Does Not Do
41
+
42
+ - It does not log you into Codex
43
+ - It does not create `~/.codex/auth.json`
44
+ - It does not currently support `pydantic_ai.models.openai.OpenAIChatModel` or Chat Completions flows
45
+ - `OpenAIChatModel` support is planned for a later release
46
+ - It does not replace `pydantic-ai`; it only provides a model/client factory
47
+
48
+ ## Install
49
+
50
+ ```bash
51
+ uv pip install codex-auth-helper
52
+ ```
53
+
54
+ You also need an existing Codex auth session on the same machine:
55
+
56
+ ```text
57
+ ~/.codex/auth.json
58
+ ```
59
+
60
+ If you have not logged in yet:
61
+
62
+ ```bash
63
+ codex login
64
+ ```
65
+
66
+ ## Quick Start
67
+
68
+ ```python
69
+ from codex_auth_helper import create_codex_responses_model
70
+ from pydantic_ai import Agent
71
+
72
+ model = create_codex_responses_model("gpt-5")
73
+ agent = Agent(model, instructions="You are a helpful coding assistant.")
74
+
75
+ result = agent.run_sync("Hello!")
76
+ print(result.output)
77
+ ```
78
+
79
+ ## Custom Auth Path
80
+
81
+ If you want to read a different auth file, pass a custom config:
82
+
83
+ ```python
84
+ from pathlib import Path
85
+
86
+ from codex_auth_helper import CodexAuthConfig, create_codex_responses_model
87
+
88
+ config = CodexAuthConfig(auth_path=Path("/tmp/codex-auth.json"))
89
+ model = create_codex_responses_model("gpt-5", config=config)
90
+ ```
91
+
92
+ ## Passing Extra OpenAI Responses Settings
93
+
94
+ Additional `OpenAIResponsesModelSettings` can still be passed through. The helper
95
+ keeps `openai_store=False` unless you explicitly override the model after
96
+ construction.
97
+
98
+ ```python
99
+ from codex_auth_helper import create_codex_responses_model
100
+
101
+ model = create_codex_responses_model(
102
+ "gpt-5",
103
+ settings={
104
+ "openai_reasoning_summary": "concise",
105
+ },
106
+ )
107
+ ```
108
+
109
+ ## Lower-Level Client Factory
110
+
111
+ If you only want the authenticated OpenAI client, use `create_codex_async_openai(...)`:
112
+
113
+ ```python
114
+ from codex_auth_helper import create_codex_async_openai
115
+
116
+ client = create_codex_async_openai()
117
+ ```
118
+
119
+ This returns `CodexAsyncOpenAI`, a subclass of `openai.AsyncOpenAI`.
120
+
121
+ ## Public API
122
+
123
+ ```python
124
+ from codex_auth_helper import (
125
+ CodexAsyncOpenAI,
126
+ CodexAuthConfig,
127
+ CodexAuthState,
128
+ CodexAuthStore,
129
+ CodexResponsesModel,
130
+ CodexTokenManager,
131
+ create_codex_async_openai,
132
+ create_codex_responses_model,
133
+ )
134
+ ```
135
+
136
+ ## Errors
137
+
138
+ Typical failure modes:
139
+
140
+ - `Codex auth file was not found ...`
141
+ The machine is not logged into Codex yet.
142
+ - `Codex auth file ... does not contain valid JSON`
143
+ The auth file is corrupt or partially written.
144
+ - `ModelHTTPError ... Store must be set to false`
145
+ Means you are not using the helper-backed model instance.
146
+ - `ModelHTTPError ... Stream must be set to true`
147
+ Means you are not using `CodexResponsesModel`.
148
+
149
+ ## Package Notes
150
+
151
+ This package is intentionally small and focused:
152
+
153
+ - auth file parsing
154
+ - token refresh
155
+ - Codex-specific OpenAI client wiring
156
+ - `pydantic-ai` responses model factory
@@ -0,0 +1,140 @@
1
+ # codex-auth-helper
2
+
3
+ `codex-auth-helper` turns an existing local Codex auth session into a
4
+ `pydantic-ai` model.
5
+
6
+ It reads `~/.codex/auth.json`, refreshes access tokens when needed, builds a
7
+ custom `AsyncOpenAI` client for the Codex Responses endpoint, and returns a
8
+ ready-to-use `CodexResponsesModel`.
9
+
10
+ ## What It Does
11
+
12
+ - Reads tokens from `~/.codex/auth.json`
13
+ - Derives `ChatGPT-Account-Id` from the auth file or token claims
14
+ - Refreshes expired access tokens with `https://auth.openai.com/oauth/token`
15
+ - Writes refreshed tokens back to the auth file
16
+ - Builds an OpenAI-compatible client pointed at `https://chatgpt.com/backend-api/codex`
17
+ - Returns a `pydantic-ai` responses model that already applies the Codex backend requirements
18
+
19
+ The helper enforces two backend-specific behaviors for you:
20
+
21
+ - `openai_store=False`
22
+ - streamed responses even when `pydantic-ai` calls the non-streamed `request()` path
23
+
24
+ ## What It Does Not Do
25
+
26
+ - It does not log you into Codex
27
+ - It does not create `~/.codex/auth.json`
28
+ - It does not currently support `pydantic_ai.models.openai.OpenAIChatModel` or Chat Completions flows
29
+ - `OpenAIChatModel` support is planned for a later release
30
+ - It does not replace `pydantic-ai`; it only provides a model/client factory
31
+
32
+ ## Install
33
+
34
+ ```bash
35
+ uv pip install codex-auth-helper
36
+ ```
37
+
38
+ You also need an existing Codex auth session on the same machine:
39
+
40
+ ```text
41
+ ~/.codex/auth.json
42
+ ```
43
+
44
+ If you have not logged in yet:
45
+
46
+ ```bash
47
+ codex login
48
+ ```
49
+
50
+ ## Quick Start
51
+
52
+ ```python
53
+ from codex_auth_helper import create_codex_responses_model
54
+ from pydantic_ai import Agent
55
+
56
+ model = create_codex_responses_model("gpt-5")
57
+ agent = Agent(model, instructions="You are a helpful coding assistant.")
58
+
59
+ result = agent.run_sync("Hello!")
60
+ print(result.output)
61
+ ```
62
+
63
+ ## Custom Auth Path
64
+
65
+ If you want to read a different auth file, pass a custom config:
66
+
67
+ ```python
68
+ from pathlib import Path
69
+
70
+ from codex_auth_helper import CodexAuthConfig, create_codex_responses_model
71
+
72
+ config = CodexAuthConfig(auth_path=Path("/tmp/codex-auth.json"))
73
+ model = create_codex_responses_model("gpt-5", config=config)
74
+ ```
75
+
76
+ ## Passing Extra OpenAI Responses Settings
77
+
78
+ Additional `OpenAIResponsesModelSettings` can still be passed through. The helper
79
+ keeps `openai_store=False` unless you explicitly override the model after
80
+ construction.
81
+
82
+ ```python
83
+ from codex_auth_helper import create_codex_responses_model
84
+
85
+ model = create_codex_responses_model(
86
+ "gpt-5",
87
+ settings={
88
+ "openai_reasoning_summary": "concise",
89
+ },
90
+ )
91
+ ```
92
+
93
+ ## Lower-Level Client Factory
94
+
95
+ If you only want the authenticated OpenAI client, use `create_codex_async_openai(...)`:
96
+
97
+ ```python
98
+ from codex_auth_helper import create_codex_async_openai
99
+
100
+ client = create_codex_async_openai()
101
+ ```
102
+
103
+ This returns `CodexAsyncOpenAI`, a subclass of `openai.AsyncOpenAI`.
104
+
105
+ ## Public API
106
+
107
+ ```python
108
+ from codex_auth_helper import (
109
+ CodexAsyncOpenAI,
110
+ CodexAuthConfig,
111
+ CodexAuthState,
112
+ CodexAuthStore,
113
+ CodexResponsesModel,
114
+ CodexTokenManager,
115
+ create_codex_async_openai,
116
+ create_codex_responses_model,
117
+ )
118
+ ```
119
+
120
+ ## Errors
121
+
122
+ Typical failure modes:
123
+
124
+ - `Codex auth file was not found ...`
125
+ The machine is not logged into Codex yet.
126
+ - `Codex auth file ... does not contain valid JSON`
127
+ The auth file is corrupt or partially written.
128
+ - `ModelHTTPError ... Store must be set to false`
129
+ Means you are not using the helper-backed model instance.
130
+ - `ModelHTTPError ... Stream must be set to true`
131
+ Means you are not using `CodexResponsesModel`.
132
+
133
+ ## Package Notes
134
+
135
+ This package is intentionally small and focused:
136
+
137
+ - auth file parsing
138
+ - token refresh
139
+ - Codex-specific OpenAI client wiring
140
+ - `pydantic-ai` responses model factory
@@ -0,0 +1,25 @@
1
+ [project]
2
+ name = "codex-auth-helper"
3
+ version = "0.1.0"
4
+ description = "Codex auth helpers for pydantic-ai OpenAI Responses models."
5
+ readme = "README.md"
6
+ requires-python = ">=3.11"
7
+ license = { text = "MIT" }
8
+ dependencies = [
9
+ "httpx>=0.28.1",
10
+ "openai>=1.109.1",
11
+ "pydantic-ai-slim>=1.73.0",
12
+ "typing-extensions>=4.12.0",
13
+ ]
14
+
15
+ [project.urls]
16
+ Homepage = "https://github.com/vcoderun/codex-auth-helper"
17
+ Issues = "https://github.com/vcoderun/codex-auth-helper/issues"
18
+ Repository = "https://github.com/vcoderun/codex-auth-helper"
19
+
20
+ [build-system]
21
+ requires = ["hatchling"]
22
+ build-backend = "hatchling.build"
23
+
24
+ [tool.hatch.build.targets.wheel]
25
+ packages = ["src/codex_auth_helper"]
@@ -0,0 +1,17 @@
1
"""Public API for ``codex-auth-helper``.

Re-exports the auth primitives, the Codex-aware OpenAI client, and the
factory functions for building a ready-to-use responses model.
"""

from __future__ import annotations as _annotations

from .auth import CodexAuthConfig, CodexAuthState, CodexAuthStore, CodexTokenManager
from .client import CodexAsyncOpenAI, create_codex_async_openai
from .factory import create_codex_responses_model
from .model import CodexResponsesModel

# Kept alphabetically sorted, matching the convention in `auth/__init__.py`.
__all__ = (
    "CodexAsyncOpenAI",
    "CodexAuthConfig",
    "CodexAuthState",
    "CodexAuthStore",
    "CodexResponsesModel",
    "CodexTokenManager",
    "create_codex_async_openai",
    "create_codex_responses_model",
)
@@ -0,0 +1,13 @@
1
"""Package-level re-exports for the Codex auth building blocks."""

from __future__ import annotations as _annotations

from .config import CodexAuthConfig
from .manager import CodexTokenManager
from .state import CodexAuthState
from .store import CodexAuthStore

__all__ = (
    "CodexAuthConfig",
    "CodexAuthState",
    "CodexAuthStore",
    "CodexTokenManager",
)
@@ -0,0 +1,22 @@
1
+ from __future__ import annotations as _annotations
2
+
3
+ from dataclasses import dataclass, field
4
+ from datetime import timedelta
5
+ from pathlib import Path
6
+
7
+ __all__ = ("CodexAuthConfig",)
8
+
9
+
10
def default_auth_path() -> Path:
    """Return the Codex CLI auth file location (``~/.codex/auth.json``)."""
    return Path.home().joinpath(".codex", "auth.json")
12
+
13
+
14
@dataclass(frozen=True, slots=True)
class CodexAuthConfig:
    """Immutable settings for locating and refreshing local Codex auth.

    The defaults mirror a standard Codex CLI install, so a bare
    ``CodexAuthConfig()`` works for most machines.
    """

    # Where the Codex CLI stores its tokens on disk.
    auth_path: Path = field(default_factory=default_auth_path)
    # Base URL of the Codex Responses backend.
    api_base_url: str = "https://chatgpt.com/backend-api/codex"
    # OAuth client id used when exchanging the refresh token.
    client_id: str = "app_EMoamEEZ73f0CkXaXp7hrann"
    # Fallback token lifetime applied when no explicit expiry is known.
    default_token_ttl: timedelta = timedelta(hours=1)
    # OAuth issuer that hosts the `/oauth/token` refresh endpoint.
    issuer: str = "https://auth.openai.com"
    # Refresh this long before the deadline so requests never race expiry.
    refresh_margin: timedelta = timedelta(seconds=30)
    # Timeout (seconds) for HTTP clients created by the helper.
    timeout_seconds: float = 30.0
@@ -0,0 +1,115 @@
1
+ from __future__ import annotations as _annotations
2
+
3
+ import asyncio
4
+ from collections.abc import Mapping
5
+ from dataclasses import dataclass, field
6
+ from datetime import UTC, datetime
7
+
8
+ import httpx
9
+
10
+ from .config import CodexAuthConfig
11
+ from .state import CodexAuthState
12
+ from .store import CodexAuthStore
13
+
14
+ __all__ = ("CodexTokenManager",)
15
+
16
+
17
def _now_utc() -> datetime:
    """Return the current time as a timezone-aware UTC datetime."""
    return datetime.now(UTC)
19
+
20
+
21
def _response_mapping(response: httpx.Response) -> dict[str, object]:
    """Decode *response* as JSON, requiring a top-level JSON object."""
    decoded = response.json()
    if isinstance(decoded, dict):
        return decoded
    raise ValueError("Expected the token endpoint to return an object.")
26
+
27
+
28
def _string_value(data: Mapping[str, object], key: str) -> str | None:
    """Return ``data[key]`` when it is a non-empty string, else ``None``."""
    candidate = data.get(key)
    if isinstance(candidate, str) and candidate:
        return candidate
    return None
31
+
32
+
33
@dataclass(slots=True)
class CodexTokenManager:
    """Serve Codex access tokens, refreshing them lazily through OAuth.

    The manager caches the auth state read from ``store``; every call to
    ``get_access_token`` checks the expiry deadline under an asyncio lock
    and, when the token is at (or within ``refresh_margin`` of) its
    deadline, posts the refresh token to the issuer and persists the
    refreshed state back through ``store``.
    """

    config: CodexAuthConfig
    store: CodexAuthStore
    http_client: httpx.AsyncClient
    # When True, `close()` also closes `http_client`.
    owns_http_client: bool = False
    _lock: asyncio.Lock = field(default_factory=asyncio.Lock, init=False)
    _state: CodexAuthState = field(init=False, repr=False)

    def __post_init__(self) -> None:
        # Load the persisted auth state eagerly so file errors surface early.
        self._state = self.store.read_state()

    @property
    def current_state(self) -> CodexAuthState:
        """The most recently loaded or refreshed auth state."""
        return self._state

    @property
    def current_account_id(self) -> str | None:
        """Account id from the cached state, if one could be derived."""
        return self._state.account_id

    async def close(self) -> None:
        """Close the HTTP client if this manager owns it."""
        if self.owns_http_client:
            await self.http_client.aclose()

    async def get_access_token(self) -> str:
        """Return a valid access token, refreshing under the lock if needed."""
        async with self._lock:
            if self._should_refresh(self._state):
                self._state = await self._refresh_locked()
            return self._state.access_token

    async def prepare_account_header(self, request: httpx.Request) -> None:
        """Attach ``ChatGPT-Account-Id`` to requests bound for chatgpt.com."""
        if request.url.host != "chatgpt.com":
            return
        chatgpt_account = self.current_account_id
        if chatgpt_account is not None:
            request.headers["ChatGPT-Account-Id"] = chatgpt_account

    def _refresh_deadline(self, state: CodexAuthState) -> datetime | None:
        # Prefer the explicit expiry; otherwise assume the configured TTL
        # starting from the last refresh. With neither, no deadline exists.
        if state.expires_at is not None:
            return state.expires_at
        if state.last_refresh is None:
            return None
        return state.last_refresh + self.config.default_token_ttl

    def _should_refresh(self, state: CodexAuthState) -> bool:
        # Without any deadline information the token is used as-is.
        deadline = self._refresh_deadline(state)
        if deadline is None:
            return False
        return deadline <= _now_utc() + self.config.refresh_margin

    async def _refresh_locked(self) -> CodexAuthState:
        """Exchange the refresh token for new tokens and persist the result.

        Must be called while holding ``self._lock``.
        """
        form = httpx.QueryParams(
            {
                "client_id": self.config.client_id,
                "grant_type": "refresh_token",
                "refresh_token": self._state.refresh_token,
            }
        )
        response = await self.http_client.post(
            f"{self.config.issuer}/oauth/token",
            content=str(form),
            headers={"Content-Type": "application/x-www-form-urlencoded"},
        )
        response.raise_for_status()

        token_payload = _response_mapping(response)
        # Fall back to previous values for anything the endpoint omitted.
        refreshed = CodexAuthState.from_json_dict(
            {
                "OPENAI_API_KEY": self._state.openai_api_key,
                "auth_mode": self._state.auth_mode,
                "last_refresh": _now_utc().isoformat().replace("+00:00", "Z"),
                "tokens": {
                    "access_token": _string_value(token_payload, "access_token"),
                    "account_id": _string_value(token_payload, "account_id")
                    or self._state.account_id,
                    "id_token": _string_value(token_payload, "id_token")
                    or self._state.id_token,
                    "refresh_token": _string_value(token_payload, "refresh_token")
                    or self._state.refresh_token,
                },
            }
        )
        self.store.write_state(refreshed)
        return refreshed
@@ -0,0 +1,165 @@
1
+ from __future__ import annotations as _annotations
2
+
3
+ import base64
4
+ import json
5
+ from collections.abc import Mapping
6
+ from dataclasses import dataclass
7
+ from datetime import UTC, datetime
8
+ from typing import Final
9
+
10
+ __all__ = ("CodexAuthState",)
11
+
12
+ _AUTH_CLAIMS_KEY: Final[str] = "https://api.openai.com/auth"
13
+ _ORGANIZATIONS_KEY: Final[str] = "organizations"
14
+
15
+
16
def _require_str(data: dict[str, object], key: str) -> str:
    """Return ``data[key]`` as a non-empty string, raising ``ValueError`` otherwise."""
    candidate = data.get(key)
    if isinstance(candidate, str) and candidate:
        return candidate
    raise ValueError(f"Expected a non-empty string for `{key}`.")
21
+
22
+
23
def _optional_str(data: dict[str, object], key: str) -> str | None:
    """Return ``data[key]`` when it is a non-empty string, else ``None``."""
    candidate = data.get(key)
    if isinstance(candidate, str) and candidate:
        return candidate
    return None
26
+
27
+
28
def _as_string_mapping(value: object) -> dict[str, object] | None:
    """Return *value* restricted to its string keys, or ``None`` for non-dicts."""
    if not isinstance(value, dict):
        return None
    # Drop non-string keys so downstream lookups are well-typed.
    return {key: item for key, item in value.items() if isinstance(key, str)}
37
+
38
+
39
def _parse_timestamp(value: str | None) -> datetime | None:
    """Parse an ISO-8601 timestamp (``Z`` suffix allowed) into a UTC datetime."""
    if value is None:
        return None
    # `fromisoformat` does not accept a literal `Z` suffix on older Pythons.
    return datetime.fromisoformat(value.replace("Z", "+00:00")).astimezone(UTC)
44
+
45
+
46
def _encode_timestamp(value: datetime | None) -> str | None:
    """Format *value* as an ISO-8601 UTC timestamp with a ``Z`` suffix."""
    if value is None:
        return None
    normalized = value.astimezone(UTC)
    return normalized.isoformat().replace("+00:00", "Z")
50
+
51
+
52
def _parse_jwt_claims(token: str) -> dict[str, object] | None:
    """Best-effort decode of a JWT payload; ``None`` when it cannot be decoded.

    NOTE: the signature is not verified — this only reads claims locally.
    """
    segments = token.split(".")
    if len(segments) != 3:
        return None
    body = segments[1]
    # Restore the base64 padding that JWT encoding strips.
    padded = body + "=" * (-len(body) % 4)
    try:
        claims = json.loads(base64.urlsafe_b64decode(padded.encode("utf-8")))
    except (ValueError, json.JSONDecodeError):
        return None
    return _as_string_mapping(claims)
62
+
63
+
64
def _extract_account_id_from_claims(claims: Mapping[str, object]) -> str | None:
    """Pull a ChatGPT account id out of decoded JWT claims.

    Checks, in order: a top-level ``chatgpt_account_id``, the same key nested
    under the OpenAI auth claim, and finally the id of the first organization.
    """
    top_level = claims.get("chatgpt_account_id")
    if isinstance(top_level, str) and top_level:
        return top_level

    nested = _as_string_mapping(claims.get(_AUTH_CLAIMS_KEY))
    if nested is not None:
        candidate = nested.get("chatgpt_account_id")
        if isinstance(candidate, str) and candidate:
            return candidate

    org_list = claims.get(_ORGANIZATIONS_KEY)
    if isinstance(org_list, list) and org_list:
        first_org = _as_string_mapping(org_list[0])
        if first_org is not None:
            org_id = first_org.get("id")
            if isinstance(org_id, str) and org_id:
                return org_id

    return None
85
+
86
+
87
def _extract_account_id(
    *, access_token: str, account_id: str | None, id_token: str | None
) -> str | None:
    """Resolve the account id: explicit value first, then id-token claims,
    then access-token claims."""
    if account_id is not None:
        return account_id

    # The id token is preferred; the access token is the fallback.
    for token in (id_token, access_token):
        if token is None:
            continue
        claims = _parse_jwt_claims(token)
        if claims is None:
            continue
        found = _extract_account_id_from_claims(claims)
        if found is not None:
            return found
    return None
104
+
105
+
106
def _extract_expiry(*, access_token: str, id_token: str | None) -> datetime | None:
    """Expiry from the ``exp`` claim, preferring the id token over the access token."""
    for candidate in (id_token, access_token):
        if candidate is None:
            continue
        claims = _parse_jwt_claims(candidate)
        if claims is None:
            continue
        expiry = claims.get("exp")
        if isinstance(expiry, int):
            return datetime.fromtimestamp(expiry, tz=UTC)
    return None
117
+
118
+
119
@dataclass(frozen=True, slots=True)
class CodexAuthState:
    """Immutable snapshot of the contents of a Codex ``auth.json`` file."""

    access_token: str
    refresh_token: str
    account_id: str | None = None
    auth_mode: str | None = None
    expires_at: datetime | None = None
    id_token: str | None = None
    last_refresh: datetime | None = None
    openai_api_key: str | None = None

    @classmethod
    def from_json_dict(cls, data: dict[str, object]) -> CodexAuthState:
        """Build a state from the decoded auth-file JSON object.

        Raises:
            ValueError: when ``tokens`` is missing or lacks the required
                ``access_token`` / ``refresh_token`` strings.
        """
        token_block = _as_string_mapping(data.get("tokens"))
        if token_block is None:
            raise ValueError("Expected `tokens` to be an object.")

        access_token = _require_str(token_block, "access_token")
        id_token = _optional_str(token_block, "id_token")
        # Derived values: the account id may come from the file or from
        # token claims; the expiry comes from the tokens' `exp` claims.
        account_id = _extract_account_id(
            access_token=access_token,
            account_id=_optional_str(token_block, "account_id"),
            id_token=id_token,
        )
        return cls(
            access_token=access_token,
            refresh_token=_require_str(token_block, "refresh_token"),
            account_id=account_id,
            auth_mode=_optional_str(data, "auth_mode"),
            expires_at=_extract_expiry(access_token=access_token, id_token=id_token),
            id_token=id_token,
            last_refresh=_parse_timestamp(_optional_str(data, "last_refresh")),
            openai_api_key=_optional_str(data, "OPENAI_API_KEY"),
        )

    def to_json_dict(self) -> dict[str, object]:
        """Serialize back to the auth-file JSON shape (inverse of ``from_json_dict``)."""
        token_block: dict[str, object] = {
            "access_token": self.access_token,
            "account_id": self.account_id,
            "id_token": self.id_token,
            "refresh_token": self.refresh_token,
        }
        return {
            "OPENAI_API_KEY": self.openai_api_key,
            "auth_mode": self.auth_mode,
            "last_refresh": _encode_timestamp(self.last_refresh),
            "tokens": token_block,
        }
@@ -0,0 +1,37 @@
1
+ from __future__ import annotations as _annotations
2
+
3
+ import json
4
+ from dataclasses import dataclass
5
+ from json import JSONDecodeError
6
+ from pathlib import Path
7
+
8
+ from .state import CodexAuthState
9
+
10
+ __all__ = ("CodexAuthStore",)
11
+
12
+
13
@dataclass(slots=True)
class CodexAuthStore:
    """Load and persist :class:`CodexAuthState` from a JSON file on disk."""

    # Location of the auth file (usually `~/.codex/auth.json`).
    path: Path

    def read_state(self) -> CodexAuthState:
        """Read and validate the auth file.

        Raises:
            FileNotFoundError: when the file does not exist.
            ValueError: when the file is not valid JSON, is not a JSON
                object, or fails `CodexAuthState` validation.
        """
        try:
            text = self.path.read_text(encoding="utf-8")
        except FileNotFoundError as exc:
            raise FileNotFoundError(f"Codex auth file was not found at `{self.path}`.") from exc

        try:
            raw = json.loads(text)
        except JSONDecodeError as exc:
            raise ValueError(
                f"Codex auth file at `{self.path}` does not contain valid JSON."
            ) from exc

        if not isinstance(raw, dict):
            raise ValueError(f"Codex auth file at `{self.path}` must contain a JSON object.")
        return CodexAuthState.from_json_dict(raw)

    def write_state(self, state: CodexAuthState) -> None:
        """Atomically write *state* to the auth file as pretty-printed JSON.

        The payload is written to a sibling temporary file and then renamed
        over the target, so a crash mid-write cannot leave behind the
        truncated/partially-written file that `read_state` would reject as
        invalid JSON.
        """
        self.path.parent.mkdir(parents=True, exist_ok=True)
        encoded = json.dumps(state.to_json_dict(), indent=2) + "\n"
        tmp_path = self.path.with_name(self.path.name + ".tmp")
        tmp_path.write_text(encoded, encoding="utf-8")
        # `Path.replace` is an atomic rename on POSIX filesystems.
        tmp_path.replace(self.path)
@@ -0,0 +1,58 @@
1
+ from __future__ import annotations as _annotations
2
+
3
+ import httpx
4
+ from openai import AsyncOpenAI, Omit
5
+ from typing_extensions import override
6
+
7
+ from .auth import CodexAuthConfig, CodexAuthStore, CodexTokenManager
8
+
9
+ __all__ = ("CodexAsyncOpenAI", "create_codex_async_openai")
10
+
11
+
12
class CodexAsyncOpenAI(AsyncOpenAI):
    """``AsyncOpenAI`` subclass wired to the Codex backend via a token manager."""

    def __init__(
        self,
        *,
        base_url: str,
        http_client: httpx.AsyncClient,
        token_manager: CodexTokenManager,
    ) -> None:
        # Store the manager before calling super(): the `default_headers`
        # property below reads it.
        self.token_manager = token_manager
        super().__init__(
            api_key=token_manager.get_access_token,
            base_url=base_url,
            http_client=http_client,
        )

    @property
    @override
    def default_headers(self) -> dict[str, str | Omit]:
        """Base headers plus ``ChatGPT-Account-Id`` when an account id is known."""
        merged = dict(super().default_headers)
        chatgpt_account = self.token_manager.current_account_id
        if chatgpt_account is not None:
            merged["ChatGPT-Account-Id"] = chatgpt_account
        return merged
35
+
36
+
37
def create_codex_async_openai(
    *,
    config: CodexAuthConfig | None = None,
    http_client: httpx.AsyncClient | None = None,
) -> CodexAsyncOpenAI:
    """Build a ``CodexAsyncOpenAI`` backed by the local Codex auth state.

    Args:
        config: Auth/endpoint settings; defaults to ``CodexAuthConfig()``.
        http_client: Optional shared client. When omitted, a new client is
            created and ownership passes to the token manager, which closes
            it from its ``close()``.
    """
    cfg = config if config is not None else CodexAuthConfig()
    manager_owns_client = http_client is None
    if http_client is None:
        http_client = httpx.AsyncClient(
            follow_redirects=True,
            timeout=cfg.timeout_seconds,
        )
    manager = CodexTokenManager(
        config=cfg,
        store=CodexAuthStore(cfg.auth_path),
        http_client=http_client,
        owns_http_client=manager_owns_client,
    )
    return CodexAsyncOpenAI(
        base_url=cfg.api_base_url,
        http_client=http_client,
        token_manager=manager,
    )
@@ -0,0 +1,30 @@
1
+ from __future__ import annotations as _annotations
2
+
3
+ import httpx
4
+ from pydantic_ai.models.openai import OpenAIResponsesModelSettings
5
+ from pydantic_ai.providers.openai import OpenAIProvider
6
+
7
+ from .auth import CodexAuthConfig
8
+ from .client import create_codex_async_openai
9
+ from .model import CodexResponsesModel
10
+
11
+ __all__ = ("create_codex_responses_model",)
12
+
13
+
14
def create_codex_responses_model(
    model_name: str,
    *,
    config: CodexAuthConfig | None = None,
    http_client: httpx.AsyncClient | None = None,
    settings: OpenAIResponsesModelSettings | None = None,
) -> CodexResponsesModel:
    """Create a ``CodexResponsesModel`` authenticated via the local Codex session.

    Args:
        model_name: Responses model name, e.g. ``"gpt-5"``.
        config: Optional auth configuration (auth path, endpoints, timeouts).
        http_client: Optional shared ``httpx.AsyncClient``.
        settings: Extra ``OpenAIResponsesModelSettings`` merged into the
            defaults. ``openai_store`` is always forced to ``False``; the
            Codex backend rejects stored responses
            ("Store must be set to false").
    """
    client = create_codex_async_openai(config=config, http_client=http_client)
    model_settings: OpenAIResponsesModelSettings = {"openai_store": False}
    if settings is not None:
        model_settings.update(settings)
    # Re-assert after the merge: previously a caller-supplied
    # `openai_store=True` slipped through (the old `setdefault` was dead
    # code) and only surfaced later as a backend ModelHTTPError.
    model_settings["openai_store"] = False
    return CodexResponsesModel(
        model_name,
        provider=OpenAIProvider(openai_client=client),
        settings=model_settings,
    )
@@ -0,0 +1,25 @@
1
+ from __future__ import annotations as _annotations
2
+
3
+ from pydantic_ai.messages import ModelRequest, ModelResponse
4
+ from pydantic_ai.models import ModelRequestParameters
5
+ from pydantic_ai.models.openai import OpenAIResponsesModel
6
+ from pydantic_ai.settings import ModelSettings
7
+
8
+ __all__ = ("CodexResponsesModel",)
9
+
10
+
11
class CodexResponsesModel(OpenAIResponsesModel):
    """``OpenAIResponsesModel`` whose non-streaming path is served by streaming.

    The Codex backend requires streaming ("Stream must be set to true"), so
    ``request()`` drains a streamed response and returns its final
    ``ModelResponse`` instead of issuing a plain request.
    """

    async def request(
        self,
        messages: list[ModelRequest | ModelResponse],
        model_settings: ModelSettings | None,
        model_request_parameters: ModelRequestParameters,
    ) -> ModelResponse:
        stream_cm = super().request_stream(
            messages,
            model_settings,
            model_request_parameters,
        )
        async with stream_cm as stream:
            # Consume every event; the accumulated response is read after.
            async for _event in stream:
                pass
            return stream.get()