codex-agent-sdk 0.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- codex_agent_sdk/__init__.py +86 -0
- codex_agent_sdk/agent.py +576 -0
- codex_agent_sdk/cli.py +245 -0
- codex_agent_sdk/client.py +363 -0
- codex_agent_sdk/errors.py +54 -0
- codex_agent_sdk/models.py +377 -0
- codex_agent_sdk/py.typed +0 -0
- codex_agent_sdk/query.py +38 -0
- codex_agent_sdk/schema.py +102 -0
- codex_agent_sdk/schema_tools.py +260 -0
- codex_agent_sdk/transport/__init__.py +38 -0
- codex_agent_sdk/transport/subprocess.py +184 -0
- codex_agent_sdk/types.py +66 -0
- codex_agent_sdk-0.0.1.dist-info/METADATA +241 -0
- codex_agent_sdk-0.0.1.dist-info/RECORD +18 -0
- codex_agent_sdk-0.0.1.dist-info/WHEEL +4 -0
- codex_agent_sdk-0.0.1.dist-info/entry_points.txt +2 -0
- codex_agent_sdk-0.0.1.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
"""Codex Agent SDK for Python."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import logging
|
|
6
|
+
|
|
7
|
+
from .agent import Agent, Conversation, StreamingResponse, run
|
|
8
|
+
from .client import CodexClient
|
|
9
|
+
from .errors import (
|
|
10
|
+
CodexConnectionError,
|
|
11
|
+
CodexJSONDecodeError,
|
|
12
|
+
CodexProcessError,
|
|
13
|
+
CodexRPCError,
|
|
14
|
+
CodexSchemaGenerationError,
|
|
15
|
+
CodexSchemaValidationError,
|
|
16
|
+
CodexSDKError,
|
|
17
|
+
)
|
|
18
|
+
from .models import (
|
|
19
|
+
CommandApproval,
|
|
20
|
+
Event,
|
|
21
|
+
ExecCompleted,
|
|
22
|
+
ExecStarted,
|
|
23
|
+
FileChangeApproval,
|
|
24
|
+
ItemCompleted,
|
|
25
|
+
ItemStarted,
|
|
26
|
+
McpStartupComplete,
|
|
27
|
+
MessageCompleted,
|
|
28
|
+
MessageDelta,
|
|
29
|
+
Response,
|
|
30
|
+
TaskCompleted,
|
|
31
|
+
TaskStarted,
|
|
32
|
+
Thread,
|
|
33
|
+
ThreadStarted,
|
|
34
|
+
ToolInput,
|
|
35
|
+
Turn,
|
|
36
|
+
TurnCompleted,
|
|
37
|
+
TurnStarted,
|
|
38
|
+
)
|
|
39
|
+
from .query import query
|
|
40
|
+
from .schema import CodexSchemaValidator, load_schema_validator_from_codex_cli
|
|
41
|
+
from .types import CodexClientOptions
|
|
42
|
+
|
|
43
|
+
logging.getLogger("codex_agent_sdk").addHandler(logging.NullHandler())
|
|
44
|
+
|
|
45
|
+
__all__ = [
|
|
46
|
+
# High-level API
|
|
47
|
+
"Agent",
|
|
48
|
+
"Conversation",
|
|
49
|
+
"StreamingResponse",
|
|
50
|
+
"run",
|
|
51
|
+
# Models
|
|
52
|
+
"Thread",
|
|
53
|
+
"Turn",
|
|
54
|
+
"Response",
|
|
55
|
+
"Event",
|
|
56
|
+
"ThreadStarted",
|
|
57
|
+
"TurnStarted",
|
|
58
|
+
"TurnCompleted",
|
|
59
|
+
"MessageDelta",
|
|
60
|
+
"MessageCompleted",
|
|
61
|
+
"TaskStarted",
|
|
62
|
+
"TaskCompleted",
|
|
63
|
+
"ItemStarted",
|
|
64
|
+
"ItemCompleted",
|
|
65
|
+
"ExecStarted",
|
|
66
|
+
"ExecCompleted",
|
|
67
|
+
"McpStartupComplete",
|
|
68
|
+
"CommandApproval",
|
|
69
|
+
"FileChangeApproval",
|
|
70
|
+
"ToolInput",
|
|
71
|
+
# Low-level API
|
|
72
|
+
"CodexClient",
|
|
73
|
+
"CodexClientOptions",
|
|
74
|
+
"query",
|
|
75
|
+
# Errors
|
|
76
|
+
"CodexSDKError",
|
|
77
|
+
"CodexConnectionError",
|
|
78
|
+
"CodexProcessError",
|
|
79
|
+
"CodexJSONDecodeError",
|
|
80
|
+
"CodexRPCError",
|
|
81
|
+
"CodexSchemaGenerationError",
|
|
82
|
+
"CodexSchemaValidationError",
|
|
83
|
+
# Schema
|
|
84
|
+
"CodexSchemaValidator",
|
|
85
|
+
"load_schema_validator_from_codex_cli",
|
|
86
|
+
]
|
codex_agent_sdk/agent.py
ADDED
|
@@ -0,0 +1,576 @@
|
|
|
1
|
+
"""High-level Agent API with Pythonic syntactic sugar."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from collections.abc import AsyncIterator, Awaitable, Callable
|
|
6
|
+
from contextlib import asynccontextmanager
|
|
7
|
+
from dataclasses import dataclass, field
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
from typing import Any, overload
|
|
10
|
+
|
|
11
|
+
from .client import CodexClient
|
|
12
|
+
from .models import (
|
|
13
|
+
CommandApproval,
|
|
14
|
+
Event,
|
|
15
|
+
FileChangeApproval,
|
|
16
|
+
MessageDelta,
|
|
17
|
+
Response,
|
|
18
|
+
Thread,
|
|
19
|
+
ToolInput,
|
|
20
|
+
Turn,
|
|
21
|
+
TurnCompleted,
|
|
22
|
+
)
|
|
23
|
+
from .types import CodexClientOptions
|
|
24
|
+
|
|
25
|
+
# Type aliases for handlers
|
|
26
|
+
CommandHandler = Callable[[CommandApproval], Awaitable[str | dict[str, Any]]]
|
|
27
|
+
FileChangeHandler = Callable[[FileChangeApproval], Awaitable[str | dict[str, Any]]]
|
|
28
|
+
ToolInputHandler = Callable[[ToolInput], Awaitable[dict[str, Any]]]
|
|
29
|
+
EventHandler = Callable[[Event], Awaitable[None]]
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
@dataclass
class AgentConfig:
    """Configuration for an Agent."""

    # Default model identifier sent when starting a thread.
    model: str = "gpt-5.2-codex"
    # Working directory; forwarded to the client as a string when set.
    cwd: str | Path | None = None
    # Path (or bare command name) of the codex CLI executable.
    codex_path: str = "codex"
    # Client identity reported to the CLI.
    client_name: str = "codex_agent_sdk"
    client_version: str = "0.1.0"
    # Turn-level defaults; only included in params when truthy.
    approval_policy: str | None = None
    effort: str | None = None
    # Extra environment variables for the CLI subprocess.
    env: dict[str, str] = field(default_factory=dict)
    # Extra CLI arguments appended to the codex invocation.
    args: list[str] = field(default_factory=list)
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
class Conversation:
    """A conversation thread with context manager support.

    Wraps an already-started thread on an open CodexClient; created by
    ``Agent.conversation()``. Each ``send``/``stream`` call starts a new
    turn on the same thread, so context accumulates across calls.
    """

    def __init__(
        self,
        agent: Agent,
        thread: Thread,
        client: CodexClient,
    ) -> None:
        self._agent = agent
        self._thread = thread
        self._client = client
        # All events observed across every turn of this conversation.
        self._events: list[Event] = []

    @property
    def id(self) -> str:
        """Thread ID."""
        return self._thread.id

    @property
    def thread(self) -> Thread:
        """The underlying thread object."""
        return self._thread

    async def send(self, prompt: str, **kwargs: Any) -> Response:
        """Send a message and get the full response.

        Drains ``stream()`` to completion, joining MessageDelta text and
        capturing the final Turn from the TurnCompleted event.
        """
        text_parts: list[str] = []
        events: list[Event] = []
        turn: Turn | None = None

        async for event in self.stream(prompt, **kwargs):
            events.append(event)
            if isinstance(event, MessageDelta):
                text_parts.append(event.delta)
            elif isinstance(event, TurnCompleted):
                turn = Turn(
                    id=event.turn_id or "",
                    thread_id=event.thread_id,
                    status=event.status,
                )

        return Response(
            text="".join(text_parts),
            turn=turn,
            thread=self._thread,
            events=events,
        )

    async def stream(self, prompt: str, **kwargs: Any) -> AsyncIterator[Event]:
        """Send a message and stream events.

        Yields every notification as a typed Event until the TurnCompleted
        event for this turn arrives, then stops.
        """
        params = {"threadId": self._thread.id, "input": [{"type": "text", "text": prompt}]}

        # Add agent-level defaults; setdefault so explicit kwargs win.
        if self._agent._config.approval_policy:
            params.setdefault("approvalPolicy", self._agent._config.approval_policy)
        if self._agent._config.effort:
            params.setdefault("effort", self._agent._config.effort)
        if self._agent._config.cwd:
            params.setdefault("cwd", str(self._agent._config.cwd))

        params.update(kwargs)

        response = await self._client.turn_start(params)
        # Turn id from the start response, used to match the terminal event.
        turn_id = response.get("turn", {}).get("id")

        async for note in self._client.notifications():
            event = Event.from_notification(note)
            self._events.append(event)
            yield event

            # Stop at the end of *this* turn. If the server gave us no turn
            # id, any TurnCompleted terminates the stream.
            if isinstance(event, TurnCompleted) and (
                turn_id is None or event.turn_id == turn_id
            ):
                return

    async def stream_text(self, prompt: str, **kwargs: Any) -> AsyncIterator[str]:
        """Send a message and stream only text deltas."""
        async for event in self.stream(prompt, **kwargs):
            if isinstance(event, MessageDelta):
                yield event.delta
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
class StreamingResponse:
    """A streaming response that can be iterated or awaited.

    Lazily runs one prompt on a fresh client/thread the first time it is
    consumed. Single-use: iterating (or calling ``text()`` / ``response()`` /
    ``events()``) a second time raises RuntimeError.
    """

    def __init__(
        self,
        agent: Agent,
        prompt: str,
        kwargs: dict[str, Any],
    ) -> None:
        self._agent = agent
        self._prompt = prompt
        # Extra turn_start params, applied after agent defaults.
        self._kwargs = kwargs
        self._events: list[Event] = []
        self._text_parts: list[str] = []
        # Guards against consuming the stream twice.
        self._consumed = False

    def __aiter__(self) -> AsyncIterator[str]:
        """Iterate over text chunks."""
        return self._stream_text()

    async def _stream_text(self) -> AsyncIterator[str]:
        """Internal text streaming."""
        async for event in self._stream_events():
            if isinstance(event, MessageDelta):
                yield event.delta

    async def _stream_events(self) -> AsyncIterator[Event]:
        """Internal event streaming.

        Opens a client, starts a thread and a turn, then yields typed events
        until the matching TurnCompleted. Accumulates events and text deltas
        as side effects so ``text()``/``response()`` can read them afterwards.
        """
        if self._consumed:
            raise RuntimeError("Response already consumed")
        self._consumed = True

        async with self._agent._create_client() as client:
            thread_resp = await client.thread_start({"model": self._agent._config.model})
            thread = Thread.from_dict(thread_resp)

            params: dict[str, Any] = {
                "threadId": thread.id,
                "input": [{"type": "text", "text": self._prompt}],
            }

            # Agent-level defaults first; caller kwargs override below.
            if self._agent._config.approval_policy:
                params["approvalPolicy"] = self._agent._config.approval_policy
            if self._agent._config.effort:
                params["effort"] = self._agent._config.effort
            if self._agent._config.cwd:
                params["cwd"] = str(self._agent._config.cwd)

            params.update(self._kwargs)

            response = await client.turn_start(params)
            turn_id = response.get("turn", {}).get("id")

            async for note in client.notifications():
                event = Event.from_notification(note)
                self._events.append(event)
                if isinstance(event, MessageDelta):
                    self._text_parts.append(event.delta)
                yield event

                # Terminate on this turn's completion (or any completion if
                # the server did not report a turn id).
                if isinstance(event, TurnCompleted) and (
                    turn_id is None or event.turn_id == turn_id
                ):
                    return

    async def text(self) -> str:
        """Consume the stream and return full text."""
        # Draining _stream_text fills _text_parts via _stream_events.
        async for _ in self._stream_text():
            pass
        return "".join(self._text_parts)

    async def response(self) -> Response:
        """Consume the stream and return full Response object."""
        turn: Turn | None = None
        async for event in self._stream_events():
            if isinstance(event, TurnCompleted):
                turn = Turn(
                    id=event.turn_id or "",
                    thread_id=event.thread_id,
                    status=event.status,
                )
        # NOTE(review): unlike Conversation.send, no thread= is passed here,
        # so Response.thread is presumably left at its default — confirm
        # against models.Response and consider including the started thread.
        return Response(
            text="".join(self._text_parts),
            turn=turn,
            events=self._events,
        )

    def events(self) -> AsyncIterator[Event]:
        """Iterate over typed Event objects."""
        return self._stream_events()
|
|
219
|
+
|
|
220
|
+
|
|
221
|
+
class Agent:
    """
    High-level Pythonic interface for Codex agents.

    Examples
    --------
    Simple one-shot:

        agent = Agent(model="gpt-5.1-codex")
        async for chunk in agent.run("Hello!"):
            print(chunk, end="")

    With builder pattern:

        agent = (
            Agent()
            .model("gpt-5.1-codex")
            .cwd("/path/to/project")
            .auto_approve_commands()
        )

    With decorator handlers:

        agent = Agent()

        @agent.on_command_approval
        async def handle_command(cmd: CommandApproval) -> str:
            print(f"Running: {cmd.command}")
            return "accept"

    With conversation context:

        async with agent.conversation() as conv:
            response = await conv.send("Hello!")
            print(response.text)
            response = await conv.send("Tell me more")

    """

    def __init__(
        self,
        model: str = "gpt-5.2-codex",
        *,
        cwd: str | Path | None = None,
        codex_path: str = "codex",
        client_name: str = "codex_agent_sdk",
        client_version: str = "0.1.0",
        approval_policy: str | None = None,
        effort: str | None = None,
        auto_approve: bool = False,
    ) -> None:
        self._config = AgentConfig(
            model=model,
            cwd=cwd,
            codex_path=codex_path,
            client_name=client_name,
            client_version=client_version,
            approval_policy=approval_policy,
            effort=effort,
        )
        self._command_handler: CommandHandler | None = None
        self._file_change_handler: FileChangeHandler | None = None
        self._tool_input_handler: ToolInputHandler | None = None
        # NOTE(review): populated by on_event() but never dispatched anywhere
        # in this module — confirm events are fanned out elsewhere or wire it
        # into the streaming paths.
        self._event_handlers: list[EventHandler] = []

        # Convenience flag: installs accept-everything handlers for both
        # commands and file changes.
        if auto_approve:
            self.auto_approve_commands()
            self.auto_approve_file_changes()

    # =========================================================================
    # Builder methods (return self for chaining)
    # =========================================================================

    def model(self, model: str) -> Agent:
        """Set the model to use."""
        self._config.model = model
        return self

    def cwd(self, path: str | Path) -> Agent:
        """Set the working directory."""
        self._config.cwd = path
        return self

    def codex_path(self, path: str) -> Agent:
        """Set the path to the codex CLI."""
        self._config.codex_path = path
        return self

    def approval_policy(self, policy: str) -> Agent:
        """Set the approval policy (e.g., 'on-request', 'auto-approve')."""
        self._config.approval_policy = policy
        return self

    def effort(self, level: str) -> Agent:
        """Set the effort level (e.g., 'low', 'medium', 'high')."""
        self._config.effort = level
        return self

    def env(self, **kwargs: str) -> Agent:
        """Set environment variables."""
        self._config.env.update(kwargs)
        return self

    def args(self, *args: str) -> Agent:
        """Add CLI arguments."""
        self._config.args.extend(args)
        return self

    # =========================================================================
    # Handler registration (decorators and methods)
    # =========================================================================

    def auto_approve_commands(self) -> Agent:
        """Automatically approve all command executions."""

        async def _auto_approve(_: CommandApproval) -> str:
            return "accept"

        self._command_handler = _auto_approve
        return self

    def auto_approve_file_changes(self) -> Agent:
        """Automatically approve all file changes."""

        async def _auto_approve(_: FileChangeApproval) -> str:
            return "accept"

        self._file_change_handler = _auto_approve
        return self

    @overload
    def on_command_approval(self, handler: CommandHandler) -> CommandHandler: ...

    @overload
    def on_command_approval(self) -> Callable[[CommandHandler], CommandHandler]: ...

    def on_command_approval(
        self, handler: CommandHandler | None = None
    ) -> CommandHandler | Callable[[CommandHandler], CommandHandler]:
        """
        Register a command approval handler.

        Can be used as a decorator or method:

            @agent.on_command_approval
            async def handle(cmd: CommandApproval) -> str:
                return "accept"

            # or

            agent.on_command_approval(my_handler)
        """

        def decorator(fn: CommandHandler) -> CommandHandler:
            self._command_handler = fn
            return fn

        # Called with a handler -> plain registration; without -> decorator.
        if handler is not None:
            return decorator(handler)
        return decorator

    @overload
    def on_file_change(self, handler: FileChangeHandler) -> FileChangeHandler: ...

    @overload
    def on_file_change(self) -> Callable[[FileChangeHandler], FileChangeHandler]: ...

    def on_file_change(
        self, handler: FileChangeHandler | None = None
    ) -> FileChangeHandler | Callable[[FileChangeHandler], FileChangeHandler]:
        """Register a file change approval handler."""

        def decorator(fn: FileChangeHandler) -> FileChangeHandler:
            self._file_change_handler = fn
            return fn

        if handler is not None:
            return decorator(handler)
        return decorator

    @overload
    def on_tool_input(self, handler: ToolInputHandler) -> ToolInputHandler: ...

    @overload
    def on_tool_input(self) -> Callable[[ToolInputHandler], ToolInputHandler]: ...

    def on_tool_input(
        self, handler: ToolInputHandler | None = None
    ) -> ToolInputHandler | Callable[[ToolInputHandler], ToolInputHandler]:
        """Register a tool input handler."""

        def decorator(fn: ToolInputHandler) -> ToolInputHandler:
            self._tool_input_handler = fn
            return fn

        if handler is not None:
            return decorator(handler)
        return decorator

    @overload
    def on_event(self, handler: EventHandler) -> EventHandler: ...

    @overload
    def on_event(self) -> Callable[[EventHandler], EventHandler]: ...

    def on_event(
        self, handler: EventHandler | None = None
    ) -> EventHandler | Callable[[EventHandler], EventHandler]:
        """Register an event handler that receives all events."""

        def decorator(fn: EventHandler) -> EventHandler:
            self._event_handlers.append(fn)
            return fn

        if handler is not None:
            return decorator(handler)
        return decorator

    # =========================================================================
    # Execution methods
    # =========================================================================

    def run(self, prompt: str, **kwargs: Any) -> StreamingResponse:
        """
        Run a prompt and return a streaming response.

        Examples
        --------
        Stream text chunks:

            async for chunk in agent.run("Hello"):
                print(chunk, end="")

        Get full text:

            text = await agent.run("Hello").text()

        Get full response with metadata:

            response = await agent.run("Hello").response()
            print(response.text)
            print(response.turn.id)

        Stream typed events:

            async for event in agent.run("Hello").events():
                if isinstance(event, MessageDelta):
                    print(event.delta)

        """
        # Lazy: no subprocess is spawned until the response is consumed.
        return StreamingResponse(self, prompt, kwargs)

    @asynccontextmanager
    async def conversation(self, **thread_kwargs: Any) -> AsyncIterator[Conversation]:
        """
        Start a conversation with context manager support.

        Examples
        --------
        async with agent.conversation() as conv:
            response = await conv.send("Hello!")
            print(response.text)

            # Continue the conversation
            response = await conv.send("Tell me more")
            print(response.text)

        """
        # Client lifetime is scoped to the `async with` block; all turns in
        # the conversation share it.
        async with self._create_client() as client:
            params = {"model": self._config.model}
            if self._config.cwd:
                params["cwd"] = str(self._config.cwd)
            params.update(thread_kwargs)

            thread_resp = await client.thread_start(params)
            thread = Thread.from_dict(thread_resp)

            yield Conversation(self, thread, client)

    # Alias for conversation
    thread = conversation

    # =========================================================================
    # Internal methods
    # =========================================================================

    @asynccontextmanager
    async def _create_client(self) -> AsyncIterator[CodexClient]:
        """Create a configured CodexClient."""
        options = CodexClientOptions(
            codex_path=self._config.codex_path,
            cwd=str(self._config.cwd) if self._config.cwd else None,
            env=self._config.env,
            args=self._config.args,
            client_name=self._config.client_name,
            client_version=self._config.client_version,
        )

        # Wrap handlers to convert raw params dicts into typed objects.
        # Handlers are re-checked inside each wrapper so a handler registered
        # after client creation still takes effect; with no handler the
        # defaults are "reject" (approvals) / empty answers (tool input).
        async def command_handler(params: dict[str, Any]) -> str | dict[str, Any]:
            if self._command_handler is None:
                return "reject"
            approval = CommandApproval.from_params(params)
            return await self._command_handler(approval)

        async def file_change_handler(params: dict[str, Any]) -> str | dict[str, Any]:
            if self._file_change_handler is None:
                return "reject"
            approval = FileChangeApproval.from_params(params)
            return await self._file_change_handler(approval)

        async def tool_input_handler(params: dict[str, Any]) -> dict[str, Any]:
            if self._tool_input_handler is None:
                return {"answers": {}}
            tool_input = ToolInput.from_params(params)
            return await self._tool_input_handler(tool_input)

        # Wrappers are only installed when a handler is currently registered.
        async with CodexClient(
            options=options,
            command_approval_handler=command_handler if self._command_handler else None,
            file_change_approval_handler=file_change_handler if self._file_change_handler else None,
            tool_input_handler=tool_input_handler if self._tool_input_handler else None,
        ) as client:
            yield client
|
|
545
|
+
|
|
546
|
+
|
|
547
|
+
# =============================================================================
|
|
548
|
+
# Module-level convenience function
|
|
549
|
+
# =============================================================================
|
|
550
|
+
|
|
551
|
+
|
|
552
|
+
def run(
    prompt: str,
    *,
    model: str = "gpt-5.2-codex",
    cwd: str | Path | None = None,
    auto_approve: bool = False,
    **kwargs: Any,
) -> StreamingResponse:
    """
    One-liner to run a prompt.

    Builds a throwaway Agent from the given options and delegates to
    ``Agent.run``; any extra keyword arguments are forwarded as turn
    parameters. The returned StreamingResponse is lazy — nothing executes
    until it is iterated or awaited.

    Examples
    --------
    from codex_agent_sdk import run

    # Stream text
    async for chunk in run("Hello", model="gpt-5.1-codex"):
        print(chunk, end="")

    # Get full text
    text = await run("Hello").text()

    """
    return Agent(
        model=model,
        cwd=cwd,
        auto_approve=auto_approve,
    ).run(prompt, **kwargs)
|