nonebot-plugin-codex 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,165 @@
1
+ from __future__ import annotations
2
+
3
+ from nonebot.params import CommandArg
4
+ from nonebot.adapters.telegram import Bot
5
+ from nonebot.plugin import PluginMetadata
6
+ from nonebot.adapters.telegram.message import Message
7
+ from nonebot.adapters.telegram import Adapter as TelegramAdapter
8
+ from nonebot import on_type, on_command, on_message, get_plugin_config
9
+ from nonebot.adapters.telegram.event import MessageEvent, CallbackQueryEvent
10
+
11
+ from .config import Config
12
+ from .telegram import TelegramHandlers
13
+ from .native_client import NativeCodexClient
14
+ from .service import CodexBridgeService, CodexBridgeSettings
15
+
16
# Plugin metadata exposed to NoneBot's plugin registry. Declares the Telegram
# adapter as the only supported adapter and advertises every slash command the
# plugin registers below.
__plugin_meta__ = PluginMetadata(
    name="Codex",
    description="Telegram bridge plugin for driving Codex from NoneBot",
    usage=(
        "/codex [prompt], /mode, /exec, /new, /stop, /models, /model, /effort, "
        "/permission, /pwd, /cd, /home, /sessions"
    ),
    homepage="https://github.com/ttiee/nonebot-plugin-codex",
    type="application",
    config=Config,
    supported_adapters={TelegramAdapter},
)
28
+
29
# Load the user's configuration via NoneBot. On validation failure
# (presumably a pydantic ValidationError, a ValueError subclass — TODO
# confirm against the NoneBot version in use) fall back to default settings
# and mark the runtime as not ready so no matchers get registered below.
try:
    plugin_config = get_plugin_config(Config)
    _runtime_ready = True
except ValueError:
    plugin_config = Config()
    _runtime_ready = False
35
+
36
# Single shared bridge service, wired entirely from the (possibly default)
# plugin configuration. The NativeCodexClient talks to the `codex` binary
# directly; CodexBridgeSettings carries paths, limits, and timeouts.
service = CodexBridgeService(
    CodexBridgeSettings(
        binary=plugin_config.codex_binary,
        workdir=str(plugin_config.codex_workdir),
        kill_timeout=plugin_config.codex_kill_timeout,
        progress_history=plugin_config.codex_progress_history,
        diagnostic_history=plugin_config.codex_diagnostic_history,
        chunk_size=plugin_config.codex_chunk_size,
        stream_read_limit=plugin_config.codex_stream_read_limit,
        models_cache_path=plugin_config.codex_models_cache_path,
        codex_config_path=plugin_config.codex_codex_config_path,
        preferences_path=plugin_config.codex_preferences_path,
        session_index_path=plugin_config.codex_session_index_path,
        sessions_dir=plugin_config.codex_sessions_dir,
        archived_sessions_dir=plugin_config.codex_archived_sessions_dir,
    ),
    native_client=NativeCodexClient(
        binary=plugin_config.codex_binary,
        stream_read_limit=plugin_config.codex_stream_read_limit,
    ),
)
# All Telegram-facing behavior lives in TelegramHandlers; the functions
# registered below are thin delegating wrappers.
handlers = TelegramHandlers(service)
58
+
59
# Matchers are only registered when configuration loaded successfully, so an
# unconfigured bot does not react to these commands at all.
if _runtime_ready:
    # Slash-command matchers; priority 10, blocking so later matchers
    # don't also fire for the same message.
    codex_cmd = on_command("codex", priority=10, block=True)
    mode_cmd = on_command("mode", priority=10, block=True)
    exec_cmd = on_command("exec", priority=10, block=True)
    new_cmd = on_command("new", priority=10, block=True)
    stop_cmd = on_command("stop", priority=10, block=True)
    models_cmd = on_command("models", priority=10, block=True)
    model_cmd = on_command("model", priority=10, block=True)
    effort_cmd = on_command("effort", priority=10, block=True)
    permission_cmd = on_command("permission", priority=10, block=True)
    pwd_cmd = on_command("pwd", priority=10, block=True)
    cd_cmd = on_command("cd", priority=10, block=True)
    home_cmd = on_command("home", priority=10, block=True)
    sessions_cmd = on_command("sessions", priority=10, block=True)
    # Free-form follow-up messages in an active session; lower priority (20)
    # so explicit commands above win first.
    follow_up = on_message(priority=20, block=True, rule=handlers.is_active_follow_up)
    # Inline-keyboard callback queries, routed by rule to the session
    # browser or the history viewer respectively.
    browser_callback = on_type(
        CallbackQueryEvent,
        priority=10,
        block=True,
        rule=handlers.is_browser_callback,
    )
    history_callback = on_type(
        CallbackQueryEvent,
        priority=10,
        block=True,
        rule=handlers.is_history_callback,
    )

    # Each handler below simply forwards the event (and parsed command
    # arguments, where present) to the corresponding TelegramHandlers method.

    @codex_cmd.handle()
    async def _handle_codex(
        bot: Bot, event: MessageEvent, args: Message = CommandArg()
    ) -> None:
        await handlers.handle_codex(bot, event, args)

    @mode_cmd.handle()
    async def _handle_mode(
        bot: Bot, event: MessageEvent, args: Message = CommandArg()
    ) -> None:
        await handlers.handle_mode(bot, event, args)

    @exec_cmd.handle()
    async def _handle_exec(
        bot: Bot, event: MessageEvent, args: Message = CommandArg()
    ) -> None:
        await handlers.handle_exec(bot, event, args)

    @new_cmd.handle()
    async def _handle_new(bot: Bot, event: MessageEvent) -> None:
        await handlers.handle_new(bot, event)

    @stop_cmd.handle()
    async def _handle_stop(bot: Bot, event: MessageEvent) -> None:
        await handlers.handle_stop(bot, event)

    @models_cmd.handle()
    async def _handle_models(bot: Bot, event: MessageEvent) -> None:
        await handlers.handle_models(bot, event)

    @model_cmd.handle()
    async def _handle_model(
        bot: Bot, event: MessageEvent, args: Message = CommandArg()
    ) -> None:
        await handlers.handle_model(bot, event, args)

    @effort_cmd.handle()
    async def _handle_effort(
        bot: Bot, event: MessageEvent, args: Message = CommandArg()
    ) -> None:
        await handlers.handle_effort(bot, event, args)

    @permission_cmd.handle()
    async def _handle_permission(
        bot: Bot,
        event: MessageEvent,
        args: Message = CommandArg(),
    ) -> None:
        await handlers.handle_permission(bot, event, args)

    @pwd_cmd.handle()
    async def _handle_pwd(bot: Bot, event: MessageEvent) -> None:
        await handlers.handle_pwd(bot, event)

    @cd_cmd.handle()
    async def _handle_cd(
        bot: Bot, event: MessageEvent, args: Message = CommandArg()
    ) -> None:
        await handlers.handle_cd(bot, event, args)

    @home_cmd.handle()
    async def _handle_home(bot: Bot, event: MessageEvent) -> None:
        await handlers.handle_home(bot, event)

    @sessions_cmd.handle()
    async def _handle_sessions(bot: Bot, event: MessageEvent) -> None:
        await handlers.handle_sessions(bot, event)

    @browser_callback.handle()
    async def _handle_browser_callback(bot: Bot, event: CallbackQueryEvent) -> None:
        await handlers.handle_browser_callback(bot, event)

    @history_callback.handle()
    async def _handle_history_callback(bot: Bot, event: CallbackQueryEvent) -> None:
        await handlers.handle_history_callback(bot, event)

    @follow_up.handle()
    async def _handle_follow_up(bot: Bot, event: MessageEvent) -> None:
        await handlers.handle_follow_up(bot, event)
@@ -0,0 +1,84 @@
1
+ from __future__ import annotations
2
+
3
+ from pathlib import Path
4
+
5
+ from pydantic import Field, BaseModel, AliasChoices
6
+
7
+
8
class Config(BaseModel):
    """Plugin configuration.

    Every field accepts two setting keys via ``AliasChoices``: the current
    ``codex_*`` name and a ``codex_bridge_*`` name (presumably a legacy
    alias kept for backward compatibility — confirm against older releases).
    """

    # Name or path of the Codex CLI executable.
    codex_binary: str = Field(
        default="codex",
        validation_alias=AliasChoices("codex_binary", "codex_bridge_binary"),
    )
    # Default working directory for Codex sessions; the user's home dir.
    codex_workdir: Path = Field(
        default_factory=Path.home,
        validation_alias=AliasChoices("codex_workdir", "codex_bridge_workdir"),
    )
    # Seconds to wait after terminate before force-killing the process.
    codex_kill_timeout: float = Field(
        default=5.0,
        validation_alias=AliasChoices("codex_kill_timeout", "codex_bridge_kill_timeout"),
    )
    # Number of progress entries retained for display.
    codex_progress_history: int = Field(
        default=6,
        validation_alias=AliasChoices(
            "codex_progress_history",
            "codex_bridge_progress_history",
        ),
    )
    # Number of diagnostic lines retained for display.
    codex_diagnostic_history: int = Field(
        default=20,
        validation_alias=AliasChoices(
            "codex_diagnostic_history",
            "codex_bridge_diagnostic_history",
        ),
    )
    # Maximum characters per outgoing message chunk.
    codex_chunk_size: int = Field(
        default=3500,
        validation_alias=AliasChoices("codex_chunk_size", "codex_bridge_chunk_size"),
    )
    # Stream buffer limit (bytes) for reading subprocess output; 1 MiB.
    codex_stream_read_limit: int = Field(
        default=1024 * 1024,
        validation_alias=AliasChoices(
            "codex_stream_read_limit",
            "codex_bridge_stream_read_limit",
        ),
    )
    # Cache file for the model list fetched from Codex.
    codex_models_cache_path: Path = Field(
        default_factory=lambda: Path.home() / ".codex" / "models_cache.json",
        validation_alias=AliasChoices(
            "codex_models_cache_path",
            "codex_bridge_models_cache_path",
        ),
    )
    # Codex's own TOML configuration file.
    codex_codex_config_path: Path = Field(
        default_factory=lambda: Path.home() / ".codex" / "config.toml",
        validation_alias=AliasChoices(
            "codex_codex_config_path",
            "codex_bridge_codex_config_path",
        ),
    )
    # Plugin-local preferences store (relative "data" dir by default).
    codex_preferences_path: Path = Field(
        default_factory=lambda: Path("data") / "codex_bridge" / "preferences.json",
        validation_alias=AliasChoices(
            "codex_preferences_path",
            "codex_bridge_preferences_path",
        ),
    )
    # JSONL index of known Codex sessions.
    codex_session_index_path: Path = Field(
        default_factory=lambda: Path.home() / ".codex" / "session_index.jsonl",
        validation_alias=AliasChoices(
            "codex_session_index_path",
            "codex_bridge_session_index_path",
        ),
    )
    # Directory holding live session data.
    codex_sessions_dir: Path = Field(
        default_factory=lambda: Path.home() / ".codex" / "sessions",
        validation_alias=AliasChoices("codex_sessions_dir", "codex_bridge_sessions_dir"),
    )
    # Directory where archived sessions are moved.
    codex_archived_sessions_dir: Path = Field(
        default_factory=lambda: Path.home() / ".codex" / "archived_sessions",
        validation_alias=AliasChoices(
            "codex_archived_sessions_dir",
            "codex_bridge_archived_sessions_dir",
        ),
    )
@@ -0,0 +1,419 @@
1
+ from __future__ import annotations
2
+
3
+ import json
4
+ import asyncio
5
+ import inspect
6
+ from dataclasses import field, dataclass
7
+ from typing import Any
8
+ from collections.abc import Callable, Awaitable
9
+
10
# Progress/stream callback: receives a text update; may be a plain function
# or a coroutine function (_maybe_call awaits the result if awaitable).
Callback = Callable[[str], object]
# Factory that launches the app-server subprocess; matches the signature of
# asyncio.create_subprocess_exec and is injectable for testing.
ProcessLauncher = Callable[..., Awaitable[Any]]
12
+
13
+
14
@dataclass(slots=True)
class NativeThreadSummary:
    """Lightweight view of one Codex thread as reported by the app-server."""

    thread_id: str  # server-side thread id ("" when missing from payload)
    thread_name: str  # display name; falls back to preview, then the id
    updated_at: str  # last-update timestamp, verbatim from the server
    cwd: str | None  # working directory, when the payload provides a string
    source_kind: str  # normalized origin label (e.g. "cli", "subAgent:x", "unknown")
    preview: str | None = None  # non-blank preview text, else None
22
+
23
+
24
@dataclass(slots=True)
class NativeRunResult:
    """Outcome of a single Codex turn."""

    exit_code: int  # 0 only when the turn completed without error
    final_text: str = ""  # last complete agent message (stripped)
    thread_id: str | None = None  # thread the turn ran on
    diagnostics: list[str] = field(default_factory=list)  # non-JSON output lines
30
+
31
+
32
+ def _normalize_source_kind(source: object) -> str:
33
+ if isinstance(source, str) and source:
34
+ return source
35
+ if isinstance(source, dict) and "subAgent" in source:
36
+ sub_agent = source["subAgent"]
37
+ if isinstance(sub_agent, str) and sub_agent:
38
+ return f"subAgent:{sub_agent}"
39
+ if isinstance(sub_agent, dict):
40
+ return "subAgent"
41
+ return "unknown"
42
+
43
+
44
def _thread_summary_from_payload(thread: dict[str, Any]) -> NativeThreadSummary:
    """Build a :class:`NativeThreadSummary` from a raw thread JSON payload.

    Missing fields degrade gracefully: the name falls back to the preview and
    then the id; ``cwd``/``preview`` become None unless they are usable strings.
    """
    tid = str(thread.get("id") or "")
    raw_cwd = thread.get("cwd")
    raw_preview = thread.get("preview")
    has_preview = isinstance(raw_preview, str) and bool(raw_preview.strip())
    return NativeThreadSummary(
        thread_id=tid,
        thread_name=str(thread.get("name") or thread.get("preview") or tid),
        # Accept both camelCase and snake_case timestamp keys.
        updated_at=str(thread.get("updatedAt") or thread.get("updated_at") or ""),
        cwd=raw_cwd if isinstance(raw_cwd, str) else None,
        source_kind=_normalize_source_kind(thread.get("source")),
        preview=raw_preview if has_preview else None,
    )
58
+
59
+
60
+ async def _maybe_call(callback: Callback | None, text: str) -> None:
61
+ if callback is None:
62
+ return
63
+ result = callback(text)
64
+ if inspect.isawaitable(result):
65
+ await result
66
+
67
+
68
+ def _trim_progress_command(command: str, limit: int = 120) -> str:
69
+ compact = " ".join(command.split())
70
+ if len(compact) <= limit:
71
+ return compact
72
+ return f"{compact[: limit - 3]}..."
73
+
74
+
75
+ async def _terminate_process(process: Any, timeout: float) -> None:
76
+ if process is None:
77
+ return
78
+ if getattr(process, "returncode", None) is not None:
79
+ return
80
+ process.terminate()
81
+ try:
82
+ await asyncio.wait_for(process.wait(), timeout=timeout)
83
+ except asyncio.TimeoutError:
84
+ process.kill()
85
+ await process.wait()
86
+
87
+
88
class NativeCodexClient:
    """Minimal JSON-RPC client for a long-lived ``codex app-server`` process.

    Messages are exchanged as newline-delimited JSON over the child's
    stdin/stdout (stderr is merged into stdout, so stray non-JSON lines show
    up as diagnostics). The subprocess is launched lazily on first use and
    kept around until :meth:`close`.
    """

    def __init__(
        self,
        *,
        binary: str = "codex",
        launcher: ProcessLauncher | None = None,
        client_name: str = "tg_bot",
        client_version: str = "0",
        stream_read_limit: int = 1024 * 1024,
    ) -> None:
        self.binary = binary
        # Injectable for tests; defaults to spawning a real subprocess.
        self.launcher = launcher or asyncio.create_subprocess_exec
        self.client_name = client_name
        self.client_version = client_version
        # Passed as the asyncio stream `limit` so long JSON lines fit.
        self.stream_read_limit = stream_read_limit
        self._process: Any = None  # running app-server process, if any
        self._initialized = False  # True once the initialize handshake is done
        self._next_request_id = 1  # monotonically increasing JSON-RPC request id

    def clone(self) -> NativeCodexClient:
        """Return a fresh, unconnected client with the same configuration."""
        return NativeCodexClient(
            binary=self.binary,
            launcher=self.launcher,
            client_name=self.client_name,
            client_version=self.client_version,
            stream_read_limit=self.stream_read_limit,
        )

    async def close(self, timeout: float = 5.0) -> None:
        """Stop the subprocess (terminate, then kill after *timeout*) and reset."""
        process = self._process
        # Reset state before awaiting so a later call starts a clean process.
        self._process = None
        self._initialized = False
        self._next_request_id = 1
        await _terminate_process(process, timeout)

    async def start_thread(
        self,
        *,
        workdir: str,
        model: str,
        reasoning_effort: str,
        permission_mode: str,
    ) -> NativeThreadSummary:
        """Create a new Codex thread and return its summary.

        Raises RuntimeError when the response lacks a ``thread`` object.
        """
        result = await self._request(
            "thread/start",
            {
                "cwd": workdir,
                "model": model,
                "config": {"model_reasoning_effort": reasoning_effort},
                **self._permission_params(permission_mode),
            },
        )
        thread = result.get("thread")
        if not isinstance(thread, dict):
            raise RuntimeError("thread/start 缺少 thread 响应。")
        return _thread_summary_from_payload(thread)

    async def resume_thread(
        self,
        thread_id: str,
        *,
        workdir: str,
        model: str,
        reasoning_effort: str,
        permission_mode: str,
    ) -> NativeThreadSummary:
        """Resume an existing thread by id and return its summary.

        Raises RuntimeError when the response lacks a ``thread`` object.
        """
        result = await self._request(
            "thread/resume",
            {
                "threadId": thread_id,
                "cwd": workdir,
                "model": model,
                "config": {"model_reasoning_effort": reasoning_effort},
                **self._permission_params(permission_mode),
            },
        )
        thread = result.get("thread")
        if not isinstance(thread, dict):
            raise RuntimeError("thread/resume 缺少 thread 响应。")
        return _thread_summary_from_payload(thread)

    async def run_turn(
        self,
        thread_id: str,
        prompt: str,
        *,
        cwd: str | None = None,
        model: str | None = None,
        reasoning_effort: str | None = None,
        on_progress: Callback | None = None,
        on_stream_text: Callback | None = None,
    ) -> NativeRunResult:
        """Send *prompt* to *thread_id* and pump events until the turn ends.

        ``on_progress`` receives short status lines (turn start, command
        start/finish); ``on_stream_text`` receives the accumulated agent
        answer so far. Returns a NativeRunResult whose exit_code is 0 only
        for a clean, error-free completion.
        """
        diagnostics: list[str] = []
        streamed_text = ""  # running concatenation of agentMessage deltas
        final_text = ""  # last complete agentMessage text seen

        await self._request(
            "turn/start",
            self._turn_start_params(
                thread_id=thread_id,
                prompt=prompt,
                cwd=cwd,
                model=model,
                reasoning_effort=reasoning_effort,
            ),
            diagnostics=diagnostics,
        )

        # Event pump: only a turn/completed notification (or a transport
        # error raised from _read_message) exits this loop.
        while True:
            message = await self._read_message(diagnostics)
            if message is None:
                continue

            method = message.get("method")
            params = message.get("params")
            if not isinstance(method, str) or not isinstance(params, dict):
                continue

            if method == "turn/started":
                await _maybe_call(on_progress, "开始处理请求")
                continue

            if method in {"item/started", "item/completed"}:
                item = params.get("item")
                if not isinstance(item, dict):
                    continue
                item_type = item.get("type")
                if item_type == "commandExecution":
                    command = _trim_progress_command(str(item.get("command") or ""))
                    prefix = "执行" if method == "item/started" else "完成"
                    await _maybe_call(on_progress, f"{prefix}: {command}")
                    continue
                if item_type == "agentMessage":
                    text = item.get("text")
                    if isinstance(text, str) and text.strip():
                        # A complete message supersedes any partial deltas.
                        final_text = text.strip()
                        streamed_text = final_text
                        await _maybe_call(on_stream_text, final_text)
                continue

            if method == "item/agentMessage/delta":
                delta = params.get("delta")
                if isinstance(delta, str) and delta:
                    streamed_text += delta
                    await _maybe_call(on_stream_text, streamed_text)
                continue

            if method == "turn/completed":
                turn = params.get("turn")
                if not isinstance(turn, dict):
                    # Malformed completion payload — report as a failure.
                    return NativeRunResult(
                        exit_code=1,
                        final_text=final_text,
                        thread_id=thread_id,
                        diagnostics=diagnostics,
                    )
                status = turn.get("status")
                error = turn.get("error")
                exit_code = 0 if status == "completed" and error is None else 1
                return NativeRunResult(
                    exit_code=exit_code,
                    final_text=final_text,
                    thread_id=str(params.get("threadId") or thread_id),
                    diagnostics=diagnostics,
                )

    async def list_threads(self) -> list[NativeThreadSummary]:
        """Fetch every known thread, following ``nextCursor`` pagination.

        Raises RuntimeError when a page lacks a ``data`` list.
        """
        threads: list[NativeThreadSummary] = []
        cursor: str | None = None

        while True:
            params: dict[str, Any] = {
                "sortKey": "updated_at",
                "sourceKinds": ["cli", "vscode", "appServer"],
                "limit": 100,
            }
            if cursor is not None:
                params["cursor"] = cursor

            result = await self._request("thread/list", params)
            entries = result.get("data")
            if not isinstance(entries, list):
                raise RuntimeError("thread/list 缺少 data 响应。")
            # Non-dict entries are silently skipped.
            threads.extend(
                _thread_summary_from_payload(thread)
                for thread in entries
                if isinstance(thread, dict)
            )

            next_cursor = result.get("nextCursor")
            if not isinstance(next_cursor, str) or not next_cursor:
                break
            cursor = next_cursor

        return threads

    def _permission_params(self, permission_mode: str) -> dict[str, str]:
        """Translate a permission mode ("safe"/"danger") to request params.

        Raises ValueError for any other mode.
        """
        if permission_mode == "safe":
            return {"approvalPolicy": "never", "sandbox": "workspace-write"}
        if permission_mode == "danger":
            return {
                "approvalPolicy": "never",
                "sandbox": "danger-full-access",
            }
        raise ValueError(f"Unsupported permission mode: {permission_mode}")

    def _turn_start_params(
        self,
        *,
        thread_id: str,
        prompt: str,
        cwd: str | None,
        model: str | None,
        reasoning_effort: str | None,
    ) -> dict[str, Any]:
        """Build the turn/start params, omitting any None-valued overrides."""
        params: dict[str, Any] = {
            "threadId": thread_id,
            "input": [{"type": "text", "text": prompt}],
        }
        if cwd is not None:
            params["cwd"] = cwd
        if model is not None:
            params["model"] = model
        if reasoning_effort is not None:
            params["effort"] = reasoning_effort
        return params

    async def _ensure_initialized(self) -> None:
        """Lazily spawn the app-server and perform the initialize handshake."""
        if self._initialized and self._process is not None:
            return
        self._process = await self.launcher(
            self.binary,
            "app-server",
            "--listen",
            "stdio://",
            stdin=asyncio.subprocess.PIPE,
            stdout=asyncio.subprocess.PIPE,
            # Merge stderr into stdout; non-JSON lines land in diagnostics.
            stderr=asyncio.subprocess.STDOUT,
            limit=self.stream_read_limit,
        )
        request_id = self._allocate_request_id()
        await self._write_message(
            {
                "jsonrpc": "2.0",
                "id": request_id,
                "method": "initialize",
                "params": {
                    "clientInfo": {
                        "name": self.client_name,
                        "version": self.client_version,
                    }
                },
            }
        )
        # Diagnostics produced during the handshake are deliberately dropped.
        await self._read_response(request_id, diagnostics=[])
        await self._write_message(
            {
                "jsonrpc": "2.0",
                "method": "notifications/initialized",
                "params": {},
            }
        )
        self._initialized = True

    async def _request(
        self,
        method: str,
        params: dict[str, Any],
        *,
        diagnostics: list[str] | None = None,
    ) -> dict[str, Any]:
        """Send one JSON-RPC request and return its ``result`` dict.

        Starts the server first if needed; non-JSON output read while waiting
        is appended to *diagnostics* (discarded when None is passed).
        """
        await self._ensure_initialized()
        request_id = self._allocate_request_id()
        await self._write_message(
            {
                "jsonrpc": "2.0",
                "id": request_id,
                "method": method,
                "params": params,
            }
        )
        return await self._read_response(request_id, diagnostics=diagnostics or [])

    async def _write_message(self, payload: dict[str, Any]) -> None:
        """Serialize *payload* as one JSON line and flush it to the server.

        Raises RuntimeError when the subprocess (or its stdin) is missing.
        """
        if self._process is None or getattr(self._process, "stdin", None) is None:
            raise RuntimeError("Codex app-server 尚未启动。")
        data = json.dumps(payload, ensure_ascii=False) + "\n"
        self._process.stdin.write(data.encode("utf-8"))
        await self._process.stdin.drain()

    async def _read_response(
        self,
        request_id: int,
        *,
        diagnostics: list[str],
    ) -> dict[str, Any]:
        """Read messages until the reply with *request_id* arrives.

        Messages with other (or no) ids — notifications included — are
        skipped here. Raises RuntimeError on an ``error`` reply or when the
        ``result`` is not a dict.
        """
        while True:
            message = await self._read_message(diagnostics)
            if message is None:
                continue
            if message.get("id") != request_id:
                continue
            error = message.get("error")
            if isinstance(error, dict):
                raise RuntimeError(
                    str(error.get("message") or "Codex app-server 请求失败。")
                )
            result = message.get("result")
            if not isinstance(result, dict):
                raise RuntimeError("Codex app-server 返回了无效响应。")
            return result

    async def _read_message(self, diagnostics: list[str]) -> dict[str, Any] | None:
        """Read one line from the server and parse it as a JSON object.

        Returns None for blank lines, undecodable JSON (recorded in
        *diagnostics*), and non-dict JSON values. Raises RuntimeError when
        the server has not started or its stdout hits EOF.
        """
        if self._process is None or getattr(self._process, "stdout", None) is None:
            raise RuntimeError("Codex app-server 尚未启动。")
        raw_line = await self._process.stdout.readline()
        if not raw_line:
            # EOF: the subprocess died while we were waiting on it.
            raise RuntimeError("Codex app-server 已提前退出。")
        line = raw_line.decode("utf-8", errors="replace").strip()
        if not line:
            return None
        try:
            message = json.loads(line)
        except json.JSONDecodeError:
            # Likely stderr noise (merged into stdout); keep it for debugging.
            diagnostics.append(line)
            return None
        return message if isinstance(message, dict) else None

    def _allocate_request_id(self) -> int:
        """Return the next JSON-RPC id and advance the counter."""
        request_id = self._next_request_id
        self._next_request_id += 1
        return request_id