sibyl-cli 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- parallel_developer/__init__.py +5 -0
- parallel_developer/cli.py +649 -0
- parallel_developer/controller/__init__.py +1398 -0
- parallel_developer/controller/commands.py +132 -0
- parallel_developer/controller/events.py +17 -0
- parallel_developer/controller/flow.py +43 -0
- parallel_developer/controller/history.py +70 -0
- parallel_developer/controller/pause.py +94 -0
- parallel_developer/controller/workflow_runner.py +135 -0
- parallel_developer/orchestrator.py +1234 -0
- parallel_developer/services/__init__.py +14 -0
- parallel_developer/services/codex_monitor.py +627 -0
- parallel_developer/services/log_manager.py +161 -0
- parallel_developer/services/tmux_manager.py +245 -0
- parallel_developer/services/worktree_manager.py +119 -0
- parallel_developer/stores/__init__.py +20 -0
- parallel_developer/stores/session_manifest.py +165 -0
- parallel_developer/stores/settings_store.py +242 -0
- parallel_developer/ui/widgets.py +269 -0
- sibyl_cli-0.2.0.dist-info/METADATA +15 -0
- sibyl_cli-0.2.0.dist-info/RECORD +23 -0
- sibyl_cli-0.2.0.dist-info/WHEEL +4 -0
- sibyl_cli-0.2.0.dist-info/entry_points.txt +4 -0
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
"""Service layer components exposed for orchestrator and controller."""
|
|
2
|
+
|
|
3
|
+
from .codex_monitor import CodexMonitor, SessionReservationError
|
|
4
|
+
from .log_manager import LogManager
|
|
5
|
+
from .tmux_manager import TmuxLayoutManager
|
|
6
|
+
from .worktree_manager import WorktreeManager
|
|
7
|
+
|
|
8
|
+
__all__ = [
|
|
9
|
+
"CodexMonitor",
|
|
10
|
+
"LogManager",
|
|
11
|
+
"SessionReservationError",
|
|
12
|
+
"TmuxLayoutManager",
|
|
13
|
+
"WorktreeManager",
|
|
14
|
+
]
|
|
@@ -0,0 +1,627 @@
|
|
|
1
|
+
"""Codex セッションのロールアウトを監視するサービス."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
import os
|
|
7
|
+
import time
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
from typing import Any, Dict, Iterable, List, Mapping, MutableMapping, Optional, Sequence, Set, Union
|
|
10
|
+
|
|
11
|
+
import yaml
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class SessionReservationError(RuntimeError):
    """Raised when a Codex rollout is already reserved by another namespace.

    Another parallel-dev instance has claimed the session in the shared
    registry, so this instance must not touch it.
    """

    def __init__(self, session_id: str, owner_namespace: Optional[str]) -> None:
        self.session_id = session_id
        # Registry records may lack a namespace; report "unknown" rather than None.
        self.owner_namespace = owner_namespace or "unknown"
        super().__init__(
            f"Codex session {session_id} is currently reserved by namespace '{self.owner_namespace}'. "
            "別の parallel-dev インスタンスが使用中のため、処理を中断します。"
        )
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class CodexMonitor:
    """Read Codex rollout JSONL files and track per-session state/completion.

    State is persisted in a YAML "session map" file with two top-level keys:
    ``panes`` (tmux pane id -> session info) and ``sessions`` (session id ->
    pane/rollout/offset info).  A sibling ``codex_session_registry`` directory
    holds one JSON record per reserved session so that concurrent
    parallel-dev instances (distinguished by namespace) do not steal each
    other's sessions.
    """

    def __init__(
        self,
        logs_dir: Path,
        session_map_path: Path,
        *,
        codex_sessions_root: Optional[Path] = None,
        poll_interval: float = 1.0,
        session_namespace: Optional[str] = None,
    ) -> None:
        """Create the monitor and ensure its log/map files exist on disk.

        Args:
            logs_dir: Directory for instruction logs; created if missing,
                along with a ``sessions`` subdirectory.
            session_map_path: YAML file mapping panes/sessions; seeded with
                ``{}`` when absent.
            codex_sessions_root: Root directory scanned for
                ``rollout-*.jsonl`` files; defaults to ``~/.codex/sessions``.
            poll_interval: Seconds between polls in the wait loops.
            session_namespace: Identity used for registry reservations;
                defaults to ``"default"``.
        """
        self.logs_dir = Path(logs_dir)
        self.session_map_path = Path(session_map_path)
        self.poll_interval = poll_interval
        self.logs_dir.mkdir(parents=True, exist_ok=True)
        (self.logs_dir / "sessions").mkdir(parents=True, exist_ok=True)
        if not self.session_map_path.exists():
            # Seed an empty YAML/JSON mapping so _load_map never sees a missing file.
            self.session_map_path.write_text("{}\n", encoding="utf-8")
        self.codex_sessions_root = (
            Path(codex_sessions_root)
            if codex_sessions_root is not None
            else Path.home() / ".codex" / "sessions"
        )
        self._session_namespace = session_namespace or "default"
        # Registry of session reservations lives next to the session map.
        self._registry_dir = self.session_map_path.parent / "codex_session_registry"
        # Sessions this instance has successfully reserved.
        self._owned_sessions: Set[str] = set()
        # Sessions externally marked complete via force_completion().
        self._forced_done: Set[str] = set()
        # Signal-file paths currently watched by await_completion().
        self._active_signal_paths: Dict[str, Path] = {}

    def register_session(self, *, pane_id: str, session_id: str, rollout_path: Path) -> None:
        """Reserve *session_id* and record pane/session entries in the map.

        The current rollout file size becomes the read offset, so completion
        scanning starts from "now" rather than re-reading old content.

        Raises:
            SessionReservationError: Propagated from _reserve_session when the
                session is held by another live namespace.
        """
        try:
            offset = rollout_path.stat().st_size
        except OSError:
            offset = 0

        self._reserve_session(session_id, rollout_path)

        data = self._load_map()
        panes = data.setdefault("panes", {})
        sessions = data.setdefault("sessions", {})

        panes[pane_id] = {
            "session_id": session_id,
            "rollout_path": str(rollout_path),
            "offset": int(offset),
        }
        sessions[session_id] = {
            "pane_id": pane_id,
            "rollout_path": str(rollout_path),
            "offset": int(offset),
        }
        self._write_map(data)

    def consume_session_until_eof(self, session_id: str) -> None:
        """Advance the session's stored offset to the rollout file's current end.

        Effectively discards any unread rollout content.  No-op when the
        session is unknown or its rollout file is missing/unstatable.
        """
        data = self._load_map()
        entry = data.get("sessions", {}).get(session_id)
        if entry is None:
            return
        rollout_path = Path(entry.get("rollout_path", ""))
        if not rollout_path.exists():
            return
        try:
            size = rollout_path.stat().st_size
        except OSError:
            return
        entry["offset"] = int(size)
        sessions = data.setdefault("sessions", {})
        sessions[session_id] = entry
        panes = data.setdefault("panes", {})
        # Keep every pane that points at this session in sync with the new offset.
        for pane_id, pane_entry in panes.items():
            if pane_entry.get("session_id") == session_id:
                pane_entry["offset"] = int(size)
        self._write_map(data)

    def refresh_session_id(self, session_id: str) -> str:
        """Replace a placeholder ``unknown-*`` id with the real id from the rollout.

        Re-reads the rollout's ``session_meta`` record; if a concrete id is
        found, rewrites the map, internal bookkeeping sets, and the registry
        record file to use it.

        Returns:
            The resolved session id, or *session_id* unchanged when it is not
            a placeholder, is unknown, or the rollout still has no real id.
        """
        data = self._load_map()
        sessions = data.get("sessions", {})
        entry = sessions.get(session_id)
        if entry is None or not session_id.startswith("unknown-"):
            return session_id

        rollout_path = Path(entry.get("rollout_path", ""))
        actual_id = self._extract_session_meta(rollout_path)
        if not actual_id or actual_id.startswith("unknown-"):
            return session_id

        entry["session_id"] = actual_id
        sessions.pop(session_id, None)
        sessions[actual_id] = entry

        panes = data.get("panes", {})
        for pane_entry in panes.values():
            if pane_entry.get("session_id") == session_id:
                pane_entry["session_id"] = actual_id

        self._write_map(data)

        # Migrate in-memory bookkeeping to the new id.
        if session_id in self._owned_sessions:
            self._owned_sessions.discard(session_id)
            self._owned_sessions.add(actual_id)
        if session_id in self._forced_done:
            self._forced_done.discard(session_id)
            self._forced_done.add(actual_id)

        # Best-effort rename of the reservation record; failure is tolerated.
        if self._registry_dir.exists():
            old_record = self._registry_dir / f"{session_id}.json"
            new_record = self._registry_dir / f"{actual_id}.json"
            if old_record.exists():
                try:
                    old_record.rename(new_record)
                except OSError:
                    pass

        return actual_id

    def bind_existing_session(self, *, pane_id: str, session_id: str) -> None:
        """Re-attach an already-known session to *pane_id*.

        Removes any stale pane entries that reference either the pane or the
        session, then records the pane with a fresh offset taken from the
        rollout file (falling back to the stored offset on stat failure).

        Raises:
            RuntimeError: If the session is not present in the session map.
        """
        data = self._load_map()
        sessions = data.setdefault("sessions", {})
        entry = sessions.get(session_id)
        if entry is None:
            raise RuntimeError(f"Session {session_id!r} not found in session_map")

        rollout_path = Path(entry.get("rollout_path", ""))
        try:
            offset = rollout_path.stat().st_size
        except OSError:
            offset = int(entry.get("offset", 0))

        entry["pane_id"] = pane_id
        entry["offset"] = int(offset)

        panes = data.setdefault("panes", {})
        # Drop any previous binding for this pane or this session.
        for existing_pane, pane_entry in list(panes.items()):
            if existing_pane == pane_id or pane_entry.get("session_id") == session_id:
                panes.pop(existing_pane, None)

        panes[pane_id] = {
            "session_id": session_id,
            "rollout_path": entry["rollout_path"],
            "offset": int(offset),
        }

        self._write_map(data)

    def snapshot_rollouts(self) -> Dict[Path, float]:
        """Return {rollout path: mtime} for every rollout under the sessions root.

        Files that vanish between glob and stat are skipped.  Used as a
        baseline to detect rollouts created afterwards.
        """
        if not self.codex_sessions_root.exists():
            return {}
        result: Dict[Path, float] = {}
        for path in self.codex_sessions_root.glob("**/rollout-*.jsonl"):
            try:
                result[path] = path.stat().st_mtime
            except FileNotFoundError:
                continue
        return result

    def register_new_rollout(
        self,
        *,
        pane_id: str,
        baseline: Mapping[Path, float],
        timeout_seconds: float = 30.0,
    ) -> str:
        """Wait for one new rollout (vs. *baseline*) and register it to *pane_id*.

        Rollouts reserved by another namespace are skipped and the wait
        continues until the deadline.

        Returns:
            The session id parsed from the first successfully registered rollout.

        Raises:
            TimeoutError: No registrable rollout appeared within *timeout_seconds*.
        """
        baseline_map: Dict[Path, float] = dict(baseline)
        deadline = time.time() + timeout_seconds

        while True:
            remaining = deadline - time.time()
            if remaining <= 0:
                break
            paths = self._wait_for_new_rollouts(
                baseline_map,
                expected=1,
                timeout_seconds=remaining,
            )
            if not paths:
                break
            for rollout_path in paths:
                # Mark as seen so a reservation failure doesn't re-detect the same file.
                self._mark_rollout_seen(baseline_map, rollout_path)
                session_id = self._parse_session_meta(rollout_path)
                try:
                    self.register_session(
                        pane_id=pane_id,
                        session_id=session_id,
                        rollout_path=rollout_path,
                    )
                except SessionReservationError:
                    continue
                return session_id

        raise TimeoutError("Failed to detect available Codex session rollout")

    def register_worker_rollouts(
        self,
        *,
        worker_panes: Sequence[str],
        baseline: Mapping[Path, float],
        timeout_seconds: float = 30.0,
    ) -> Dict[str, str]:
        """Detect and register one new rollout per worker pane.

        Rollouts are assigned to panes in detection order (sorted by mtime by
        _wait_for_new_rollouts); rollouts reserved elsewhere are skipped.

        Returns:
            Mapping of pane id -> session id for every pane in *worker_panes*.

        Raises:
            TimeoutError: Fewer rollouts than panes were registered in time.
        """
        if not worker_panes:
            return {}

        baseline_map: Dict[Path, float] = dict(baseline)
        deadline = time.time() + timeout_seconds
        fork_map: Dict[str, str] = {}

        while len(fork_map) < len(worker_panes):
            remaining = deadline - time.time()
            if remaining <= 0:
                break
            paths = self._wait_for_new_rollouts(
                baseline_map,
                expected=1,
                timeout_seconds=remaining,
            )
            if not paths:
                break
            for path in paths:
                self._mark_rollout_seen(baseline_map, path)
                # Next unassigned pane, in worker_panes order.
                pane_index = len(fork_map)
                if pane_index >= len(worker_panes):
                    break
                pane_id = worker_panes[pane_index]
                session_id = self._parse_session_meta(path)
                try:
                    self.register_session(pane_id=pane_id, session_id=session_id, rollout_path=path)
                except SessionReservationError:
                    continue
                fork_map[pane_id] = session_id
                if len(fork_map) == len(worker_panes):
                    break

        if len(fork_map) < len(worker_panes):
            raise TimeoutError(f"Detected {len(fork_map)} worker rollouts but {len(worker_panes)} required.")

        return fork_map

    def get_last_assistant_message(self, session_id: str) -> Optional[str]:
        """Return the text of the last assistant message in the session's rollout.

        Scans the whole rollout file for ``response_item`` records with an
        assistant payload and joins their text/markdown/json content blocks.

        Returns:
            The last assistant message (stripped), or None when the session is
            unknown, the rollout is missing/unreadable, or no assistant
            message exists.
        """
        data = self._load_map()
        sessions = data.get("sessions", {})
        entry = sessions.get(session_id)
        if entry is None:
            return None

        rollout_path = Path(entry.get("rollout_path", ""))
        if not rollout_path.exists():
            return None

        last_text: Optional[str] = None
        try:
            with rollout_path.open("r", encoding="utf-8") as fh:
                for line in fh:
                    try:
                        obj = json.loads(line)
                    except json.JSONDecodeError:
                        continue

                    if obj.get("type") != "response_item":
                        continue

                    payload = obj.get("payload", {})
                    if payload.get("role") != "assistant":
                        continue

                    texts: List[str] = []
                    for block in payload.get("content", []):
                        block_type = block.get("type")
                        if block_type in {"output_text", "text"}:
                            texts.append(block.get("text", ""))
                        elif block_type == "output_markdown":
                            texts.append(block.get("markdown", ""))
                        elif block_type == "output_json":
                            # NOTE(review): this rebinds the outer `data` (the session map);
                            # harmless here since `data` is not used afterwards, but a
                            # distinct name would be clearer — confirm before relying on it.
                            data = block.get("json")
                            if data is not None:
                                texts.append(json.dumps(data))
                    if texts:
                        last_text = "\n".join(part for part in texts if part).strip()
        except OSError:
            return None

        return last_text

    def capture_instruction(self, *, pane_id: str, instruction: str) -> str:
        """Append an instruction record to instruction.log for *pane_id*.

        Returns:
            The session id currently bound to the pane.

        Raises:
            RuntimeError: If the pane has no entry in the session map.
        """
        data = self._load_map()
        pane_entry = data.get("panes", {}).get(pane_id)
        if pane_entry is None:
            raise RuntimeError(
                f"Pane {pane_id!r} is not registered in session_map; ensure Codex session detection succeeded."
            )

        instruction_log = self.logs_dir / "instruction.log"
        with instruction_log.open("a", encoding="utf-8") as fh:
            fh.write(json.dumps({"pane": pane_id, "instruction": instruction}) + "\n")

        return pane_entry["session_id"]

    def await_completion(
        self,
        *,
        session_ids: Iterable[str],
        timeout_seconds: Optional[int] = None,
        signal_paths: Optional[Mapping[str, Union[str, Path]]] = None,
    ) -> Dict[str, Any]:
        """Poll the given sessions until each completes, or until timeout.

        A session counts as complete when any of the following holds:
        it was marked via force_completion(); its signal file (from
        *signal_paths*) exists — the file is deleted after detection; or a
        new assistant message containing a ``/done`` line appears in its
        rollout (tracked incrementally via stored offsets).

        Args:
            session_ids: Sessions to watch; each must be in the session map.
            timeout_seconds: Overall deadline; None waits indefinitely.
            signal_paths: Optional per-session completion flag files.

        Returns:
            Mapping of session id -> result dict with ``done`` (bool),
            ``rollout_path`` (str), and ``forced`` (True) for forced sessions.

        Raises:
            RuntimeError: If any session id is missing from the session map.
        """
        data = self._load_map()
        sessions = data.get("sessions", {})

        targets: Dict[str, Path] = {}
        offsets: Dict[str, int] = {}
        for session_id in session_ids:
            entry = sessions.get(session_id)
            if entry is None:
                raise RuntimeError(f"Session {session_id!r} not found in session_map")
            targets[session_id] = Path(entry["rollout_path"])
            offsets[session_id] = int(entry.get("offset", 0))

        remaining = set(targets)
        completion: Dict[str, Any] = {}
        signal_targets: Dict[str, Path] = {}
        if signal_paths:
            for session_id, raw_path in signal_paths.items():
                if not session_id:
                    continue
                flag_path = Path(raw_path)
                signal_targets[session_id] = flag_path
                self._active_signal_paths[session_id] = flag_path

        def consume_forced() -> None:
            # Move force-completed sessions from `remaining` into `completion`,
            # fast-forwarding their offsets to the rollout's current end.
            forced_now = remaining.intersection(self._forced_done)
            for session_id in list(forced_now):
                path = targets[session_id]
                try:
                    offset = path.stat().st_size
                except OSError:
                    offset = 0
                completion[session_id] = {"done": True, "rollout_path": str(path), "forced": True}
                offsets[session_id] = offset
                remaining.remove(session_id)

        deadline = None if timeout_seconds is None else time.time() + timeout_seconds
        while remaining:
            consume_forced()
            if not remaining:
                break
            for session_id in list(remaining):
                # Signal file wins over rollout scanning; consume (delete) it once seen.
                if session_id in signal_targets and signal_targets[session_id].exists():
                    completion[session_id] = {"done": True, "rollout_path": str(targets[session_id])}
                    remaining.remove(session_id)
                    flag_path = signal_targets[session_id]
                    try:
                        flag_path.unlink()
                    except OSError:
                        pass
                    continue
                done, new_offset = self._contains_done(
                    session_id=session_id,
                    rollout_path=targets[session_id],
                    offset=offsets.get(session_id, 0),
                )
                if new_offset != offsets.get(session_id, 0):
                    offsets[session_id] = new_offset
                    # Persist progress so a restart does not rescan consumed content.
                    self._update_session_offset(session_id, new_offset)
                if done:
                    completion[session_id] = {"done": True, "rollout_path": str(targets[session_id])}
                    remaining.remove(session_id)
            if not remaining:
                break
            if deadline is not None and time.time() >= deadline:
                break
            time.sleep(self.poll_interval)

        # Whatever is left timed out.
        for session_id in remaining:
            completion[session_id] = {
                "done": False,
                "rollout_path": str(targets[session_id]),
            }

        for session_id in signal_targets:
            self._active_signal_paths.pop(session_id, None)

        return completion

    def force_completion(self, session_ids: Iterable[str]) -> None:
        """Mark sessions as done out-of-band and release their reservations.

        await_completion() picks these up via its consume_forced step.
        Falsy ids are ignored.
        """
        for session_id in session_ids:
            if session_id:
                self._forced_done.add(session_id)
                self._release_session(session_id)

    def wait_for_rollout_activity(
        self,
        session_id: str,
        *,
        min_bytes: int = 1,
        timeout_seconds: float = 5.0,
    ) -> None:
        """Block until the session's rollout grows by at least *min_bytes*.

        Polls at half the normal interval.  On success (or timeout/stat
        failure) the stored offset is updated; note that growth below
        *min_bytes* at timeout leaves the offset at the baseline.  No-op for
        unknown sessions.
        """
        data = self._load_map()
        sessions = data.get("sessions", {})
        entry = sessions.get(session_id)
        if entry is None:
            return
        rollout_path = Path(entry.get("rollout_path", ""))
        baseline = int(entry.get("offset", 0))
        deadline = time.time() + timeout_seconds
        last_size = baseline

        while time.time() < deadline:
            try:
                size = rollout_path.stat().st_size
            except OSError:
                break
            if size - baseline >= min_bytes:
                last_size = size
                break
            time.sleep(self.poll_interval / 2)
        entry["offset"] = int(last_size)
        self._update_session_offset(session_id, int(last_size))

    # Internal utilities -------------------------------------------------
    def _reserve_session(self, session_id: str, rollout_path: Path) -> None:
        """Atomically claim *session_id* in the registry for this namespace.

        Uses exclusive-create ("x" mode) of a JSON record as the lock.  If a
        record exists: same-namespace records are refreshed and ownership is
        kept; records whose owner pid is dead are removed and the claim is
        retried; otherwise SessionReservationError is raised.  Registry I/O
        failures are swallowed (reservation becomes best-effort).
        """
        try:
            self._registry_dir.mkdir(parents=True, exist_ok=True)
        except OSError:
            return
        metadata = {
            "session_id": session_id,
            "namespace": self._session_namespace,
            "pid": os.getpid(),
            "timestamp": time.time(),
            "rollout_path": str(rollout_path),
        }
        record_path = self._registry_dir / f"{session_id}.json"
        while True:
            try:
                # "x" mode fails if the record already exists — this is the lock.
                with record_path.open("x", encoding="utf-8") as fh:
                    json.dump(metadata, fh, ensure_ascii=False)
                self._owned_sessions.add(session_id)
                return
            except FileExistsError:
                try:
                    existing = json.loads(record_path.read_text(encoding="utf-8"))
                except (json.JSONDecodeError, OSError):
                    existing = {}
                owner_ns = existing.get("namespace")
                owner_pid = existing.get("pid")
                if owner_ns == self._session_namespace:
                    # Our own namespace already holds it; refresh the record.
                    try:
                        record_path.write_text(json.dumps(metadata, ensure_ascii=False), encoding="utf-8")
                        self._owned_sessions.add(session_id)
                    except OSError:
                        pass
                    return
                if owner_pid and not self._pid_exists(owner_pid):
                    # Stale reservation from a dead process: remove and retry.
                    try:
                        record_path.unlink()
                    except OSError:
                        break
                    continue
                raise SessionReservationError(session_id, owner_ns)
            except OSError:
                return

    def _release_session(self, session_id: str) -> None:
        """Drop this namespace's registry record for *session_id*, if it owns it.

        Records owned by other namespaces are left untouched; local ownership
        bookkeeping is cleared regardless.
        """
        record_path = self._registry_dir / f"{session_id}.json"
        try:
            existing = json.loads(record_path.read_text(encoding="utf-8"))
        except (FileNotFoundError, json.JSONDecodeError):
            self._owned_sessions.discard(session_id)
            return
        if existing.get("namespace") == self._session_namespace:
            try:
                record_path.unlink()
            except OSError:
                pass
        self._owned_sessions.discard(session_id)

    @staticmethod
    def _pid_exists(pid: int) -> bool:
        """Return True if a process with *pid* appears to be alive.

        Uses signal 0; PermissionError means the process exists but is owned
        by another user, so it counts as alive.
        """
        if pid <= 0:
            return False
        try:
            os.kill(pid, 0)
        except ProcessLookupError:
            return False
        except PermissionError:
            return True
        return True

    def _mark_rollout_seen(self, baseline: MutableMapping[Path, float], path: Path) -> None:
        """Record *path* in *baseline* so it is no longer treated as new."""
        try:
            baseline[path] = path.stat().st_mtime
        except OSError:
            # Fall back to "now" so the path is still excluded from detection.
            baseline[path] = time.time()

    def _wait_for_new_rollouts(
        self,
        baseline: Mapping[Path, float],
        *,
        expected: int,
        timeout_seconds: float,
    ) -> List[Path]:
        """Poll for rollout files not present in *baseline*.

        Returns as soon as at least *expected* new paths exist, or whatever
        was found at the deadline (possibly empty).  Results are sorted by
        modification time, oldest first.
        """
        deadline = time.time() + timeout_seconds
        baseline_paths = set(baseline.keys())
        while True:
            current = self.snapshot_rollouts()
            new_paths = [path for path in current.keys() if path not in baseline_paths]
            if len(new_paths) >= expected:
                new_paths.sort(key=lambda p: current.get(p, 0.0))
                return new_paths
            if time.time() >= deadline:
                new_paths.sort(key=lambda p: current.get(p, 0.0))
                return new_paths
            time.sleep(self.poll_interval)

    def _parse_session_meta(self, rollout_path: Path) -> str:
        """Return the rollout's session id, or a unique ``unknown-<ms>`` placeholder."""
        session_id = self._extract_session_meta(rollout_path)
        if session_id:
            return session_id
        # Millisecond timestamp keeps placeholder ids distinct across calls.
        suffix = int(time.time() * 1000)
        return f"unknown-{suffix}"

    def _extract_session_meta(self, rollout_path: Path) -> Optional[str]:
        """Scan the rollout JSONL for a ``session_meta`` record's payload id.

        Returns None when the file is missing, unparsable, or has no id.
        """
        try:
            with rollout_path.open("r", encoding="utf-8") as fh:
                for line in fh:
                    try:
                        obj = json.loads(line)
                    except json.JSONDecodeError:
                        continue
                    if obj.get("type") == "session_meta" and "payload" in obj:
                        ident = obj["payload"].get("id")
                        if ident:
                            return str(ident)
        except FileNotFoundError:
            pass
        return None

    def _load_map(self) -> Dict[str, Any]:
        """Load the session map file; empty/blank content yields {}."""
        text = self.session_map_path.read_text(encoding="utf-8")
        if not text.strip():
            return {}
        return yaml.safe_load(text) or {}

    def _write_map(self, data: Mapping[str, Any]) -> None:
        """Persist the session map as sorted-key YAML (overwrites the file)."""
        self.session_map_path.write_text(yaml.safe_dump(dict(data), sort_keys=True), encoding="utf-8")

    def _contains_done(
        self,
        *,
        session_id: str,
        rollout_path: Path,
        offset: int,
    ) -> tuple[bool, int]:
        """Scan the rollout from *offset* for an assistant ``/done`` line.

        Reads only the bytes appended since *offset*.  A session is "done"
        when any assistant ``response_item`` text block contains a line that
        is exactly ``/done`` after stripping.

        Returns:
            (done, new_offset) — new_offset is the file position after the
            read; the original offset is returned on missing file or I/O error.
        """
        if not rollout_path.exists():
            return False, offset
        try:
            with rollout_path.open("rb") as fh:
                fh.seek(offset)
                chunk = fh.read()
                new_offset = fh.tell()
        except OSError:
            return False, offset

        if not chunk:
            return False, new_offset

        done_detected = False
        # Decode leniently: a partial trailing line must not abort the scan.
        for line in chunk.decode("utf-8", errors="ignore").splitlines():
            try:
                obj = json.loads(line)
            except json.JSONDecodeError:
                continue

            if obj.get("type") != "response_item":
                continue

            payload = obj.get("payload", {})
            if payload.get("role") != "assistant":
                continue

            for block in payload.get("content", []):
                block_type = block.get("type")
                if block_type in {"output_text", "text"}:
                    text = block.get("text", "")
                    lines = [segment.strip() for segment in text.splitlines() if segment.strip()]
                    if any(segment == "/done" for segment in lines):
                        done_detected = True
                        break
            if done_detected:
                break

        return done_detected, new_offset

    def _update_session_offset(self, session_id: str, new_offset: int) -> None:
        """Write *new_offset* for the session (and its pane entry) to the map file."""
        data = self._load_map()
        sessions = data.get("sessions", {})
        panes = data.get("panes", {})
        session_entry = sessions.get(session_id)
        if session_entry is not None:
            session_entry["offset"] = int(new_offset)
            pane_id = session_entry.get("pane_id")
            if pane_id and pane_id in panes:
                panes[pane_id]["offset"] = int(new_offset)
            self._write_map(data)
|