fast-resume 1.12.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fast_resume/__init__.py +5 -0
- fast_resume/adapters/__init__.py +25 -0
- fast_resume/adapters/base.py +263 -0
- fast_resume/adapters/claude.py +209 -0
- fast_resume/adapters/codex.py +216 -0
- fast_resume/adapters/copilot.py +176 -0
- fast_resume/adapters/copilot_vscode.py +326 -0
- fast_resume/adapters/crush.py +341 -0
- fast_resume/adapters/opencode.py +333 -0
- fast_resume/adapters/vibe.py +188 -0
- fast_resume/assets/claude.png +0 -0
- fast_resume/assets/codex.png +0 -0
- fast_resume/assets/copilot-cli.png +0 -0
- fast_resume/assets/copilot-vscode.png +0 -0
- fast_resume/assets/crush.png +0 -0
- fast_resume/assets/opencode.png +0 -0
- fast_resume/assets/vibe.png +0 -0
- fast_resume/cli.py +327 -0
- fast_resume/config.py +30 -0
- fast_resume/index.py +758 -0
- fast_resume/logging_config.py +57 -0
- fast_resume/query.py +264 -0
- fast_resume/search.py +281 -0
- fast_resume/tui/__init__.py +58 -0
- fast_resume/tui/app.py +629 -0
- fast_resume/tui/filter_bar.py +128 -0
- fast_resume/tui/modal.py +73 -0
- fast_resume/tui/preview.py +396 -0
- fast_resume/tui/query.py +86 -0
- fast_resume/tui/results_table.py +178 -0
- fast_resume/tui/search_input.py +117 -0
- fast_resume/tui/styles.py +302 -0
- fast_resume/tui/utils.py +160 -0
- fast_resume-1.12.8.dist-info/METADATA +545 -0
- fast_resume-1.12.8.dist-info/RECORD +38 -0
- fast_resume-1.12.8.dist-info/WHEEL +4 -0
- fast_resume-1.12.8.dist-info/entry_points.txt +3 -0
- fast_resume-1.12.8.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,333 @@
|
|
|
1
|
+
"""OpenCode session adapter."""
|
|
2
|
+
|
|
3
|
+
import orjson
|
|
4
|
+
from collections import defaultdict
|
|
5
|
+
from datetime import datetime
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
|
|
8
|
+
from ..config import AGENTS, OPENCODE_DIR
|
|
9
|
+
from ..logging_config import log_parse_error
|
|
10
|
+
from .base import ErrorCallback, ParseError, RawAdapterStats, Session
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class OpenCodeAdapter:
    """Adapter for OpenCode sessions.

    OpenCode stores its data in three sibling directories under the data root:
    ``session/<project-hash>/ses_*.json`` (session metadata),
    ``message/<session-id>/msg_*.json`` (per-message metadata), and
    ``part/<message-id>/*.json`` (message content parts).
    """

    name = "opencode"
    color = AGENTS["opencode"]["color"]
    badge = AGENTS["opencode"]["badge"]

    def __init__(self, sessions_dir: Path | None = None) -> None:
        # Allow injecting a directory for tests; default to the real data dir.
        self._sessions_dir = sessions_dir if sessions_dir is not None else OPENCODE_DIR

    def is_available(self) -> bool:
        """Check if OpenCode data directory exists."""
        return self._sessions_dir.exists()

    def _index_messages(self) -> dict[str, list[tuple[Path, str, str]]]:
        """Pre-index all messages by session id.

        Returns:
            Mapping of session_id -> [(msg_file, msg_id, role), ...].
        """
        messages_by_session: dict[str, list[tuple[Path, str, str]]] = defaultdict(list)
        message_dir = self._sessions_dir / "message"
        if message_dir.exists():
            for msg_file in message_dir.glob("*/msg_*.json"):
                try:
                    with open(msg_file, "rb") as f:
                        msg_data = orjson.loads(f.read())
                    # The parent directory name is the session id.
                    session_id = msg_file.parent.name
                    msg_id = msg_data.get("id", "")
                    role = msg_data.get("role", "")
                    if msg_id:
                        messages_by_session[session_id].append((msg_file, msg_id, role))
                except Exception:
                    # Best-effort indexing: skip files that can't be read/parsed.
                    continue
        return messages_by_session

    def _index_parts(self) -> dict[str, list[str]]:
        """Pre-index all text parts by message id.

        Returns:
            Mapping of msg_id -> [text, ...], in sorted-filename order.
        """
        parts_by_message: dict[str, list[str]] = defaultdict(list)
        part_dir = self._sessions_dir / "part"
        if part_dir.exists():
            # Sorting keeps part order deterministic across runs.
            for part_file in sorted(part_dir.glob("*/*.json")):
                try:
                    with open(part_file, "rb") as f:
                        part_data = orjson.loads(f.read())
                    # The parent directory name is the message id.
                    msg_id = part_file.parent.name
                    if part_data.get("type") == "text":
                        text = part_data.get("text", "")
                        if text:
                            parts_by_message[msg_id].append(text)
                except Exception:
                    # Best-effort indexing: skip files that can't be read/parsed.
                    continue
        return parts_by_message

    def find_sessions(self) -> list[Session]:
        """Find all OpenCode sessions."""
        if not self.is_available():
            return []

        session_dir = self._sessions_dir / "session"
        if not session_dir.exists():
            return []

        messages_by_session = self._index_messages()
        parts_by_message = self._index_parts()

        sessions: list[Session] = []
        # OpenCode stores sessions in project-hash subdirectories.
        for project_dir in session_dir.iterdir():
            if not project_dir.is_dir():
                continue
            for session_file in project_dir.glob("ses_*.json"):
                session = self._parse_session(
                    session_file, messages_by_session, parts_by_message
                )
                if session:
                    sessions.append(session)

        return sessions

    def _report_error(
        self,
        session_file: Path,
        error_type: str,
        message: str,
        on_error: ErrorCallback,
    ) -> None:
        """Log a parse failure and forward it to the optional error callback."""
        error = ParseError(
            agent=self.name,
            file_path=str(session_file),
            error_type=error_type,
            message=message,
        )
        log_parse_error(error.agent, error.file_path, error.error_type, error.message)
        if on_error:
            on_error(error)

    def _parse_session(
        self,
        session_file: Path,
        messages_by_session: dict[str, list[tuple[Path, str, str]]],
        parts_by_message: dict[str, list[str]],
        on_error: ErrorCallback = None,
    ) -> Session | None:
        """Parse an OpenCode session file.

        Returns:
            The parsed Session, or None when the file is unreadable/malformed
            (the error is logged and forwarded via *on_error*).
        """
        try:
            with open(session_file, "rb") as f:
                data = orjson.loads(f.read())

            session_id = data.get("id", "")
            title = data.get("title", "Untitled session")
            directory = data.get("directory", "")

            # 'time.created' is in milliseconds; fall back to file mtime.
            time_data = data.get("time", {})
            created = time_data.get("created", 0)
            if created:
                timestamp = datetime.fromtimestamp(created / 1000)
            else:
                timestamp = datetime.fromtimestamp(session_file.stat().st_mtime)

            # Get message content from pre-indexed data.
            messages = self._get_session_messages(
                session_id, messages_by_session, parts_by_message
            )

            # Count actual message turns (not text parts).
            turn_count = len(messages_by_session.get(session_id, []))

            return Session(
                id=session_id,
                agent=self.name,
                title=title,
                directory=directory,
                timestamp=timestamp,
                content="\n\n".join(messages),
                message_count=turn_count,
            )
        except OSError as e:
            self._report_error(session_file, "OSError", str(e), on_error)
            return None
        except orjson.JSONDecodeError as e:
            self._report_error(session_file, "JSONDecodeError", str(e), on_error)
            return None
        except (KeyError, TypeError, AttributeError) as e:
            self._report_error(session_file, type(e).__name__, str(e), on_error)
            return None

    def _get_session_messages(
        self,
        session_id: str,
        messages_by_session: dict[str, list[tuple[Path, str, str]]],
        parts_by_message: dict[str, list[str]],
    ) -> list[str]:
        """Get all messages for a session from pre-indexed parts."""
        messages: list[str] = []

        # Sort by filename to maintain order.
        session_msgs = sorted(
            messages_by_session.get(session_id, []), key=lambda x: x[0].name
        )

        for _msg_file, msg_id, role in session_msgs:
            role_prefix = "» " if role == "user" else " "
            for text in parts_by_message.get(msg_id, []):
                messages.append(f"{role_prefix}{text}")

        return messages

    def find_sessions_incremental(
        self,
        known: dict[str, tuple[float, str]],
        on_error: ErrorCallback = None,
    ) -> tuple[list[Session], list[str]]:
        """Find sessions incrementally, comparing against known sessions.

        Args:
            known: Mapping of session_id -> (mtime, agent_name) from the index.
            on_error: Optional callback invoked with each ParseError.

        Returns:
            Tuple of (new_or_modified_sessions, deleted_session_ids).
        """
        session_dir = self._sessions_dir / "session"
        if not self.is_available() or not session_dir.exists():
            # Data is gone entirely: everything previously indexed is deleted.
            deleted_ids = [
                sid for sid, (_, agent) in known.items() if agent == self.name
            ]
            return [], deleted_ids

        # Scan session files and get timestamps. For OpenCode we use the
        # 'created' timestamp from the file content (not file mtime) because
        # that's what we store in the index.
        current_sessions: dict[str, tuple[Path, float]] = {}
        for project_dir in session_dir.iterdir():
            if not project_dir.is_dir():
                continue
            for session_file in project_dir.glob("ses_*.json"):
                try:
                    with open(session_file, "rb") as f:
                        data = orjson.loads(f.read())
                    session_id = data.get("id", "")
                    if session_id:
                        # Use created timestamp to match what _parse_session stores.
                        created = data.get("time", {}).get("created", 0)
                        if created:
                            mtime = datetime.fromtimestamp(created / 1000).timestamp()
                        else:
                            mtime = session_file.stat().st_mtime
                        current_sessions[session_id] = (session_file, mtime)
                except (OSError, orjson.JSONDecodeError):
                    # Skip files that can't be read during scanning.
                    continue

        # Check which sessions need parsing.
        # Use 1ms tolerance for mtime comparison due to datetime precision loss.
        sessions_to_parse: list[tuple[str, Path, float]] = []
        for session_id, (path, mtime) in current_sessions.items():
            known_entry = known.get(session_id)
            if known_entry is None or mtime > known_entry[0] + 0.001:
                sessions_to_parse.append((session_id, path, mtime))

        # Find deleted sessions.
        current_ids = set(current_sessions)
        deleted_ids = [
            sid
            for sid, (_, agent) in known.items()
            if agent == self.name and sid not in current_ids
        ]

        if not sessions_to_parse:
            return [], deleted_ids

        # Build the message/part indexes only when something actually changed.
        messages_by_session = self._index_messages()
        parts_by_message = self._index_parts()

        new_or_modified = []
        for _session_id, path, mtime in sessions_to_parse:
            session = self._parse_session(
                path, messages_by_session, parts_by_message, on_error=on_error
            )
            if session:
                session.mtime = mtime
                new_or_modified.append(session)

        return new_or_modified, deleted_ids

    def get_resume_command(self, session: Session, yolo: bool = False) -> list[str]:
        """Get command to resume an OpenCode session.

        *yolo* is accepted for interface parity with other adapters but has
        no effect here (no auto-approve flag is emitted for OpenCode).
        """
        return ["opencode", session.directory, "--session", session.id]

    def get_raw_stats(self) -> RawAdapterStats:
        """Get raw statistics from the OpenCode data folder."""
        if not self.is_available():
            return RawAdapterStats(
                agent=self.name,
                data_dir=str(self._sessions_dir),
                available=False,
                file_count=0,
                total_bytes=0,
            )

        # Count all files: session, message, and part directories.
        file_count = 0
        total_bytes = 0
        for subdir in ["session", "message", "part"]:
            dir_path = self._sessions_dir / subdir
            if dir_path.exists():
                for json_file in dir_path.rglob("*.json"):
                    try:
                        file_count += 1
                        total_bytes += json_file.stat().st_size
                    except OSError:
                        # File vanished between glob and stat; ignore.
                        pass

        return RawAdapterStats(
            agent=self.name,
            data_dir=str(self._sessions_dir),
            available=True,
            file_count=file_count,
            total_bytes=total_bytes,
        )
|
|
@@ -0,0 +1,188 @@
|
|
|
1
|
+
"""Vibe (Mistral) session adapter."""
|
|
2
|
+
|
|
3
|
+
import orjson
|
|
4
|
+
from datetime import datetime
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
|
|
7
|
+
from ..config import AGENTS, VIBE_DIR
|
|
8
|
+
from ..logging_config import log_parse_error
|
|
9
|
+
from .base import BaseSessionAdapter, ErrorCallback, ParseError, Session, truncate_title
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class VibeAdapter(BaseSessionAdapter):
    """Adapter for Vibe (Mistral) sessions.

    Sessions are single JSON files named ``session_*.json`` directly under
    the Vibe data directory, each holding ``metadata`` and ``messages``.
    """

    name = "vibe"
    color = AGENTS["vibe"]["color"]
    badge = AGENTS["vibe"]["badge"]
    supports_yolo = True

    def __init__(self, sessions_dir: Path | None = None) -> None:
        # Allow injecting a directory for tests; default to the real data dir.
        self._sessions_dir = sessions_dir if sessions_dir is not None else VIBE_DIR

    def find_sessions(self) -> list[Session]:
        """Find all Vibe sessions."""
        if not self.is_available():
            return []

        sessions = []
        for session_file in self._sessions_dir.glob("session_*.json"):
            session = self._parse_session_file(session_file)
            if session:
                sessions.append(session)

        return sessions

    def _report_error(
        self,
        session_file: Path,
        error_type: str,
        message: str,
        on_error: ErrorCallback,
    ) -> None:
        """Log a parse failure and forward it to the optional error callback."""
        error = ParseError(
            agent=self.name,
            file_path=str(session_file),
            error_type=error_type,
            message=message,
        )
        log_parse_error(error.agent, error.file_path, error.error_type, error.message)
        if on_error:
            on_error(error)

    @staticmethod
    def _session_timestamp(metadata: dict, session_file: Path) -> datetime:
        """Timestamp from ISO 'start_time' metadata, else the file's mtime."""
        start_time = metadata.get("start_time", "")
        if start_time:
            try:
                return datetime.fromisoformat(start_time)
            except ValueError:
                # Malformed timestamp string: fall through to mtime.
                pass
        return datetime.fromtimestamp(session_file.stat().st_mtime)

    @staticmethod
    def _extract_messages(messages_data: list) -> list[str]:
        """Flatten non-system messages into prefixed text strings."""
        messages: list[str] = []
        for msg in messages_data:
            role = msg.get("role", "")
            # Skip system messages.
            if role == "system":
                continue
            role_prefix = "» " if role == "user" else " "
            content = msg.get("content", "")
            if isinstance(content, str) and content:
                messages.append(f"{role_prefix}{content}")
            elif isinstance(content, list):
                # Structured content: collect the text of each dict part.
                for part in content:
                    if isinstance(part, dict):
                        text = part.get("text", "")
                        if text:
                            messages.append(f"{role_prefix}{text}")
        return messages

    @staticmethod
    def _make_title(messages_data: list) -> str:
        """Generate a title from the first user message (80-char hard truncate)."""
        for msg in messages_data:
            if msg.get("role") == "user":
                first_msg = msg.get("content", "")
                if isinstance(first_msg, str):
                    return truncate_title(first_msg, max_length=80, word_break=False)
                # First user message has structured content; use the fallback.
                break
        return "Vibe session"

    def _parse_session_file(
        self, session_file: Path, on_error: ErrorCallback = None
    ) -> Session | None:
        """Parse a Vibe session file.

        Returns:
            The parsed Session, or None when the file is unreadable/malformed
            (the error is logged and forwarded via *on_error*).
        """
        try:
            with open(session_file, "rb") as f:
                data = orjson.loads(f.read())

            metadata = data.get("metadata", {})
            session_id = metadata.get("session_id", session_file.stem)

            # Working directory comes from the recorded environment.
            directory = metadata.get("environment", {}).get("working_directory", "")

            # Check if session was started with auto_approve ("yolo").
            yolo = metadata.get("auto_approve", False)

            timestamp = self._session_timestamp(metadata, session_file)

            messages_data = data.get("messages", [])
            messages = self._extract_messages(messages_data)
            title = self._make_title(messages_data)

            return Session(
                id=session_id,
                agent=self.name,
                title=title,
                directory=directory,
                timestamp=timestamp,
                content="\n\n".join(messages),
                message_count=len(messages),
                yolo=yolo,
            )
        except OSError as e:
            self._report_error(session_file, "OSError", str(e), on_error)
            return None
        except orjson.JSONDecodeError as e:
            self._report_error(session_file, "JSONDecodeError", str(e), on_error)
            return None
        except (KeyError, TypeError, AttributeError) as e:
            self._report_error(session_file, type(e).__name__, str(e), on_error)
            return None

    def _scan_session_files(self) -> dict[str, tuple[Path, float]]:
        """Scan all Vibe session files.

        Uses start_time from JSON metadata as mtime for consistency with parsing.
        """
        current_files: dict[str, tuple[Path, float]] = {}

        for session_file in self._sessions_dir.glob("session_*.json"):
            try:
                with open(session_file, "rb") as f:
                    data = orjson.loads(f.read())
                metadata = data.get("metadata", {})
                session_id = metadata.get("session_id", session_file.stem)
                # Use start_time to match what _parse_session_file stores.
                mtime = self._session_timestamp(metadata, session_file).timestamp()
                current_files[session_id] = (session_file, mtime)
            except Exception:
                # Best-effort scan: skip unreadable/malformed files.
                continue

        return current_files

    def get_resume_command(self, session: Session, yolo: bool = False) -> list[str]:
        """Get command to resume a Vibe session."""
        cmd = ["vibe"]
        if yolo:
            cmd.append("--auto-approve")
        cmd.extend(["--resume", session.id])
        return cmd
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|