repr-cli 0.2.15__py3-none-any.whl → 0.2.17__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- repr/__init__.py +1 -1
- repr/api.py +363 -62
- repr/auth.py +47 -38
- repr/change_synthesis.py +478 -0
- repr/cli.py +4103 -267
- repr/config.py +119 -11
- repr/configure.py +889 -0
- repr/cron.py +419 -0
- repr/dashboard/__init__.py +9 -0
- repr/dashboard/build.py +126 -0
- repr/dashboard/dist/assets/index-BYFVbEev.css +1 -0
- repr/dashboard/dist/assets/index-BrrhyJFO.css +1 -0
- repr/dashboard/dist/assets/index-CcEg74ts.js +270 -0
- repr/dashboard/dist/assets/index-Cerc-iA_.js +377 -0
- repr/dashboard/dist/assets/index-CjVcBW2L.css +1 -0
- repr/dashboard/dist/assets/index-Dfl3mR5E.js +377 -0
- repr/dashboard/dist/favicon.svg +4 -0
- repr/dashboard/dist/index.html +14 -0
- repr/dashboard/manager.py +234 -0
- repr/dashboard/server.py +1298 -0
- repr/db.py +980 -0
- repr/hooks.py +3 -2
- repr/loaders/__init__.py +22 -0
- repr/loaders/base.py +156 -0
- repr/loaders/claude_code.py +287 -0
- repr/loaders/clawdbot.py +313 -0
- repr/loaders/gemini_antigravity.py +381 -0
- repr/mcp_server.py +1196 -0
- repr/models.py +503 -0
- repr/openai_analysis.py +25 -0
- repr/session_extractor.py +481 -0
- repr/storage.py +360 -0
- repr/story_synthesis.py +1296 -0
- repr/templates.py +68 -4
- repr/timeline.py +710 -0
- repr/tools.py +17 -8
- {repr_cli-0.2.15.dist-info → repr_cli-0.2.17.dist-info}/METADATA +50 -10
- repr_cli-0.2.17.dist-info/RECORD +52 -0
- {repr_cli-0.2.15.dist-info → repr_cli-0.2.17.dist-info}/WHEEL +1 -1
- {repr_cli-0.2.15.dist-info → repr_cli-0.2.17.dist-info}/entry_points.txt +1 -0
- repr_cli-0.2.15.dist-info/RECORD +0 -26
- {repr_cli-0.2.15.dist-info → repr_cli-0.2.17.dist-info}/licenses/LICENSE +0 -0
- {repr_cli-0.2.15.dist-info → repr_cli-0.2.17.dist-info}/top_level.txt +0 -0
repr/timeline.py
ADDED
|
@@ -0,0 +1,710 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Timeline management for unified commit + session context.
|
|
3
|
+
|
|
4
|
+
Handles:
|
|
5
|
+
- Timeline storage (.repr/timeline.json)
|
|
6
|
+
- Commit extraction from git
|
|
7
|
+
- Session loading and context extraction
|
|
8
|
+
- Commit-session matching and merging
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
import json
|
|
12
|
+
import os
|
|
13
|
+
from dataclasses import asdict
|
|
14
|
+
from datetime import datetime, timedelta, timezone
|
|
15
|
+
from pathlib import Path
|
|
16
|
+
from typing import Any, Callable
|
|
17
|
+
|
|
18
|
+
from git import Repo
|
|
19
|
+
from git.exc import InvalidGitRepositoryError
|
|
20
|
+
|
|
21
|
+
from .models import (
|
|
22
|
+
CommitData,
|
|
23
|
+
CommitSessionMatch,
|
|
24
|
+
ReprTimeline,
|
|
25
|
+
SessionContext,
|
|
26
|
+
TimelineEntry,
|
|
27
|
+
TimelineEntryType,
|
|
28
|
+
match_commits_to_sessions,
|
|
29
|
+
)
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
# =============================================================================
# Constants
# =============================================================================

# Directory (relative to a project root) where repr keeps its local state.
REPR_DIR = ".repr"
# Name of the timeline store written inside REPR_DIR.
TIMELINE_FILE = "timeline.json"
|
38
|
+
|
|
39
|
+
|
|
40
|
+
# =============================================================================
|
|
41
|
+
# Helpers
|
|
42
|
+
# =============================================================================
|
|
43
|
+
|
|
44
|
+
def get_repr_dir(project_path: Path) -> Path:
    """Return the path of the ``.repr`` state directory for a project (not created)."""
    repr_dir = project_path / REPR_DIR
    return repr_dir
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def get_timeline_path(project_path: Path) -> Path:
    """Return where the project's timeline.json lives (the file may not exist yet)."""
    repr_dir = get_repr_dir(project_path)
    return repr_dir / TIMELINE_FILE
|
52
|
+
|
|
53
|
+
|
|
54
|
+
def ensure_repr_dir(project_path: Path) -> Path:
    """Create the project's .repr directory (and its .gitignore) if needed.

    The generated .gitignore hides all repr state from version control
    except the .gitignore file itself.

    Returns:
        Path of the (now existing) .repr directory.
    """
    repr_dir = get_repr_dir(project_path)
    repr_dir.mkdir(parents=True, exist_ok=True)

    # Drop a self-ignoring .gitignore on first use only.
    gitignore = repr_dir / ".gitignore"
    if not gitignore.exists():
        gitignore.write_text("# Ignore everything in .repr\n*\n!.gitignore\n")

    return repr_dir
|
65
|
+
|
|
66
|
+
|
|
67
|
+
def is_initialized(project_path: Path) -> bool:
    """Return True when a repr timeline file already exists for this project."""
    timeline_file = get_timeline_path(project_path)
    return timeline_file.exists()
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def detect_project_root(start_path: Path | None = None) -> Path | None:
    """Locate the enclosing git repository root.

    Args:
        start_path: Where to begin searching (defaults to the cwd).

    Returns:
        The repository's working-tree root, or None when *start_path*
        is not inside a git repository.
    """
    if start_path is None:
        start_path = Path.cwd()

    try:
        repo = Repo(start_path, search_parent_directories=True)
    except InvalidGitRepositoryError:
        return None
    return Path(repo.working_tree_dir)
|
89
|
+
|
|
90
|
+
|
|
91
|
+
# =============================================================================
|
|
92
|
+
# Timeline Serialization
|
|
93
|
+
# =============================================================================
|
|
94
|
+
|
|
95
|
+
def _datetime_to_iso(dt: datetime) -> str:
|
|
96
|
+
"""Convert datetime to ISO string with timezone."""
|
|
97
|
+
if dt.tzinfo is None:
|
|
98
|
+
dt = dt.replace(tzinfo=timezone.utc)
|
|
99
|
+
return dt.isoformat()
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
def _serialize_commit(commit: CommitData | None) -> dict | None:
    """Serialize CommitData to a JSON-safe dict (None passes through).

    Bug fix: now emits ``author_email`` as well — _deserialize_commit reads
    that key (defaulting to ""), so omitting it silently dropped the field
    on every save/load round-trip.
    """
    if commit is None:
        return None
    return {
        "sha": commit.sha,
        "message": commit.message,
        "author": commit.author,
        "author_email": commit.author_email,
        "timestamp": _datetime_to_iso(commit.timestamp),
        "files": commit.files,
        "insertions": commit.insertions,
        "deletions": commit.deletions,
    }
|
115
|
+
|
|
116
|
+
|
|
117
|
+
def _serialize_session_context(ctx: SessionContext | None) -> dict | None:
    """Serialize SessionContext to a JSON-safe dict (None passes through)."""
    if ctx is None:
        return None
    payload = {
        "session_id": ctx.session_id,
        "timestamp": _datetime_to_iso(ctx.timestamp),
        "problem": ctx.problem,
        "approach": ctx.approach,
        "decisions": ctx.decisions,
        "files_modified": ctx.files_modified,
        "tools_used": ctx.tools_used,
        "outcome": ctx.outcome,
        "lessons": ctx.lessons,
        "linked_commits": ctx.linked_commits,
    }
    return payload
|
133
|
+
|
|
134
|
+
|
|
135
|
+
def _serialize_entry(entry: TimelineEntry) -> dict:
    """Serialize a TimelineEntry to a JSON-safe dict."""
    # Nested commit / session-context serializers already handle None.
    serialized = {
        "timestamp": _datetime_to_iso(entry.timestamp),
        "type": entry.type.value,
        "commit": _serialize_commit(entry.commit),
        "session_context": _serialize_session_context(entry.session_context),
        "story": entry.story,
    }
    return serialized
|
144
|
+
|
|
145
|
+
|
|
146
|
+
def _deserialize_commit(data: dict | None) -> CommitData | None:
    """Rebuild a CommitData from its JSON dict form (None passes through).

    Fields absent from older timeline files (insertions, deletions,
    author_email) fall back to neutral defaults so old files still load.
    """
    if data is None:
        return None
    return CommitData(
        sha=data["sha"],
        message=data["message"],
        author=data["author"],
        author_email=data.get("author_email", ""),
        timestamp=datetime.fromisoformat(data["timestamp"]),
        files=data["files"],
        insertions=data.get("insertions", 0),
        deletions=data.get("deletions", 0),
    )
|
160
|
+
|
|
161
|
+
|
|
162
|
+
def _deserialize_session_context(data: dict | None) -> SessionContext | None:
    """Rebuild a SessionContext from its JSON dict form (None passes through)."""
    if data is None:
        return None
    # List-valued fields are optional in older files; default each to [].
    optional_lists = {
        key: data.get(key, [])
        for key in ("decisions", "files_modified", "tools_used", "lessons", "linked_commits")
    }
    return SessionContext(
        session_id=data["session_id"],
        timestamp=datetime.fromisoformat(data["timestamp"]),
        problem=data["problem"],
        approach=data["approach"],
        outcome=data["outcome"],
        **optional_lists,
    )
|
178
|
+
|
|
179
|
+
|
|
180
|
+
def _deserialize_entry(data: dict) -> TimelineEntry:
    """Rebuild a TimelineEntry from its JSON dict form."""
    entry_type = TimelineEntryType(data["type"])
    return TimelineEntry(
        timestamp=datetime.fromisoformat(data["timestamp"]),
        type=entry_type,
        commit=_deserialize_commit(data.get("commit")),
        session_context=_deserialize_session_context(data.get("session_context")),
        story=data.get("story"),
    )
|
189
|
+
|
|
190
|
+
|
|
191
|
+
def serialize_timeline(timeline: ReprTimeline) -> dict:
    """Serialize a ReprTimeline to a JSON-safe dict."""
    last_updated = timeline.last_updated
    return {
        "project_path": timeline.project_path,
        "initialized_at": _datetime_to_iso(timeline.initialized_at),
        "entries": [_serialize_entry(entry) for entry in timeline.entries],
        "session_sources": timeline.session_sources,
        "last_updated": _datetime_to_iso(last_updated) if last_updated is not None else None,
    }
|
200
|
+
|
|
201
|
+
|
|
202
|
+
def deserialize_timeline(data: dict) -> ReprTimeline:
    """Rebuild a ReprTimeline from its JSON dict form."""
    raw_last = data.get("last_updated")
    return ReprTimeline(
        project_path=data["project_path"],
        initialized_at=datetime.fromisoformat(data["initialized_at"]),
        entries=[_deserialize_entry(item) for item in data.get("entries", [])],
        session_sources=data.get("session_sources", []),
        last_updated=datetime.fromisoformat(raw_last) if raw_last else None,
    )
|
212
|
+
|
|
213
|
+
|
|
214
|
+
# =============================================================================
|
|
215
|
+
# Timeline Storage
|
|
216
|
+
# =============================================================================
|
|
217
|
+
|
|
218
|
+
def save_timeline(timeline: ReprTimeline, project_path: Path) -> Path:
    """Write *timeline* to <project>/.repr/timeline.json.

    Side effects: creates the .repr directory if missing and stamps
    ``timeline.last_updated`` with the current UTC time before writing.

    Args:
        timeline: Timeline to persist.
        project_path: Project root path.

    Returns:
        Path of the written timeline file.
    """
    ensure_repr_dir(project_path)
    target = get_timeline_path(project_path)

    # Record when this snapshot was written.
    timeline.last_updated = datetime.now(timezone.utc)

    payload = serialize_timeline(timeline)
    with open(target, "w") as fh:
        json.dump(payload, fh, indent=2)

    return target
|
238
|
+
|
|
239
|
+
|
|
240
|
+
def load_timeline(project_path: Path) -> ReprTimeline | None:
    """Read the project's timeline from .repr/timeline.json.

    Args:
        project_path: Project root path.

    Returns:
        The deserialized ReprTimeline, or None when no timeline file exists.
    """
    timeline_file = get_timeline_path(project_path)
    if not timeline_file.exists():
        return None

    with open(timeline_file) as fh:
        raw = json.load(fh)
    return deserialize_timeline(raw)
|
259
|
+
|
|
260
|
+
|
|
261
|
+
# =============================================================================
|
|
262
|
+
# Commit Extraction
|
|
263
|
+
# =============================================================================
|
|
264
|
+
|
|
265
|
+
def extract_commits_from_git(
    project_path: Path,
    days: int = 90,
    max_commits: int = 500,
    since: datetime | None = None,
    progress_callback: Callable[[int, int], None] | None = None,
) -> list[CommitData]:
    """
    Extract commits from git history.

    Merge commits (more than one parent) are skipped and do not count
    toward the progress callback.

    Args:
        project_path: Path to git repository
        days: Number of days to look back (ignored when *since* is given)
        max_commits: Maximum number of commits to extract
        since: Only get commits after this time
        progress_callback: Optional callback(current, total) for progress

    Returns:
        List of CommitData objects sorted by timestamp (oldest first)
    """
    repo = Repo(project_path)
    commits = []

    # Calculate cutoff: an explicit `since` wins over the `days` window.
    if since:
        cutoff = since
    else:
        cutoff = datetime.now(timezone.utc) - timedelta(days=days)

    # Compare against raw epoch seconds (commit.committed_date is an int).
    cutoff_timestamp = cutoff.timestamp()

    # Collect commits
    commit_count = 0
    for commit in repo.iter_commits(max_count=max_commits):
        # NOTE(review): `break` assumes iter_commits yields newest-first and
        # roughly date-ordered; an out-of-order commit date would truncate
        # the walk early — acceptable for a best-effort history scan.
        if commit.committed_date < cutoff_timestamp:
            break

        # Skip merge commits
        if len(commit.parents) > 1:
            continue

        # Get files changed (commit.stats walks the diff; this is the
        # expensive part of the loop)
        files = list(commit.stats.files.keys())

        commits.append(CommitData(
            sha=commit.hexsha,
            message=commit.message.strip(),
            author=commit.author.name,
            timestamp=datetime.fromtimestamp(commit.committed_date, tz=timezone.utc),
            files=files,
            insertions=commit.stats.total.get("insertions", 0),
            deletions=commit.stats.total.get("deletions", 0),
            author_email=commit.author.email or "",
        ))

        commit_count += 1
        if progress_callback:
            # `total` is the max_commits bound, not the true commit count,
            # so progress may finish below 100%.
            progress_callback(commit_count, max_commits)

    # Sort by timestamp (oldest first)
    commits.sort(key=lambda c: c.timestamp)

    return commits
|
328
|
+
|
|
329
|
+
|
|
330
|
+
# =============================================================================
|
|
331
|
+
# Timeline Initialization
|
|
332
|
+
# =============================================================================
|
|
333
|
+
|
|
334
|
+
def init_timeline_commits_only(
    project_path: Path,
    days: int = 90,
    max_commits: int = 500,
    progress_callback: Callable[[str, int, int], None] | None = None,
) -> ReprTimeline:
    """
    Build and persist a timeline from git commits alone (no session extraction).

    Args:
        project_path: Path to git repository
        days: Number of days to look back
        max_commits: Maximum number of commits
        progress_callback: Optional callback(stage, current, total)

    Returns:
        The freshly initialized (and saved) ReprTimeline
    """
    project_path = Path(project_path).resolve()

    timeline = ReprTimeline(
        project_path=str(project_path),
        initialized_at=datetime.now(timezone.utc),
        entries=[],
        session_sources=[],
    )

    # Forward commit-extraction progress under the "commits" stage label.
    def report(current: int, total: int) -> None:
        if progress_callback:
            progress_callback("commits", current, total)

    extracted = extract_commits_from_git(
        project_path,
        days=days,
        max_commits=max_commits,
        progress_callback=report,
    )

    # One COMMIT entry per extracted commit; no session context yet.
    for commit in extracted:
        timeline.entries.append(
            TimelineEntry(
                timestamp=commit.timestamp,
                type=TimelineEntryType.COMMIT,
                commit=commit,
                session_context=None,
                story=None,
            )
        )

    save_timeline(timeline, project_path)
    return timeline
|
389
|
+
|
|
390
|
+
|
|
391
|
+
async def init_timeline_with_sessions(
    project_path: Path,
    days: int = 90,
    max_commits: int = 500,
    session_sources: list[str] | None = None,
    api_key: str | None = None,
    base_url: str | None = None,
    model: str = "openai/gpt-4.1-mini",
    extraction_concurrency: int = 3,
    progress_callback: Callable[[str, int, int], None] | None = None,
) -> ReprTimeline:
    """
    Initialize timeline with commits and session context extraction.

    Args:
        project_path: Path to git repository
        days: Number of days to look back
        max_commits: Maximum number of commits
        session_sources: Sources to use (None = auto-detect)
        api_key: API key for LLM extraction
        base_url: Base URL for LLM API (e.g., for local Ollama)
        model: Model for extraction
        extraction_concurrency: Max concurrent LLM extractions
            (currently unused — extraction runs sequentially)
        progress_callback: Optional callback(stage, current, total)

    Returns:
        Initialized ReprTimeline with session context
    """
    from .loaders import load_sessions_for_project, detect_session_source
    from .session_extractor import SessionExtractor

    project_path = Path(project_path).resolve()

    # Create timeline
    timeline = ReprTimeline(
        project_path=str(project_path),
        initialized_at=datetime.now(timezone.utc),
        entries=[],
        session_sources=[],
    )

    # Extract commits
    def commit_progress(current: int, total: int) -> None:
        if progress_callback:
            progress_callback("commits", current, total)

    commits = extract_commits_from_git(
        project_path,
        days=days,
        max_commits=max_commits,
        progress_callback=commit_progress,
    )

    # Load sessions
    if session_sources is None:
        session_sources = detect_session_source(project_path)

    timeline.session_sources = session_sources

    sessions = load_sessions_for_project(
        project_path,
        sources=session_sources,
        days_back=days,
    )

    if progress_callback:
        progress_callback("sessions_loaded", len(sessions), len(sessions))

    # Match commits to sessions
    matches = match_commits_to_sessions(commits, sessions)

    # Precompute session_id -> linked commit SHAs once, instead of rescanning
    # the full match list for every session. (Replaces the previous
    # commit->sessions map, which was built but never read.)
    session_to_commits: dict[str, list[str]] = {}
    for match in matches:
        session_to_commits.setdefault(match.session_id, []).append(match.commit_sha)

    # Extract context from sessions
    session_contexts: dict[str, SessionContext] = {}

    if sessions:
        async with SessionExtractor(api_key=api_key, base_url=base_url, model=model) as extractor:
            # Extract sequentially with progress reporting.
            total_sessions = len(sessions)
            for i, session in enumerate(sessions):
                if progress_callback:
                    progress_callback("extracting", i + 1, total_sessions)

                try:
                    linked_commits = session_to_commits.get(session.id, [])
                    context = await extractor.extract_context(session, linked_commits=linked_commits)
                    session_contexts[session.id] = context
                except Exception as e:
                    # Best-effort: log and continue with the remaining sessions.
                    print(f"Warning: Failed to extract context from session {session.id}: {e}")

    # Create timeline entries.
    # First, add all commits (indexed by SHA so matches can upgrade them).
    commit_entries: dict[str, TimelineEntry] = {}
    for commit in commits:
        entry = TimelineEntry(
            timestamp=commit.timestamp,
            type=TimelineEntryType.COMMIT,
            commit=commit,
            session_context=None,
            story=None,
        )
        commit_entries[commit.sha] = entry
        timeline.entries.append(entry)

    # Merge session context into matching commits (first match wins).
    for match in matches:
        if match.session_id in session_contexts and match.commit_sha in commit_entries:
            entry = commit_entries[match.commit_sha]
            if entry.session_context is None:
                entry.session_context = session_contexts[match.session_id]
                entry.type = TimelineEntryType.MERGED

    # Add standalone sessions (those not matched to any commit).
    for session_id, ctx in session_contexts.items():
        if session_id not in session_to_commits:
            timeline.entries.append(
                TimelineEntry(
                    timestamp=ctx.timestamp,
                    type=TimelineEntryType.SESSION,
                    commit=None,
                    session_context=ctx,
                    story=None,
                )
            )

    # Sort entries by timestamp
    timeline.entries.sort(key=lambda e: e.timestamp)

    # Save timeline
    save_timeline(timeline, project_path)

    return timeline
|
538
|
+
|
|
539
|
+
|
|
540
|
+
def init_timeline_with_sessions_sync(
    project_path: Path,
    **kwargs,
) -> ReprTimeline:
    """Run init_timeline_with_sessions to completion on a fresh event loop."""
    import asyncio

    coro = init_timeline_with_sessions(project_path, **kwargs)
    return asyncio.run(coro)
|
547
|
+
|
|
548
|
+
|
|
549
|
+
# =============================================================================
|
|
550
|
+
# Timeline Queries
|
|
551
|
+
# =============================================================================
|
|
552
|
+
|
|
553
|
+
def get_timeline_stats(timeline: ReprTimeline) -> dict[str, Any]:
    """
    Summarize a timeline.

    Returns dict with:
    - total_entries
    - commit_count / session_count / merged_count (per entry type)
    - date_range (first, last) as ISO strings, or None when empty
    - session_sources
    """
    # Map each entry type to the stats key it increments.
    type_to_key = {
        TimelineEntryType.COMMIT: "commit_count",
        TimelineEntryType.SESSION: "session_count",
        TimelineEntryType.MERGED: "merged_count",
    }

    stats: dict[str, Any] = {
        "total_entries": len(timeline.entries),
        "commit_count": 0,
        "session_count": 0,
        "merged_count": 0,
        "date_range": {"first": None, "last": None},
        "session_sources": timeline.session_sources,
    }

    for entry in timeline.entries:
        key = type_to_key.get(entry.type)
        if key is not None:
            stats[key] += 1

    # Entries are stored sorted, so first/last bracket the date range.
    if timeline.entries:
        stats["date_range"]["first"] = timeline.entries[0].timestamp.isoformat()
        stats["date_range"]["last"] = timeline.entries[-1].timestamp.isoformat()

    return stats
|
587
|
+
|
|
588
|
+
|
|
589
|
+
def query_timeline(
    timeline: ReprTimeline,
    since: datetime | None = None,
    until: datetime | None = None,
    entry_types: list[TimelineEntryType] | None = None,
    files: list[str] | None = None,
) -> list[TimelineEntry]:
    """
    Query timeline entries with filters.

    Args:
        timeline: Timeline to query
        since: Only entries after this time
        until: Only entries before this time
        entry_types: Filter by entry types
        files: Filter by files touched (commit files or session files_modified)

    Returns:
        Filtered list of timeline entries
    """
    wanted_files = set(files) if files else None

    def keep(entry: TimelineEntry) -> bool:
        # Time window
        if since and entry.timestamp < since:
            return False
        if until and entry.timestamp > until:
            return False
        # Entry type
        if entry_types and entry.type not in entry_types:
            return False
        # Touched files: entry passes if it touches any requested file.
        if wanted_files is not None:
            touched: set[str] = set()
            if entry.commit:
                touched.update(entry.commit.files)
            if entry.session_context:
                touched.update(entry.session_context.files_modified)
            if not (wanted_files & touched):
                return False
        return True

    return [entry for entry in timeline.entries if keep(entry)]
|
636
|
+
|
|
637
|
+
|
|
638
|
+
async def get_session_contexts_for_commits(
    project_path: Path,
    commits: list[CommitData],
    days: int = 30,
    session_sources: list[str] | None = None,
    api_key: str | None = None,
    base_url: str | None = None,
    model: str = "openai/gpt-4.1-mini",
    progress_callback: Callable[[str, int, int], None] | None = None,
) -> list[SessionContext]:
    """
    Load and extract context from sessions linked to a list of commits.

    Args:
        project_path: Path to the project
        commits: List of commits to match against
        days: How far back to look for sessions
        session_sources: Sources to check (None = auto-detect)
        api_key: API key for extraction
        base_url: Base URL for extraction
        model: Model to use for extraction
        progress_callback: Optional progress callback

    Returns:
        List of extracted SessionContext objects (sessions whose extraction
        fails are skipped with a warning)
    """
    from .loaders import load_sessions_for_project, detect_session_source
    from .session_extractor import SessionExtractor

    # 1. Detect and load sessions
    if session_sources is None:
        session_sources = detect_session_source(project_path)

    if not session_sources:
        return []

    sessions = load_sessions_for_project(
        project_path,
        sources=session_sources,
        days_back=days,
    )

    if not sessions:
        return []

    if progress_callback:
        progress_callback("sessions_loaded", len(sessions), len(sessions))

    # 2. Match commits to sessions
    matches = match_commits_to_sessions(commits, sessions)

    if not matches:
        return []

    # Precompute session_id -> linked commit SHAs once, instead of rescanning
    # the full match list inside the extraction loop.
    session_to_commits: dict[str, list[str]] = {}
    for match in matches:
        session_to_commits.setdefault(match.session_id, []).append(match.commit_sha)

    matched_sessions = [s for s in sessions if s.id in session_to_commits]

    # 3. Extract context from matched sessions
    session_contexts: list[SessionContext] = []
    async with SessionExtractor(api_key=api_key, base_url=base_url, model=model) as extractor:
        total_to_extract = len(matched_sessions)
        for i, session in enumerate(matched_sessions):
            if progress_callback:
                progress_callback("extracting", i + 1, total_to_extract)

            try:
                context = await extractor.extract_context(
                    session, linked_commits=session_to_commits[session.id]
                )
                session_contexts.append(context)
            except Exception as e:
                # Best-effort: skip failed sessions, but say so (consistent
                # with init_timeline_with_sessions, which previously warned
                # where this function swallowed the error silently).
                print(f"Warning: Failed to extract context from session {session.id}: {e}")
                continue

    return session_contexts