repr-cli 0.2.16__py3-none-any.whl → 0.2.18__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- repr/__init__.py +1 -1
- repr/api.py +363 -62
- repr/auth.py +47 -38
- repr/change_synthesis.py +478 -0
- repr/cli.py +4306 -364
- repr/config.py +119 -11
- repr/configure.py +889 -0
- repr/cron.py +419 -0
- repr/dashboard/__init__.py +9 -0
- repr/dashboard/build.py +126 -0
- repr/dashboard/dist/assets/index-B-aCjaCw.js +384 -0
- repr/dashboard/dist/assets/index-BYFVbEev.css +1 -0
- repr/dashboard/dist/assets/index-BrrhyJFO.css +1 -0
- repr/dashboard/dist/assets/index-C7Gzxc4f.js +384 -0
- repr/dashboard/dist/assets/index-CQdMXo6g.js +391 -0
- repr/dashboard/dist/assets/index-CcEg74ts.js +270 -0
- repr/dashboard/dist/assets/index-Cerc-iA_.js +377 -0
- repr/dashboard/dist/assets/index-CjVcBW2L.css +1 -0
- repr/dashboard/dist/assets/index-Cs8ofFGd.js +384 -0
- repr/dashboard/dist/assets/index-Dfl3mR5E.js +377 -0
- repr/dashboard/dist/assets/index-DwN0SeMc.css +1 -0
- repr/dashboard/dist/assets/index-YFch_e0S.js +384 -0
- repr/dashboard/dist/favicon.svg +4 -0
- repr/dashboard/dist/index.html +14 -0
- repr/dashboard/manager.py +234 -0
- repr/dashboard/server.py +1489 -0
- repr/db.py +980 -0
- repr/hooks.py +3 -2
- repr/loaders/__init__.py +22 -0
- repr/loaders/base.py +156 -0
- repr/loaders/claude_code.py +287 -0
- repr/loaders/clawdbot.py +313 -0
- repr/loaders/gemini_antigravity.py +381 -0
- repr/mcp_server.py +1196 -0
- repr/models.py +503 -0
- repr/openai_analysis.py +25 -0
- repr/session_extractor.py +481 -0
- repr/storage.py +328 -0
- repr/story_synthesis.py +1296 -0
- repr/templates.py +68 -4
- repr/timeline.py +710 -0
- repr/tools.py +17 -8
- {repr_cli-0.2.16.dist-info → repr_cli-0.2.18.dist-info}/METADATA +48 -10
- repr_cli-0.2.18.dist-info/RECORD +58 -0
- {repr_cli-0.2.16.dist-info → repr_cli-0.2.18.dist-info}/WHEEL +1 -1
- {repr_cli-0.2.16.dist-info → repr_cli-0.2.18.dist-info}/entry_points.txt +1 -0
- repr_cli-0.2.16.dist-info/RECORD +0 -26
- {repr_cli-0.2.16.dist-info → repr_cli-0.2.18.dist-info}/licenses/LICENSE +0 -0
- {repr_cli-0.2.16.dist-info → repr_cli-0.2.18.dist-info}/top_level.txt +0 -0
repr/cli.py
CHANGED
```diff
@@ -21,10 +21,11 @@ import os
 import sys
 from datetime import datetime, timedelta
 from pathlib import Path
-from typing import Optional, List, Dict, Callable
+from typing import Optional, List, Dict, Callable, Any, Any
 from collections import defaultdict
 
 import typer
+from rich.markup import escape as rich_escape
 from rich.prompt import Confirm, Prompt
 from rich.table import Table
 
@@ -86,9 +87,6 @@ from .config import (
 )
 from .storage import (
     save_story,
-    load_story,
-    delete_story,
-    list_stories,
     get_story_count,
     get_unpushed_stories,
     mark_story_pushed,
@@ -98,9 +96,245 @@ from .storage import (
     restore_from_backup,
     get_storage_stats,
 )
+from .db import get_db
 from .auth import AuthFlow, AuthError, logout as auth_logout, get_current_user, migrate_plaintext_auth
 from .api import APIError
 
+
+# Database-backed story listing (replaces JSON storage)
+def list_stories(
+    repo_name: str | None = None,
+    since: datetime | None = None,
+    needs_review: bool = False,
+    limit: int | None = None,
+) -> list[dict[str, Any]]:
+    """
+    List all stories from the database.
+
+    Args:
+        repo_name: Filter by repository name
+        since: Filter by creation date
+        needs_review: Only show stories needing review
+        limit: Maximum stories to return
+
+    Returns:
+        List of story metadata dicts (sorted by creation date, newest first)
+    """
+    db = get_db()
+
+    # Build query with project join
+    conditions = []
+    params = []
+
+    if since:
+        iso_since = since.isoformat()
+        conditions.append("s.created_at >= ?")
+        params.append(iso_since)
+
+    where_clause = " AND ".join(conditions) if conditions else "1=1"
+
+    query = f"""
+        SELECT
+            s.id,
+            s.project_id,
+            s.created_at,
+            s.updated_at,
+            s.title,
+            s.problem,
+            s.approach,
+            s.tradeoffs,
+            s.outcome,
+            s.category,
+            s.scope,
+            s.technologies,
+            s.started_at,
+            s.ended_at,
+            s.implementation_details,
+            s.decisions,
+            s.lessons,
+            s.hook,
+            s.what,
+            s.value,
+            s.insight,
+            s.show,
+            s.diagram,
+            s.post_body,
+            p.name as repo_name,
+            p.path as repo_path
+        FROM stories s
+        JOIN projects p ON s.project_id = p.id
+        WHERE {where_clause}
+        ORDER BY s.created_at DESC
+    """
+
+    if limit:
+        query += " LIMIT ?"
+        params.append(limit)
+
+    stories = []
+    with db.connect() as conn:
+        for row in conn.execute(query, params).fetchall():
+            story = {
+                "id": row["id"],
+                "repo_name": row["repo_name"],
+                "repo_path": row["repo_path"],
+                "summary": row["title"],
+                "created_at": row["created_at"],
+                "updated_at": row["updated_at"],
+                "problem": row["problem"],
+                "approach": row["approach"],
+                "tradeoffs": row["tradeoffs"],
+                "outcome": row["outcome"],
+                "category": row["category"],
+                "scope": row["scope"],
+                "started_at": row["started_at"],
+                "ended_at": row["ended_at"],
+                "hook": row["hook"],
+                "what": row["what"],
+                "value": row["value"],
+                "insight": row["insight"],
+                "show": row["show"],
+                "diagram": row["diagram"],
+                "post_body": row["post_body"],
+            }
+
+            # Deserialize JSON fields
+            if row["technologies"]:
+                try:
+                    story["technologies"] = json.loads(row["technologies"])
+                except json.JSONDecodeError:
+                    story["technologies"] = []
+            else:
+                story["technologies"] = []
+
+            if row["implementation_details"]:
+                try:
+                    story["implementation_details"] = json.loads(row["implementation_details"])
+                except json.JSONDecodeError:
+                    story["implementation_details"] = []
+            else:
+                story["implementation_details"] = []
+
+            if row["decisions"]:
+                try:
+                    story["decisions"] = json.loads(row["decisions"])
+                except json.JSONDecodeError:
+                    story["decisions"] = []
+            else:
+                story["decisions"] = []
+
+            if row["lessons"]:
+                try:
+                    story["lessons"] = json.loads(row["lessons"])
+                except json.JSONDecodeError:
+                    story["lessons"] = []
+            else:
+                story["lessons"] = []
+
+            # Apply filters that couldn't be in SQL
+            if repo_name and story["repo_name"] != repo_name:
+                continue
+
+            if needs_review and not story.get("needs_review", False):
+                continue
+
+            stories.append(story)
+
+    return stories
+
+
+def load_story(story_id: str) -> tuple[str, dict[str, Any]] | None:
+    """
+    Load a story by ID from the database.
+
+    Args:
+        story_id: Story ULID
+
+    Returns:
+        Tuple of (content, metadata) or None if not found
+    """
+    db = get_db()
+    story = db.get_story(story_id)
+
+    if not story:
+        return None
+
+    # Convert Story model to markdown content
+    content = f"""# {story.title}
+
+## Problem
+{story.problem or "No problem specified."}
+
+## Approach
+{story.approach or "No approach specified."}
+
+## Tradeoffs
+{story.tradeoffs or "No tradeoffs specified."}
+
+## Outcome
+{story.outcome or "No outcome specified."}
+
+## Implementation Details
+{chr(10).join(f"- {d}" for d in story.implementation_details) if story.implementation_details else "None"}
+
+## Decisions
+{chr(10).join(f"- {d}" for d in story.decisions) if story.decisions else "None"}
+
+## Lessons
+{chr(10).join(f"- {l}" for l in story.lessons) if story.lessons else "None"}
+
+## Technologies
+{chr(10).join(f"- {t}" for t in story.technologies) if story.technologies else "None"}
+"""
+
+    # Build metadata dict (for backward compatibility)
+    metadata = {
+        "id": story.id,
+        "title": story.title,
+        "summary": story.title,  # For compatibility
+        "problem": story.problem,
+        "approach": story.approach,
+        "tradeoffs": story.tradeoffs,
+        "outcome": story.outcome,
+        "category": story.category,
+        "scope": story.scope,
+        "created_at": story.created_at.isoformat() if story.created_at else None,
+        "updated_at": story.updated_at.isoformat() if story.updated_at else None,
+        "started_at": story.started_at.isoformat() if story.started_at else None,
+        "ended_at": story.ended_at.isoformat() if story.ended_at else None,
+        "technologies": story.technologies,
+        "implementation_details": story.implementation_details,
+        "decisions": story.decisions,
+        "lessons": story.lessons,
+        "files": story.files,
+        "commit_shas": story.commit_shas,
+        "session_ids": story.session_ids,
+        "hook": story.hook,
+        "what": story.what,
+        "value": story.value,
+        "insight": story.insight,
+        "show": story.show,
+        "diagram": story.diagram,
+        "post_body": story.post_body,
+    }
+
+    return content, metadata
+
+
+def delete_story(story_id: str) -> bool:
+    """
+    Delete a story by ID from the database.
+
+    Args:
+        story_id: Story ULID
+
+    Returns:
+        True if deleted, False if not found
+    """
+    db = get_db()
+    return db.delete_story(story_id)
+
+
 # Create Typer app
 app = typer.Typer(
     name="repr",
```
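The hunk above swaps JSON-file storage for direct SQLite reads. A minimal, self-contained sketch of the same access pattern (parameterized WHERE clause plus defensive decoding of JSON-encoded list columns) using only the standard library; the `stories` table and its columns here are illustrative stand-ins, not necessarily repr's actual schema:

```python
import json
import sqlite3
from datetime import datetime


def list_rows(
    conn: sqlite3.Connection,
    since: datetime | None = None,
    limit: int | None = None,
) -> list[dict]:
    """Query rows newest-first, decoding JSON-encoded list columns."""
    conn.row_factory = sqlite3.Row
    conditions, params = [], []
    if since:
        conditions.append("created_at >= ?")
        params.append(since.isoformat())
    where = " AND ".join(conditions) if conditions else "1=1"
    query = (
        "SELECT id, title, technologies, created_at "
        f"FROM stories WHERE {where} ORDER BY created_at DESC"
    )
    if limit:
        query += " LIMIT ?"
        params.append(limit)
    rows = []
    for row in conn.execute(query, params):
        item = dict(row)
        # TEXT columns holding JSON arrays decode defensively to []
        try:
            item["technologies"] = json.loads(row["technologies"] or "[]")
        except json.JSONDecodeError:
            item["technologies"] = []
        rows.append(item)
    return rows
```

Only the date filter and limit are pushed into SQL; as in the real helper, filters the query cannot express are applied per-row in Python.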
```diff
@@ -111,18 +345,30 @@ app = typer.Typer(
 
 # Sub-apps for command groups
 hooks_app = typer.Typer(help="Manage git post-commit hooks")
+cron_app = typer.Typer(help="Scheduled story generation (every 4h)")
 llm_app = typer.Typer(help="Configure LLM (local/cloud/BYOK)")
 privacy_app = typer.Typer(help="Privacy audit and controls")
 config_app = typer.Typer(help="View and modify configuration")
 data_app = typer.Typer(help="Backup, restore, and manage data")
 profile_app = typer.Typer(help="View and manage profile")
+mcp_app = typer.Typer(help="MCP server for AI agent integration")
+timeline_app = typer.Typer(help="Unified timeline of commits + AI sessions")
+friends_app = typer.Typer(help="Manage friends")
+skill_app = typer.Typer(help="Manage repr skill for AI agents")
+configure_app = typer.Typer(help="Configure repr (LLM, repos, schedule)")
 
 app.add_typer(hooks_app, name="hooks")
+app.add_typer(cron_app, name="cron")
 app.add_typer(llm_app, name="llm")
 app.add_typer(privacy_app, name="privacy")
 app.add_typer(config_app, name="config")
 app.add_typer(data_app, name="data")
 app.add_typer(profile_app, name="profile")
+app.add_typer(mcp_app, name="mcp")
+app.add_typer(timeline_app, name="timeline")
+app.add_typer(friends_app, name="friends")
+app.add_typer(skill_app, name="skill")
+app.add_typer(configure_app, name="configure")
 
 
 def version_callback(value: bool):
@@ -153,18 +399,147 @@ def main(
     ),
 ):
     """repr - understand what you've actually worked on.
-
+
     Cloud features require sign-in. Local generation always works offline.
     """
     # Migrate plaintext auth tokens on startup
     migrate_plaintext_auth()
-
+
+    # First-run detection: trigger wizard on first use
+    # Skip for: configure, --help, mcp (automated), CI, non-interactive
+    skip_first_run_commands = {"configure", "mcp", None}
+    if ctx.invoked_subcommand not in skip_first_run_commands:
+        import os
+        is_ci = os.getenv("CI") or os.getenv("GITHUB_ACTIONS") or os.getenv("REPR_CI")
+        is_interactive = sys.stdin.isatty() if hasattr(sys.stdin, 'isatty') else False
+        if is_interactive and not is_ci:
+            from .configure import is_first_run, run_full_wizard
+            if is_first_run():
+                run_full_wizard()
+
     # Track command usage (if telemetry enabled)
     from .telemetry import track_command
     if ctx.invoked_subcommand:
         track_command(ctx.invoked_subcommand)
 
 
+# =============================================================================
+# DASHBOARD
+# =============================================================================
+
+@app.command("dashboard")
+def dashboard(
+    port: int = typer.Option(
+        8787, "--port", "-p",
+        help="Port to serve on",
+    ),
+    host: str = typer.Option(
+        "127.0.0.1", "--host",
+        help="Host to bind to",
+    ),
+    open_browser: bool = typer.Option(
+        True, "--open/--no-open",
+        help="Auto-open browser (default: enabled)",
+    ),
+):
+    """
+    Launch web dashboard for exploring your stories.
+
+    Starts a local web server to browse and search through your
+    stories with rich context visualization.
+
+    Works from any directory - reads from central SQLite database.
+
+    Examples:
+        repr dashboard             # localhost:8787, auto-opens browser
+        repr dashboard --port 8080 # custom port
+        repr dashboard --no-open   # don't auto-open browser
+        rp dashboard               # using the 'rp' alias
+    """
+    import webbrowser
+    from .dashboard import run_server
+    from .db import get_db_path, get_db
+
+    # Check if SQLite database exists and has stories
+    if not get_db_path().exists():
+        print_error("No stories database found")
+        print_info("Run `repr generate` in a git repository first")
+        raise typer.Exit(1)
+
+    db = get_db()
+    stats = db.get_stats()
+    story_count = stats.get("story_count", 0)
+    project_count = stats.get("project_count", 0)
+
+    if story_count == 0:
+        print_error("No stories in database")
+        print_info("Run `repr generate` to create stories from commits")
+        raise typer.Exit(1)
+
+    console.print(f"Starting dashboard with [bold]{story_count} stories[/] from [bold]{project_count} repositories[/]")
+
+    # Ensure dashboard is built (build on the fly if needed)
+    _ensure_dashboard_built()
+
+    url = f"http://{host}:{port}"
+
+    print_header()
+    console.print(f"  URL: [bold blue]{url}[/]")
+    console.print()
+    console.print("[dim]Press Ctrl+C to stop[/]")
+    console.print()
+
+    if open_browser:
+        webbrowser.open(url)
+
+    try:
+        run_server(port, host)
+    except KeyboardInterrupt:
+        console.print()
+        print_info("Server stopped")
+    except OSError as e:
+        if "Address already in use" in str(e):
+            print_error(f"Port {port} is already in use")
+            print_info(f"Try: repr dashboard --port {port + 1}")
+        raise
+
+
+def _ensure_dashboard_built():
+    """Build dashboard if index.html doesn't exist or source files are newer."""
+    from pathlib import Path
+
+    dashboard_dir = Path(__file__).parent / "dashboard"
+    src_dir = dashboard_dir / "src"
+    index_html = dashboard_dir / "index.html"
+
+    # If no source directory, can't build
+    if not src_dir.exists():
+        return
+
+    # Check if we need to build
+    needs_build = False
+
+    if not index_html.exists():
+        needs_build = True
+        reason = "index.html not found"
+    else:
+        # Check if any source file is newer than index.html
+        index_mtime = index_html.stat().st_mtime
+
+        for src_file in src_dir.rglob("*"):
+            if src_file.is_file() and src_file.stat().st_mtime > index_mtime:
+                needs_build = True
+                reason = f"{src_file.relative_to(dashboard_dir)} is newer than index.html"
+                break
+
+    if needs_build:
+        from .dashboard.build import build
+        print_info(f"Building dashboard ({reason})...")
+        result = build()
+        if result != 0:
+            print_warning("Dashboard build failed, using cached version if available")
+
+
 # =============================================================================
 # INIT
 # =============================================================================
```
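The new `dashboard` command rebuilds its bundled assets only when a source file is newer than the built `index.html`. The staleness check, isolated as a sketch (the directory layout is an assumption for illustration):

```python
from pathlib import Path


def needs_rebuild(src_dir: Path, built_file: Path) -> bool:
    """True when the built artifact is missing or older than any source file."""
    if not built_file.exists():
        return True
    built_mtime = built_file.stat().st_mtime
    # any() short-circuits on the first newer source file, as the hunk's loop does
    return any(
        f.is_file() and f.stat().st_mtime > built_mtime
        for f in src_dir.rglob("*")
    )
```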
```diff
@@ -236,7 +611,65 @@ def init(
         console.print(f"Local LLM: detected {llm_info.name} at {llm_info.url}")
     else:
         console.print(f"[{BRAND_MUTED}]Local LLM: not detected (install Ollama for offline generation)[/]")
-
+
+    # Ask about automatic story generation
+    console.print()
+    console.print("[bold]Automatic Story Generation[/]")
+    console.print()
+    console.print("How should repr generate stories from your commits?")
+    console.print()
+    console.print(f"  [bold]1.[/] Scheduled (recommended) - Every 4 hours via cron")
+    console.print(f"     [{BRAND_MUTED}]Predictable, batches work, never interrupts[/]")
+    console.print()
+    console.print(f"  [bold]2.[/] On commit - After every 5 commits via git hook")
+    console.print(f"     [{BRAND_MUTED}]Real-time, but needs LLM running during commits[/]")
+    console.print()
+    console.print(f"  [bold]3.[/] Manual only - Run `repr generate` yourself")
+    console.print(f"     [{BRAND_MUTED}]Full control, no automation[/]")
+    console.print()
+
+    schedule_choice = Prompt.ask(
+        "Choose",
+        choices=["1", "2", "3"],
+        default="1",
+    )
+
+    from .hooks import install_hook
+    from .cron import install_cron
+
+    if schedule_choice == "1":
+        # Scheduled via cron
+        result = install_cron(interval_hours=4, min_commits=3)
+        if result["success"]:
+            print_success("Cron job installed (every 4h)")
+            # Install hooks for queue tracking (but disable auto-generate)
+            config = load_config()
+            config["generation"]["auto_generate_on_hook"] = False
+            save_config(config)
+            for repo in repos:
+                install_hook(Path(repo.path))
+                set_repo_hook_status(str(repo.path), True)
+        else:
+            print_warning(f"Could not install cron: {result['message']}")
+            print_info("You can set it up later with `repr cron install`")
+
+    elif schedule_choice == "2":
+        # On-commit via hooks
+        config = load_config()
+        config["generation"]["auto_generate_on_hook"] = True
+        save_config(config)
+        for repo in repos:
+            install_hook(Path(repo.path))
+            set_repo_hook_status(str(repo.path), True)
+        print_success(f"Hooks installed in {len(repos)} repos (generates after 5 commits)")
+
+    else:
+        # Manual only - disable auto-generation
+        config = load_config()
+        config["generation"]["auto_generate_on_hook"] = False
+        save_config(config)
+        print_info("Manual mode - run `repr generate` when you want stories")
+
     console.print()
     print_next_steps([
         "repr week  See what you worked on this week",
@@ -249,6 +682,90 @@ def init(
 # GENERATE
 # =============================================================================
 
+# Technology detection from file extensions
+_TECH_EXTENSIONS = {
+    ".py": "Python",
+    ".ts": "TypeScript",
+    ".tsx": "TypeScript",
+    ".js": "JavaScript",
+    ".jsx": "JavaScript",
+    ".go": "Go",
+    ".rs": "Rust",
+    ".java": "Java",
+    ".kt": "Kotlin",
+    ".swift": "Swift",
+    ".c": "C",
+    ".cpp": "C++",
+    ".h": "C",
+    ".hpp": "C++",
+    ".rb": "Ruby",
+    ".php": "PHP",
+    ".cs": "C#",
+    ".scala": "Scala",
+    ".vue": "Vue",
+    ".svelte": "Svelte",
+    ".sql": "SQL",
+    ".sh": "Shell",
+    ".bash": "Shell",
+    ".yaml": "YAML",
+    ".yml": "YAML",
+    ".json": "JSON",
+    ".graphql": "GraphQL",
+    ".prisma": "Prisma",
+}
+
+# Special file name patterns that indicate technologies
+_TECH_FILES = {
+    "Dockerfile": "Docker",
+    "docker-compose": "Docker",
+    "package.json": "Node.js",
+    "tsconfig.json": "TypeScript",
+    "pyproject.toml": "Python",
+    "requirements.txt": "Python",
+    "Cargo.toml": "Rust",
+    "go.mod": "Go",
+    "Gemfile": "Ruby",
+    "pom.xml": "Maven",
+    "build.gradle": "Gradle",
+    ".eslintrc": "ESLint",
+    "tailwind.config": "Tailwind CSS",
+    "next.config": "Next.js",
+    "vite.config": "Vite",
+    "webpack.config": "Webpack",
+}
+
+
+def _detect_technologies_from_files(files: list[str]) -> list[str]:
+    """
+    Detect technologies from file paths/extensions.
+
+    Args:
+        files: List of file paths
+
+    Returns:
+        Sorted list of detected technology names
+    """
+    tech = set()
+
+    for f in files:
+        # Handle files as either dict with 'path' or string
+        if isinstance(f, dict):
+            f = f.get("path", "")
+
+        # Check extensions
+        for ext, name in _TECH_EXTENSIONS.items():
+            if f.endswith(ext):
+                tech.add(name)
+                break
+
+        # Check special file names
+        for fname, name in _TECH_FILES.items():
+            if fname in f:
+                tech.add(name)
+
+    return sorted(tech)
+
+
 def _parse_date_reference(date_str: str) -> str | None:
     """
     Parse a date reference string into an ISO date string.
```
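Detection is a set union over the two maps above: extensions match with `endswith` (first hit wins), special names match by substring. A hypothetical invocation of `_detect_technologies_from_files` as defined in this hunk, showing both accepted input shapes; the file names are made up:

```python
files = [
    "src/api/server.py",
    "web/components/App.tsx",
    "Dockerfile",
    {"path": "prisma/schema.prisma"},  # dict entries with a 'path' key are also accepted
]
# Expected, given the maps above: ['Docker', 'Prisma', 'Python', 'TypeScript']
print(_detect_technologies_from_files(files))
```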
```diff
@@ -372,10 +889,18 @@ def generate(
         False, "--json",
         help="Output as JSON",
     ),
+    force: bool = typer.Option(
+        False, "--force", "-f",
+        help="Reprocess commits even if already in existing stories",
+    ),
+    with_sessions: bool = typer.Option(
+        True, "--with-sessions/--no-sessions", "-s",
+        help="Include AI session context (Claude Code, Clawdbot)",
+    ),
 ):
     """
     Generate stories from commits.
-
+
     Examples:
         repr generate --local
         repr generate --cloud
@@ -383,8 +908,16 @@ def generate(
         repr generate --days 30
         repr generate --template changelog
         repr generate --commits abc123,def456
+        repr generate --force  # Reprocess all commits
     """
+    import asyncio
+    from .timeline import extract_commits_from_git, detect_project_root, get_session_contexts_for_commits
+    from .story_synthesis import synthesize_stories
+    from .db import get_db
     from .privacy import check_cloud_permission, log_cloud_operation
+
+    def synthesize_stories_sync(*args, **kwargs):
+        return asyncio.run(synthesize_stories(*args, **kwargs))
 
     # Determine mode
     if cloud:
@@ -401,10 +934,16 @@ def generate(
         raise typer.Exit(1)
 
     if not local and not cloud:
-        #
-
+        # Check config for default mode
+        llm_config = get_llm_config()
+        default_mode = llm_config.get("default", "local")
+
+        if default_mode == "local":
+            local = True
+        elif default_mode == "cloud" and is_authenticated() and is_cloud_allowed():
             cloud = True
         else:
+            # Fallback: local if not signed in or cloud not allowed
             local = True
 
     if not json_output:
@@ -451,201 +990,193 @@ def generate(
     from .tools import get_commits_with_diffs, get_commits_by_shas
     from .discovery import analyze_repo
 
-
-
+    all_generated_stories = []
+
     for repo_path in repo_paths:
-        repo_info = analyze_repo(repo_path)
         if not json_output:
-            console.print(f"[bold]{
-
+            console.print(f"[bold]{repo_path.name}[/]")
+
+        # Determine commit range
+        repo_commits = []
         if commits:
-
-
-
-
+            # Specific SHA list filtering
+            repo_commits = extract_commits_from_git(repo_path, days=90)
+            target_shas = [s.strip() for s in commits.split(",")]
+            repo_commits = [c for c in repo_commits if any(c.sha.startswith(t) for t in target_shas)]
+        elif since_date:
+            # Parse date (rough approximation)
+            filter_days = 30
+            if "week" in since_date: filter_days = 14
+            if "month" in since_date: filter_days = 30
+            repo_commits = extract_commits_from_git(repo_path, days=filter_days)
         else:
-
-
-
-            # Parse natural language date if provided
-            if since_date:
-                since_str = _parse_date_reference(since_date)
-
-            # Recent commits within timeframe
-            if not json_output:
-                console.print(f"  Scanning commits...")
-            commit_list = get_commits_with_diffs(
-                repo_path,
-                count=500,  # Higher limit when filtering by time
-                days=timeframe_days,
-                since=since_str,
-            )
-            if not json_output and commit_list:
-                console.print(f"  Found {len(commit_list)} commits")
-
-        if not commit_list:
+            filter_days = days if days else 90
+            repo_commits = extract_commits_from_git(repo_path, days=filter_days)
+
+        if not repo_commits:
             if not json_output:
-                console.print(f"
+                console.print(f"  No matching commits found")
             continue
-
-        # Filter out already
-        from .
-
-
-
-
-
-        if
-            console.print(f"  [{BRAND_MUTED}]Skipping {
-
-        if not
+
+        # Filter out commits that are already part of existing stories (unless --force)
+        from .db import get_db
+        db = get_db()
+        project_id = db.register_project(repo_path, repo_path.name)
+        processed_shas = db.get_processed_commits(project_id)
+        if processed_shas and not force:
+            original_count = len(repo_commits)
+            repo_commits = [c for c in repo_commits if c.sha not in processed_shas]
+            skipped = original_count - len(repo_commits)
+            if skipped > 0 and not json_output:
+                console.print(f"  [{BRAND_MUTED}]Skipping {skipped} already-processed commits[/]")
+
+        if not repo_commits:
             if not json_output:
-                console.print(f"
+                console.print(f"  No new commits to process")
             continue
-
-        # Dry run: show what would be sent
-        if dry_run:
-            from .openai_analysis import estimate_tokens, get_batch_size
-            from .config import load_config
-
-            config = load_config()
-            max_commits = config.get("generation", {}).get("max_commits_per_batch", 50)
-            token_limit = config.get("generation", {}).get("token_limit", 100000)
 
-
-            console.print(f"  Template: {template}")
-            console.print()
+        if not json_output:
+            console.print(f"  Analyzing {len(repo_commits)} commits...")
 
-
-
+        # Load sessions if requested
+        repo_sessions = None
+        if with_sessions:
+            if not json_output:
+                console.print(f"  Looking for AI sessions...")
+
+            # Use same days lookback as commits
+            session_days = days if days else 90
+
+            def session_progress(stage: str, current: int, total: int) -> None:
+                if not json_output:
+                    if stage == "extracting":
+                        console.print(f"  Extracting session {current}/{total}...", end="\r")
+                    elif stage == "sessions_loaded" and current > 0:
+                        console.print(f"  Found {current} sessions")
+
+            try:
+                # Run async extraction in sync context
+                repo_sessions = asyncio.run(get_session_contexts_for_commits(
+                    repo_path,
+                    repo_commits,
+                    days=session_days,
+                    progress_callback=session_progress
+                ))
+                if not json_output and repo_sessions:
+                    console.print(f"\n  Enriched with {len(repo_sessions)} AI sessions")
+            except Exception as e:
+                if not json_output:
+                    print_warning(f"  Failed to load sessions: {e}")
 
-
-
-
-            console.print(f"  ⚠ {len(commit_list)} commits exceeds {max_commits}-commit limit")
-            console.print()
-            console.print(f"  Will split into {num_batches} batches:")
-            for batch_num in range(num_batches):
-                start = batch_num * max_commits + 1
-                end = min((batch_num + 1) * max_commits, len(commit_list))
-                batch_commits = commit_list[batch_num * max_commits:end]
-                batch_tokens = estimate_tokens(batch_commits)
-                console.print(f"  Batch {batch_num + 1}: commits {start}-{end} (est. {batch_tokens // 1000}k tokens)")
+        # Progress callback
+        def progress(current: int, total: int) -> None:
+            if not json_output:
+                console.print(f"  Batch {current}/{total}")
 
-
-
-
-
-
-
-
-        # Check token limits and prompt user if needed
-        from .openai_analysis import estimate_tokens
-        from .config import load_config
+        # Determine model based on mode (respect config default if no flag given)
+        llm_config = get_llm_config()
+        use_local = local or (not cloud and llm_config.get("default") == "local")
+        if use_local:
+            model = llm_config.get("local_model") or "llama3.2"
+        else:
+            model = None  # Use default cloud model
 
-
-
+        try:
+            # Run synthesis sync
+            stories, index = synthesize_stories_sync(
+                commits=repo_commits,
+                sessions=repo_sessions,
+                model=model,
+                batch_size=batch_size,
+                progress_callback=progress,
+            )
 
-
-
-
-
-
-
-
-
-
-
-
-
-
+            # Generate public/internal posts for each story
+            if stories and not dry_run:
+                from .story_synthesis import transform_story_for_feed_sync, _build_fallback_post
+
+                if not json_output:
+                    console.print(f"  Generating build log posts...")
+
+                for story in stories:
+                    try:
+                        # Generate Tripartite Codex content (internal includes all fields)
+                        result = transform_story_for_feed_sync(story, mode="internal")
+
+                        # Store structured fields
+                        story.hook = result.hook
+                        story.what = result.what
+                        story.value = result.value
+                        story.insight = result.insight
+                        story.show = result.show
+                        story.post_body = result.post_body
+
+                        # Internal-specific fields
+                        if hasattr(result, 'problem') and result.problem:
+                            story.problem = result.problem
+                        if hasattr(result, 'how') and result.how:
+                            story.implementation_details = result.how
+
+                        # Legacy fields for backward compatibility
+                        what_clean = result.what.rstrip(".").rstrip()
+                        value_clean = result.value.lstrip(".").lstrip()
+                        story.public_post = f"{result.hook}\n\n{what_clean}. {value_clean}\n\nInsight: {result.insight}"
+                        story.internal_post = story.public_post
+                        story.public_show = result.show
+                        story.internal_show = result.show
+                    except Exception as e:
+                        # Fallback: build from story data
+                        from .story_synthesis import _build_fallback_codex
+                        result = _build_fallback_codex(story, "internal")
+                        story.hook = result.hook
+                        story.what = result.what
+                        story.value = result.value
+                        story.insight = result.insight
+                        story.post_body = result.post_body
+                        what_clean = result.what.rstrip(".").rstrip()
+                        value_clean = result.value.lstrip(".").lstrip()
+                        story.public_post = f"{result.hook}\n\n{what_clean}. {value_clean}\n\nInsight: {result.insight}"
+                        story.internal_post = story.public_post
+
+            # Save to SQLite
+            if not dry_run and stories:
+                db = get_db()
+                project_id = db.register_project(repo_path, repo_path.name)
+
+                for story in stories:
+                    db.save_story(story, project_id)
+
+                # Update freshness with latest commit
+                if repo_commits:
+                    latest_commit = repo_commits[0]  # Already sorted by date desc
+                    db.update_freshness(
+                        project_id,
+                        latest_commit.sha,
+                        latest_commit.timestamp,
+                    )
+
+                if not json_output:
+                    print_success(f"Saved {len(stories)} stories to SQLite")
+            else:
+                if not json_output:
+                    print_info("Dry run - not saved")
 
-
-            console.print(f"  [{BRAND_MUTED}]Skipped {repo_info.name}[/]")
-            continue
+            all_generated_stories.extend(stories)
 
-
-        num_batches = (len(commit_list) + batch_size - 1) // batch_size
-
-        # Generate stories with progress tracking
-        if not json_output and num_batches > 1:
-            # Use progress bar for multiple batches
-            with BatchProgress(num_batches, f"Analyzing {repo_info.name}") as progress:
-                def on_progress(batch_num, total, status):
-                    if status == "complete":
-                        progress.update(1, f"batch {batch_num}/{total}")
-
-                stories = _generate_stories(
-                    commits=commit_list,
-                    repo_info=repo_info,
-                    batch_size=batch_size,
-                    local=local,
-                    template=template,
-                    custom_prompt=prompt,
-                    progress_callback=on_progress,
-                )
-        else:
-            # Single batch or JSON mode - no progress bar needed
-            if not json_output and num_batches == 1:
-                console.print(f"  Analyzing {len(commit_list)} commits...")
-
-            stories = _generate_stories(
-                commits=commit_list,
-                repo_info=repo_info,
-                batch_size=batch_size,
-                local=local,
-                template=template,
-                custom_prompt=prompt,
-            )
-
-        for story in stories:
+        except Exception as e:
             if not json_output:
-
-
-            all_stories.append(story)
-
-        # Log cloud operation if using cloud
-        if cloud and stories:
-            log_cloud_operation(
-                operation="cloud_generation",
-                destination="repr.dev",
-                payload_summary={
-                    "repo": repo_info.name,
-                    "commits": len(commit_list),
-                    "stories_generated": len(stories),
-                },
-                bytes_sent=len(str(commit_list)) // 2,  # Rough estimate
-            )
-
-        if not json_output:
-            console.print()
-
+                print_error(f"Failed to generate for {repo_path.name}: {e}")
+
     if json_output:
-        print(json.dumps({
-
-
-
-    else:
-        print_success(f"Generated {total_stories} stories")
+        print(json.dumps({
+            "success": True,
+            "stories_count": len(all_generated_stories),
+            "stories": [s.model_dump(mode="json") for s in all_generated_stories]
+        }, default=str))
+    elif all_generated_stories:
         console.print()
-
-
-            "repr stories View your stories",
-            "repr push Publish to repr.dev (requires login)",
-        ])
+        print_success(f"Generated {len(all_generated_stories)} stories")
+        print_info("Run `repr dashboard` to view")
 
 
 async def _generate_stories_async(
```
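The rewritten loop above makes generation incremental: commit SHAs already folded into stories are fetched once per project via `get_processed_commits` and skipped unless `--force` is given. The dedup step in isolation, with a stand-in `Commit` type replacing the real model:

```python
from dataclasses import dataclass


@dataclass
class Commit:  # stand-in for the real commit model
    sha: str


def filter_unprocessed(
    commits: list[Commit],
    processed_shas: set[str],
    force: bool = False,
) -> list[Commit]:
    """Drop commits already folded into existing stories, unless forced."""
    if force or not processed_shas:
        return commits
    return [c for c in commits if c.sha not in processed_shas]
```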
```diff
@@ -748,6 +1279,15 @@ async def _generate_stories_async(
         if not content or content.startswith("[Batch"):
             continue
 
+        # Get technologies from LLM output, fallback to file-based detection
+        technologies = story_output.technologies or []
+        if not technologies:
+            # Detect from files in this batch
+            all_files = []
+            for c in batch:
+                all_files.extend(c.get("files", []))
+            technologies = _detect_technologies_from_files(all_files)
+
         metadata = {
             "summary": summary,
             "repo_name": repo_info.name,
@@ -761,6 +1301,8 @@ async def _generate_stories_async(
             "generated_locally": local,
             "template": template,
             "needs_review": False,
+            # Technologies
+            "technologies": technologies,
             # Categories
             "category": story_output.category,
             "scope": story_output.scope,
@@ -815,49 +1357,286 @@ def _generate_stories(
 # STORIES MANAGEMENT
 # =============================================================================
 
-@app.command()
-def
-    repo: Optional[str] = typer.Option(None, "--repo", help="Filter by repository"),
-    category: Optional[str] = typer.Option(None, "--category", "-c", help="Filter by category (feature, bugfix, refactor, perf, infra, docs, test, chore)"),
-    scope: Optional[str] = typer.Option(None, "--scope", "-s", help="Filter by scope (user-facing, internal, platform, ops)"),
-    stack: Optional[str] = typer.Option(None, "--stack", help="Filter by stack (frontend, backend, database, infra, mobile, fullstack)"),
-    needs_review: bool = typer.Option(False, "--needs-review", help="Show only stories needing review"),
+@app.command("week")
+def week(
     json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
 ):
     """
-
+    Weekly summary — what you built in the last 7 days.
 
-
-    repr stories --repo myproject
-    repr stories --category feature
-    repr stories --scope user-facing
-    repr stories --stack backend
-    repr stories --needs-review
+    Provides a quick overview of your work from the past week,
+    including commits and generated stories.
     """
-
+    from .timeline import extract_commits_from_git
+    from .db import get_db
 
-
-
-
-
-
-
+    week_ago = datetime.now() - timedelta(days=7)
+
+    # Get stories from DB
+    db = get_db()
+    story_list = list_stories()
+    recent_stories = []
+    for s in story_list:
+        try:
+            created_at = s.get("created_at")
+            if isinstance(created_at, str):
+                dt = datetime.fromisoformat(created_at.replace("Z", "+00:00")).replace(tzinfo=None)
+            else:
+                dt = created_at.replace(tzinfo=None)
+
+            if dt > week_ago:
+                recent_stories.append(s)
+        except Exception:
+            continue
+
+    # Get commits from tracked repos
+    tracked = get_tracked_repos()
+    all_commits = []
+    for repo in tracked:
+        path = Path(repo["path"])
+        if path.exists():
+            repo_commits = extract_commits_from_git(path, days=7)
+            for c in repo_commits:
+                c_dict = {
+                    "sha": c.sha,
+                    "message": c.message,
+                    "date": c.timestamp.isoformat(),
+                    "repo_name": path.name,
+                    "insertions": c.insertions,
+                    "deletions": c.deletions,
+                }
+                all_commits.append(c_dict)
 
     if json_output:
-        print(json.dumps(
+        print(json.dumps({
+            "stories": recent_stories,
+            "commits": all_commits,
+            "period": "7 days",
+        }, indent=2, default=str))
         return
+
+    print_header()
+    console.print(f"[bold]Weekly Summary[/] (since {week_ago.strftime('%Y-%m-%d')})")
+    console.print()
 
-
-
-
-
+    # Stats
+    total_commits = len(all_commits)
+    repos = set(c.get("repo_name") for c in all_commits)
+    total_adds = sum(c.get("insertions", 0) for c in all_commits)
+    total_dels = sum(c.get("deletions", 0) for c in all_commits)
 
-    console.print(f"
+    console.print(f"  {total_commits} commits across {len(repos)} repos")
+    console.print(f"  [{BRAND_SUCCESS}]+{total_adds}[/] / [{BRAND_ERROR}]-{total_dels}[/] lines changed")
     console.print()
-
-    #
+
+    # Recent stories
+    if recent_stories:
+        console.print("[bold]Stories Generated[/]")
+        for s in recent_stories[:10]:
+            summary = s.get("summary", s.get("title", "Untitled"))
+            repo = s.get("repo_name", "unknown")
+            console.print(f"  • {summary} [{BRAND_MUTED}]({repo})[/]")
+        console.print()
+
+    # Commits by repo
+    console.print("[bold]Recent Activity[/]")
+    by_repo = defaultdict(list)
+    for c in all_commits:
+        by_repo[c["repo_name"]].append(c)
+
+    for repo_name, repo_commits in sorted(by_repo.items(), key=lambda x: -len(x[1])):
+        console.print(f"  [bold]{repo_name}[/] ({len(repo_commits)} commits)")
+        for c in repo_commits[:3]:
+            msg = c["message"].split("\n")[0][:60]
+            console.print(f"    - {msg}")
+        if len(repo_commits) > 3:
+            console.print(f"    [{BRAND_MUTED}]... and {len(repo_commits) - 3} more[/]")
+
+    console.print()
+    print_info("Run `repr generate` to turn recent commits into stories.")
+
+
+@app.command("standup")
+def standup(
+    json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
+):
+    """
+    Quick standup — what you did yesterday and today.
+    """
+    from .timeline import extract_commits_from_git
+
+    today = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
+    yesterday = today - timedelta(days=1)
+
+    tracked = get_tracked_repos()
+    all_commits = []
+    for repo in tracked:
+        path = Path(repo["path"])
+        if path.exists():
+            repo_commits = extract_commits_from_git(path, days=2)
+            for c in repo_commits:
+                c_dict = {
+                    "sha": c.sha,
+                    "message": c.message,
+                    "date": c.timestamp.isoformat(),
+                    "repo_name": path.name,
+                }
+                all_commits.append(c_dict)
+
+    today_commits = []
+    yesterday_commits = []
+
+    for c in all_commits:
+        try:
+            commit_date = datetime.fromisoformat(c["date"].replace("Z", "+00:00")).replace(tzinfo=None)
+            if commit_date >= today:
+                today_commits.append(c)
+            elif commit_date >= yesterday:
+                yesterday_commits.append(c)
+        except (ValueError, TypeError):
+            yesterday_commits.append(c)
+
+    if json_output:
+        print(json.dumps({
+            "today": today_commits,
+            "yesterday": yesterday_commits,
+        }, indent=2, default=str))
+        return
+
+    print_header()
+    console.print("[bold]Standup Summary[/]")
+    console.print()
+
+    if yesterday_commits:
+        console.print("[bold]Yesterday[/]")
+        for c in yesterday_commits:
+            msg = c["message"].split("\n")[0][:70]
+            console.print(f"  • {msg} [{BRAND_MUTED}]({c['repo_name']})[/]")
+        console.print()
+
+    if today_commits:
+        console.print("[bold]Today[/]")
+        for c in today_commits:
+            msg = c["message"].split("\n")[0][:70]
+            console.print(f"  • {msg} [{BRAND_MUTED}]({c['repo_name']})[/]")
+        console.print()
+
+    if not yesterday_commits and not today_commits:
+        print_info("No activity found in the last 2 days.")
+    else:
+        print_info("Generated from local git history.")
+
+
+@app.command("since")
+def since(
+    date_ref: str = typer.Argument(..., help="Date reference (e.g., 'monday', '2024-01-01', '3 days ago')"),
+    json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
+):
+    """
+    Reflection summary since a specific date.
+    """
+    from .timeline import extract_commits_from_git
+
+    parsed_date_str = _parse_date_reference(date_ref)
+    if not parsed_date_str:
+        print_error(f"Could not parse date: {date_ref}")
+        print_info("Try: 'monday', 'yesterday', '2024-01-01', '3 days ago'")
+        raise typer.Exit(1)
+
+    since_date = datetime.fromisoformat(parsed_date_str)
+    now = datetime.now()
+    days_back = (now - since_date).days + 1
+
+    tracked = get_tracked_repos()
+    all_commits = []
+    for repo in tracked:
+        path = Path(repo["path"])
+        if path.exists():
+            repo_commits = extract_commits_from_git(path, days=days_back)
+            for c in repo_commits:
+                if c.timestamp.replace(tzinfo=None) >= since_date:
+                    c_dict = {
+                        "sha": c.sha,
+                        "message": c.message,
+                        "date": c.timestamp.isoformat(),
+                        "repo_name": path.name,
+                    }
+                    all_commits.append(c_dict)
+
+    if json_output:
+        print(json.dumps({
+            "since": parsed_date_str,
+            "commits": all_commits,
+        }, indent=2, default=str))
+        return
+
+    print_header()
+    console.print(f"[bold]Work since {date_ref}[/] ({since_date.strftime('%Y-%m-%d')})")
+    console.print()
+
+    if not all_commits:
+        print_info(f"No commits found since {date_ref}.")
+        return
+
+    by_repo = defaultdict(list)
+    for c in all_commits:
+        by_repo[c["repo_name"]].append(c)
+
+    for repo_name, repo_commits in sorted(by_repo.items(), key=lambda x: -len(x[1])):
+        console.print(f"  [bold]{repo_name}[/] ({len(repo_commits)} commits)")
+        for c in repo_commits[:5]:
+            msg = c["message"].split("\n")[0][:60]
+            console.print(f"    - {msg}")
+        if len(repo_commits) > 5:
+            console.print(f"    [{BRAND_MUTED}]... and {len(repo_commits) - 5} more[/]")
+
+    console.print()
+    print_info(f"Summary based on {len(all_commits)} commits.")
+
+
+@app.command()
+def stories(
+    repo: Optional[str] = typer.Option(None, "--repo", help="Filter by repository"),
+    category: Optional[str] = typer.Option(None, "--category", "-c", help="Filter by category (feature, bugfix, refactor, perf, infra, docs, test, chore)"),
+    scope: Optional[str] = typer.Option(None, "--scope", "-s", help="Filter by scope (user-facing, internal, platform, ops)"),
+    stack: Optional[str] = typer.Option(None, "--stack", help="Filter by stack (frontend, backend, database, infra, mobile, fullstack)"),
+    needs_review: bool = typer.Option(False, "--needs-review", help="Show only stories needing review"),
+    json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
+):
+    """
+    List all stories.
+
+    Example:
+        repr stories
+        repr stories --repo myproject
+        repr stories --category feature
+        repr stories --scope user-facing
+        repr stories --stack backend
+        repr stories --needs-review
+    """
+    story_list = list_stories(repo_name=repo, needs_review=needs_review)
+
+    # Apply category filters (local filtering since storage doesn't support these yet)
+    if category:
+        story_list = [s for s in story_list if s.get("category") == category]
+    if scope:
+        story_list = [s for s in story_list if s.get("scope") == scope]
+    if stack:
+        story_list = [s for s in story_list if s.get("stack") == stack]
+
+    if json_output:
+        print(json.dumps(story_list, indent=2, default=str))
+        return
+
+    if not story_list:
+        print_info("No stories found.")
+        print_info("Run `repr generate` to create stories from your commits.")
+        raise typer.Exit()
+
+    console.print(f"[bold]Stories[/] ({len(story_list)} total)")
+    console.print()
+
+    # Group stories by repository
     by_repo = defaultdict(list)
     for story in story_list[:20]:
         r_name = story.get("repo_name", "unknown")
```
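Taken together, the new read-only commands give quick reporting without triggering generation. Example invocations, with output shapes following the `--json` branches above:

```sh
repr week --json          # {"stories": [...], "commits": [...], "period": "7 days"}
repr standup              # yesterday/today commits from local git history
repr since monday         # commits grouped by repo since last Monday
repr stories --category feature --needs-review
```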
@@ -1100,93 +1879,265 @@ def stories_review():
 
 @app.command()
 def push(
-
-
+    visibility: str = typer.Option("friends", "--visibility", help="Visibility setting: public, friends, private"),
+    force: bool = typer.Option(False, "--force", help="Re-push all stories, even if already pushed"),
     dry_run: bool = typer.Option(False, "--dry-run", help="Preview what would be pushed"),
 ):
     """
-
-
+    Sync local stories to repr.dev backend.
+
     Examples:
         repr push
-        repr push --
+        repr push --visibility public
+        repr push --force
     """
     from .privacy import check_cloud_permission, log_cloud_operation
-
+    from .api import push_stories_batch, APIError, AuthError
+    from .db import get_db
+
+    # Check authentication
+    if not get_access_token():
+        print_error("Not authenticated")
+        print_info("Run 'repr login' to authenticate")
+        raise typer.Exit(1)
+
     allowed, reason = check_cloud_permission("push")
     if not allowed:
         print_error("Publishing blocked")
         print_info(reason)
         raise typer.Exit(1)
-
-    # Get stories
+
+    # Get all stories from database
+    db = get_db()
+    all_stories = db.list_stories(limit=10000)
+
+    if not all_stories:
+        print_info("No stories to push")
+        raise typer.Exit()
+
+    console.print(f"Found {len(all_stories)} story(ies) in local database")
+    console.print()
+
+    if dry_run:
+        for story in all_stories:
+            console.print(f" • {story.title[:60]}")
+        console.print()
+        console.print("Run without --dry-run to push")
+        raise typer.Exit()
+
+    # Build batch payload
+    console.print(f"Preparing to push with visibility: {visibility}...")
+
+    stories_payload = []
+    for story in all_stories:
+        # Convert Story model to dict and ensure author_name is included
+        payload = story.model_dump(mode="json")
+        payload["visibility"] = visibility
+        payload["client_id"] = story.id  # Use story ID for sync tracking
+        stories_payload.append(payload)
+
+    # Push all stories in batch with progress
+    console.print(f"Pushing {len(stories_payload)} stories...")
+    console.print()
+
+    with BatchProgress() as progress:
+        try:
+            result = asyncio.run(push_stories_batch(stories_payload))
+            pushed = result.get("pushed", 0)
+            failed = result.get("failed", 0)
+            results = result.get("results", [])
+
+            # Display results
+            for i, story_result in enumerate(results):
+                story_title = all_stories[i].title[:50] if i < len(all_stories) else "Unknown"
+
+                if story_result.get("success"):
+                    console.print(f" [{BRAND_SUCCESS}]✓[/] {story_title}")
+                else:
+                    error_msg = story_result.get("error", "Unknown error")
+                    console.print(f" [{BRAND_ERROR}]✗[/] {story_title}: {error_msg}")
+
+        except (APIError, AuthError) as e:
+            print_error(f"Batch push failed: {e}")
+            raise typer.Exit(1)
+
+    # Log operation
+    if pushed > 0:
+        log_cloud_operation(
+            operation="push",
+            destination="repr.dev",
+            payload_summary={
+                "stories_pushed": pushed,
+                "visibility": visibility,
+                "force": force,
+            },
+            bytes_sent=0,
+        )
+
+    console.print()
+    if failed > 0:
+        print_warning(f"Pushed {pushed}/{len(stories_payload)} stories ({failed} failed)")
+    else:
+        print_success(f"Pushed {pushed}/{len(stories_payload)} stories")
+
+
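The rewritten `push` serializes each story with Pydantic v2's `model_dump(mode="json")`, which converts datetimes and other rich types to JSON-safe values, then stamps the payload with the CLI-level `visibility` and a `client_id` the backend can use to deduplicate re-pushes. A minimal sketch under that assumption (the `Story` fields beyond `id` and `title` are illustrative, not the package's actual model):

```python
from datetime import datetime, timezone
from pydantic import BaseModel

class Story(BaseModel):          # illustrative stand-in, not repr/models.py
    id: str
    title: str
    created_at: datetime

story = Story(id="01HXYZ", title="Example", created_at=datetime.now(timezone.utc))

payload = story.model_dump(mode="json")  # created_at becomes an ISO-8601 string
payload["visibility"] = "friends"        # CLI-level visibility override
payload["client_id"] = story.id          # lets the server dedupe re-pushes
print(type(payload["created_at"]))       # <class 'str'>
```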
+@app.command()
+def publish(
+    story_id: Optional[str] = typer.Argument(None, help="Story ID to publish (omit for batch publish)"),
+    all_stories: bool = typer.Option(False, "--all", "-a", help="Republish all stories, including already-pushed"),
+    repo: Optional[str] = typer.Option(None, "--repo", "-r", help="Publish stories from specific repository"),
+    visibility: Optional[str] = typer.Option(None, "--visibility", "-v", help="Override visibility: public, private, connections"),
+    dry_run: bool = typer.Option(False, "--dry-run", help="Preview what would be published"),
+):
+    """
+    Publish stories to repr.dev.
+
+    Publish = Upload to cloud with current visibility (or override).
+
+    Supports three scopes:
+    - Global: Publish all unpushed stories (or all with --all)
+    - Repo: Publish stories from a specific repository (--repo)
+    - Story: Publish a single story by ID
+
+    Examples:
+        repr publish                             # Unpushed stories only
+        repr publish --all                       # All stories (re-publish)
+        repr publish --repo myproject            # Stories from specific repo
+        repr publish 01HXYZ123                   # Single story by ID
+        repr publish --visibility public         # Publish as public
+        repr publish --repo myproject --dry-run  # Preview
+    """
+    from .privacy import check_cloud_permission, log_cloud_operation
+    from .api import push_stories_batch, APIError, AuthError
+    from .db import get_db
+
+    # Check authentication
+    if not get_access_token():
+        print_error("Not authenticated")
+        print_info("Run 'repr login' to authenticate")
+        raise typer.Exit(1)
+
+    allowed, reason = check_cloud_permission("push")
+    if not allowed:
+        print_error("Publishing blocked")
+        print_info(reason)
+        raise typer.Exit(1)
+
+    # Validate visibility if provided
+    valid_visibilities = {"public", "private", "connections"}
+    if visibility and visibility not in valid_visibilities:
+        print_error(f"Invalid visibility: {visibility}")
+        print_info(f"Valid options: {', '.join(valid_visibilities)}")
+        raise typer.Exit(1)
+
+    db = get_db()
+
+    # Determine which stories to publish based on scope
     if story_id:
-
-
+        # Single story mode
+        story = db.get_story(story_id)
+        if not story:
             print_error(f"Story not found: {story_id}")
             raise typer.Exit(1)
-
-
+        all_stories_list = [story]
+        scope_desc = f"story {story_id[:8]}..."
+    elif repo:
+        # Repo mode - get stories from specific repo
+        projects = db.list_projects()
+        project_ids = [p["id"] for p in projects if p["name"] == repo]
+        if not project_ids:
+            print_error(f"Repository not found: {repo}")
+            print_info("Use 'repr repos list' to see tracked repositories")
+            raise typer.Exit(1)
+        all_stories_list = [s for s in db.list_stories(limit=10000) if s.project_id in project_ids]
+        scope_desc = f"repo '{repo}'"
     else:
-
-
-
-
+        # Global mode
+        all_stories_list = db.list_stories(limit=10000)
+        scope_desc = "all repositories"
+
+    if not all_stories_list:
+        print_info(f"No stories found for {scope_desc}")
         raise typer.Exit()
-
-
+
+    # Filter to unpushed unless --all is specified
+    # Note: For now we don't track pushed_at in the schema, so --all just means republish everything
+    # In future, we could filter by pushed_at column
+    stories_to_publish = all_stories_list
+
+    if not stories_to_publish:
+        print_info("No stories to publish")
+        raise typer.Exit()
+
+    console.print(f"Found [bold]{len(stories_to_publish)}[/] stories from {scope_desc}")
     console.print()
-
+
     if dry_run:
-
-
+        console.print("[dim]Preview (dry-run):[/]")
+        for story in stories_to_publish[:20]:  # Limit preview to 20
+            vis = visibility or story.visibility or "private"
+            console.print(f" • [{vis}] {story.title[:55]}...")
+        if len(stories_to_publish) > 20:
+            console.print(f" ... and {len(stories_to_publish) - 20} more")
         console.print()
         console.print("Run without --dry-run to publish")
         raise typer.Exit()
-
+
     # Build batch payload
-
-
+    vis_label = visibility or "default"
+    console.print(f"Publishing with visibility: [bold]{vis_label}[/]...")
+
     stories_payload = []
-    for
-
-    # Use
-    payload =
+    for story in stories_to_publish:
+        payload = story.model_dump(mode="json")
+        # Use override visibility or story's current visibility
+        payload["visibility"] = visibility or story.visibility or "private"
+        payload["client_id"] = story.id
         stories_payload.append(payload)
-
-    # Push all stories in
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+    # Push all stories in batch with progress
+    console.print(f"Publishing {len(stories_payload)} stories...")
+    console.print()
+
+    with BatchProgress() as progress:
+        try:
+            result = asyncio.run(push_stories_batch(stories_payload))
+            pushed = result.get("pushed", 0)
+            failed = result.get("failed", 0)
+            results = result.get("results", [])
+
+            # Display results
+            for i, story_result in enumerate(results):
+                story_title = stories_to_publish[i].title[:50] if i < len(stories_to_publish) else "Unknown"
+                if story_result.get("success"):
+                    console.print(f" [{BRAND_SUCCESS}]✓[/] {story_title}")
+                else:
+                    error_msg = story_result.get("error", "Unknown error")
+                    console.print(f" [{BRAND_ERROR}]✗[/] {story_title}: {error_msg}")
+
+        except (APIError, AuthError) as e:
+            print_error(f"Publish failed: {e}")
+            raise typer.Exit(1)
+
     # Log operation
     if pushed > 0:
         log_cloud_operation(
-            operation="
+            operation="publish",
             destination="repr.dev",
-            payload_summary={
+            payload_summary={
+                "stories_published": pushed,
+                "visibility": visibility or "default",
+                "scope": "story" if story_id else ("repo" if repo else "global"),
+                "repo": repo,
+            },
             bytes_sent=0,
         )
-
+
     console.print()
-
+    if failed > 0:
+        print_warning(f"Published {pushed}/{len(stories_payload)} stories ({failed} failed)")
+    else:
+        print_success(f"Published {pushed} stories to repr.dev")
 
 
 @app.command()
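The new `publish` command resolves its target set from one of three scopes before building the same batch payload as `push`. Condensed as a pure function, the branch reads roughly as below (the `db` methods mirror the calls in the hunk; error handling is reduced to `SystemExit` for brevity):

```python
from typing import Optional

def resolve_scope(db, story_id: Optional[str], repo: Optional[str]):
    """Return (stories, human-readable scope description)."""
    if story_id:  # single-story scope
        story = db.get_story(story_id)
        if not story:
            raise SystemExit(f"Story not found: {story_id}")
        return [story], f"story {story_id[:8]}..."
    if repo:      # repo scope
        project_ids = [p["id"] for p in db.list_projects() if p["name"] == repo]
        if not project_ids:
            raise SystemExit(f"Repository not found: {repo}")
        stories = [s for s in db.list_stories(limit=10000)
                   if s.project_id in project_ids]
        return stories, f"repo '{repo}'"
    return db.list_stories(limit=10000), "all repositories"  # global scope
```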
@@ -1734,58 +2685,190 @@ def hooks_queue(
 
 
 # =============================================================================
-#
+# CRON SCHEDULING
 # =============================================================================
 
-@
-def
-
+@cron_app.command("install")
+def cron_install(
+    interval: int = typer.Option(4, "--interval", "-i", help="Hours between runs (default: 4)"),
+    min_commits: int = typer.Option(3, "--min-commits", "-m", help="Minimum commits to trigger generation"),
 ):
     """
-
-
+    Install cron job for automatic story generation.
+
+    Runs every 4 hours by default, only generating if there are
+    enough commits in the queue.
+
     Example:
-        repr
+        repr cron install
+        repr cron install --interval 6 --min-commits 5
     """
-
-
-
-
-
-
-
-
-
-
-
-
-        raise typer.Exit(1)
-
-    # Test the key
-    console.print("Testing connection...")
-    from .llm import test_byok_provider
-    result = test_byok_provider(provider, api_key)
-
-    if result.success:
-        add_byok_provider(provider, api_key)
-        print_success(f"Added {provider_info['name']}")
-        console.print(f" Response time: {result.response_time_ms:.0f}ms")
+    from .cron import install_cron
+
+    result = install_cron(interval, min_commits)
+
+    if result["success"]:
+        if result["already_installed"]:
+            print_success(result["message"])
+        else:
+            print_success(result["message"])
+            console.print()
+            console.print(f"[{BRAND_MUTED}]Stories will generate every {interval}h when queue has ≥{min_commits} commits[/]")
+            console.print(f"[{BRAND_MUTED}]Logs: ~/.repr/logs/cron.log[/]")
     else:
-        print_error(
-
-        add_byok_provider(provider, api_key)
-        print_success(f"Added {provider_info['name']}")
+        print_error(result["message"])
+        raise typer.Exit(1)
 
 
-@
-def
-    provider: str = typer.Argument(..., help="Provider to remove"),
-):
+@cron_app.command("remove")
+def cron_remove():
     """
-    Remove
-
+    Remove cron job for story generation.
+
     Example:
-        repr
+        repr cron remove
+    """
+    from .cron import remove_cron
+
+    result = remove_cron()
+
+    if result["success"]:
+        print_success(result["message"])
+    else:
+        print_error(result["message"])
+        raise typer.Exit(1)
+
+
+@cron_app.command("pause")
+def cron_pause():
+    """
+    Pause cron job without removing it.
+
+    Example:
+        repr cron pause
+    """
+    from .cron import pause_cron
+
+    result = pause_cron()
+
+    if result["success"]:
+        print_success(result["message"])
+        console.print(f"[{BRAND_MUTED}]Use `repr cron resume` to re-enable[/]")
+    else:
+        print_error(result["message"])
+        raise typer.Exit(1)
+
+
+@cron_app.command("resume")
+def cron_resume():
+    """
+    Resume paused cron job.
+
+    Example:
+        repr cron resume
+    """
+    from .cron import resume_cron
+
+    result = resume_cron()
+
+    if result["success"]:
+        print_success(result["message"])
+    else:
+        print_error(result["message"])
+        raise typer.Exit(1)
+
+
+@cron_app.command("status")
+def cron_status(
+    json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
+):
+    """
+    Show cron job status.
+
+    Example:
+        repr cron status
+    """
+    from .cron import get_cron_status
+
+    status = get_cron_status()
+
+    if json_output:
+        print(json.dumps(status, indent=2))
+        return
+
+    console.print("[bold]Cron Status[/]")
+    console.print()
+
+    if not status["installed"]:
+        console.print(f"[{BRAND_MUTED}]○[/] Not installed")
+        console.print()
+        console.print(f"[{BRAND_MUTED}]Run `repr cron install` to enable scheduled generation[/]")
+        return
+
+    if status["paused"]:
+        console.print(f"[{BRAND_WARNING}]⏸[/] Paused")
+        console.print(f" [{BRAND_MUTED}]Interval: every {status['interval_hours']}h[/]")
+        console.print()
+        console.print(f"[{BRAND_MUTED}]Run `repr cron resume` to re-enable[/]")
+    else:
+        console.print(f"[{BRAND_SUCCESS}]✓[/] Active")
+        console.print(f" [{BRAND_MUTED}]Interval: every {status['interval_hours']}h[/]")
+        console.print(f" [{BRAND_MUTED}]Logs: ~/.repr/logs/cron.log[/]")
+
+
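Every cron subcommand delegates to a helper in the new `repr/cron.py` and renders a plain result dict; the CLI only relies on the keys `success`, `message`, and (for install) `already_installed`, while `get_cron_status` additionally exposes `installed`, `paused`, and `interval_hours`. An illustrative sketch of that contract follows; the snippet file and schedule line below are assumptions, not the module's actual mechanism:

```python
from pathlib import Path

MARKER = "# repr-story-generation"

def install_cron(interval: int, min_commits: int) -> dict:
    """Sketch only: repr/cron.py's real implementation may differ."""
    snippet = Path.home() / ".repr" / "cron.snippet"  # hypothetical location
    if snippet.exists() and MARKER in snippet.read_text():
        return {"success": True, "already_installed": True,
                "message": "Cron job already installed"}
    try:
        snippet.parent.mkdir(parents=True, exist_ok=True)
        # min_commits gating happens inside the scheduled job itself
        snippet.write_text(f"{MARKER}\n0 */{interval} * * * repr generate\n")
    except OSError as e:
        return {"success": False, "already_installed": False,
                "message": f"Failed to install cron job: {e}"}
    return {"success": True, "already_installed": False,
            "message": f"Cron job installed (every {interval}h)"}
```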
+# =============================================================================
+# LLM CONFIGURATION
+# =============================================================================
+
+@llm_app.command("add")
+def llm_add(
+    provider: str = typer.Argument(..., help="Provider: openai, anthropic, groq, together"),
+):
+    """
+    Configure a BYOK provider.
+
+    Example:
+        repr llm add openai
+    """
+    if provider not in BYOK_PROVIDERS:
+        print_error(f"Unknown provider: {provider}")
+        print_info(f"Available: {', '.join(BYOK_PROVIDERS.keys())}")
+        raise typer.Exit(1)
+
+    provider_info = BYOK_PROVIDERS[provider]
+    console.print(f"Configure {provider_info['name']}")
+    console.print()
+
+    api_key = Prompt.ask("API Key", password=True)
+    if not api_key:
+        print_error("API key required")
+        raise typer.Exit(1)
+
+    # Test the key
+    console.print("Testing connection...")
+    from .llm import test_byok_provider
+    result = test_byok_provider(provider, api_key)
+
+    if result.success:
+        add_byok_provider(provider, api_key)
+        print_success(f"Added {provider_info['name']}")
+        console.print(f" Response time: {result.response_time_ms:.0f}ms")
+    else:
+        print_error(f"Connection failed: {result.error}")
+        if confirm("Save anyway?"):
+            add_byok_provider(provider, api_key)
+            print_success(f"Added {provider_info['name']}")
+
+
+@llm_app.command("remove")
+def llm_remove(
+    provider: str = typer.Argument(..., help="Provider to remove"),
+):
+    """
+    Remove a BYOK provider key.
+
+    Example:
+        repr llm remove openai
     """
     if remove_byok_provider(provider):
         print_success(f"Removed {provider}")
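`llm add` validates a key before persisting it: probe the provider, save on success, and save on failure only after an explicit confirmation. A sketch of the test half, assuming a result object with the `success`, `response_time_ms`, and `error` fields the CLI reads (the probe itself is a stand-in):

```python
import time
from dataclasses import dataclass
from typing import Optional

@dataclass
class TestResult:
    success: bool
    response_time_ms: float = 0.0
    error: Optional[str] = None

def ping_provider(provider: str, api_key: str) -> None:
    """Stand-in for a minimal authenticated request to the provider."""
    if not api_key.strip():
        raise ValueError("empty API key")

def test_byok_provider(provider: str, api_key: str) -> TestResult:
    start = time.perf_counter()
    try:
        ping_provider(provider, api_key)
    except Exception as e:
        return TestResult(success=False, error=str(e))
    return TestResult(success=True,
                      response_time_ms=(time.perf_counter() - start) * 1000)
```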
@@ -1948,6 +3031,77 @@ def llm_test():
         console.print(f" [{BRAND_SUCCESS}]✓[/] {provider}")
 
 
+# =============================================================================
+# CONFIGURE (unified setup wizard)
+# =============================================================================
+
+@configure_app.callback(invoke_without_command=True)
+def configure_main(ctx: typer.Context):
+    """
+    Configure repr settings (LLM, repos, schedule).
+
+    Run without arguments to see an interactive menu.
+    Use subcommands for direct configuration:
+
+        repr configure llm       Configure LLM provider
+        repr configure repos     Configure tracked repositories
+        repr configure schedule  Configure automatic generation
+
+    Example:
+        repr configure
+    """
+    if ctx.invoked_subcommand is None:
+        from .configure import run_configure_menu
+        run_configure_menu()
+
+
+@configure_app.command("llm")
+def configure_llm():
+    """
+    Configure LLM provider interactively.
+
+    Supports:
+    - Local: Ollama, LM Studio
+    - API: OpenAI, Anthropic, Gemini, Groq, Together, OpenRouter
+
+    Example:
+        repr configure llm
+    """
+    from .configure import wizard_llm
+    wizard_llm()
+
+
+@configure_app.command("repos")
+def configure_repos():
+    """
+    Configure tracked repositories.
+
+    Scans for git repositories and lets you select which to track.
+
+    Example:
+        repr configure repos
+    """
+    from .configure import wizard_repos
+    wizard_repos()
+
+
+@configure_app.command("schedule")
+def configure_schedule():
+    """
+    Configure automatic story generation schedule.
+
+    Options:
+    - Scheduled (cron) - Every N hours
+    - On commit (hooks) - After N commits
+    - Manual - Run `repr generate` yourself
+
+    Example:
+        repr configure schedule
+    """
+    from .configure import wizard_schedule
+    wizard_schedule()
+
+
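`configure` doubles as a command and a command group by registering its callback with `invoke_without_command=True`; `ctx.invoked_subcommand` is `None` exactly when the user typed bare `repr configure`, which routes to the interactive menu. A self-contained illustration of the same Typer pattern:

```python
import typer

demo_app = typer.Typer()

@demo_app.callback(invoke_without_command=True)
def main(ctx: typer.Context):
    # Runs for both `demo` and `demo llm`; the attribute tells them apart.
    if ctx.invoked_subcommand is None:
        typer.echo("no subcommand given: fall through to the interactive menu")

@demo_app.command()
def llm():
    typer.echo("direct subcommand path")

if __name__ == "__main__":
    demo_app()
```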
 # =============================================================================
 # PRIVACY
 # =============================================================================
@@ -2340,12 +3494,12 @@ def data_restore(
 def data_clear_cache():
     """
     Clear local cache.
-
+
     Example:
         repr data clear-cache
     """
     from .config import clear_cache, get_cache_size
-
+
     size = get_cache_size()
     clear_cache()
     print_success(f"Cache cleared ({format_bytes(size)} freed)")
@@ -2353,6 +3507,199 @@ def data_clear_cache():
     console.print(" Config preserved")
 
 
+@data_app.command("migrate-db")
+def data_migrate_db(
+    dry_run: bool = typer.Option(False, "--dry-run", help="Show what would be migrated"),
+    project: Optional[Path] = typer.Option(None, "--project", "-p", help="Migrate specific project"),
+):
+    """
+    Migrate store.json files to central SQLite database.
+
+    This command imports existing .repr/store.json files into the central
+    SQLite database at ~/.repr/stories.db for faster queries.
+
+    Example:
+        repr data migrate-db            # Migrate all tracked repos
+        repr data migrate-db --dry-run  # Preview migration
+        repr data migrate-db -p /path/to/repo
+    """
+    from .storage import migrate_stores_to_db, get_db_stats
+
+    project_paths = [project] if project else None
+
+    with create_spinner("Migrating stories to SQLite...") as progress:
+        task = progress.add_task("migrating", total=None)
+
+        if dry_run:
+            console.print("[bold]Dry run mode - no changes will be made[/]\n")
+
+        stats = migrate_stores_to_db(project_paths=project_paths, dry_run=dry_run)
+
+        progress.update(task, completed=True)
+
+    console.print()
+    console.print(f"Projects scanned: {stats['projects_scanned']}")
+    console.print(f"Projects migrated: {stats['projects_migrated']}")
+    console.print(f"Stories imported: {stats['stories_imported']}")
+
+    if stats['errors']:
+        console.print()
+        print_warning(f"{len(stats['errors'])} errors:")
+        for error in stats['errors'][:5]:
+            console.print(f" • {error}")
+        if len(stats['errors']) > 5:
+            console.print(f" ... and {len(stats['errors']) - 5} more")
+
+    if not dry_run and stats['stories_imported'] > 0:
+        console.print()
+        db_stats = get_db_stats()
+        console.print(f"Database: {db_stats['db_path']}")
+        console.print(f"Total stories: {db_stats['story_count']}")
+        console.print(f"Database size: {format_bytes(db_stats['db_size_bytes'])}")
+
+
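`migrate-db` walks per-repo `.repr/store.json` files into the central database and reports `projects_scanned` / `projects_migrated` / `stories_imported` / `errors`. The core import step plausibly looks like the sketch below; the `store.json` layout and table schema are assumptions for illustration:

```python
import json
import sqlite3
import tempfile
from pathlib import Path

def migrate_one(store_path: Path, conn: sqlite3.Connection) -> int:
    """Import one store.json into the stories table; returns stories imported."""
    data = json.loads(store_path.read_text())
    stories = data.get("stories", [])  # assumed layout
    conn.executemany(
        "INSERT OR IGNORE INTO stories (id, title) VALUES (?, ?)",  # assumed schema
        [(s["id"], s["title"]) for s in stories],
    )
    conn.commit()
    return len(stories)

# Self-contained demo against an in-memory database:
conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE stories (id TEXT PRIMARY KEY, title TEXT)")
with tempfile.NamedTemporaryFile("w", suffix=".json", delete=False) as f:
    json.dump({"stories": [{"id": "01H", "title": "Example"}]}, f)
print(migrate_one(Path(f.name), conn))  # -> 1
```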
+@data_app.command("db-stats")
+def data_db_stats():
+    """
+    Show SQLite database statistics.
+
+    Example:
+        repr data db-stats
+    """
+    from .storage import get_db_stats
+    from .db import get_db_path
+
+    if not get_db_path().exists():
+        print_info("No SQLite database yet.")
+        print_info("Run `repr data migrate-db` to create one.")
+        return
+
+    stats = get_db_stats()
+
+    console.print("[bold]SQLite Database Stats[/]")
+    console.print()
+    console.print(f"Path: {stats['db_path']}")
+    console.print(f"Size: {format_bytes(stats['db_size_bytes'])}")
+    console.print()
+    console.print(f"Stories: {stats['story_count']}")
+    console.print(f"Projects: {stats['project_count']}")
+    console.print(f"Unique files: {stats['unique_files']}")
+    console.print(f"Unique commits: {stats['unique_commits']}")
+
+    if stats['categories']:
+        console.print()
+        console.print("[bold]By Category:[/]")
+        for cat, count in sorted(stats['categories'].items(), key=lambda x: -x[1]):
+            console.print(f" {cat}: {count}")
+
+
+
|
|
3596
|
+
@data_app.command("clear")
|
|
3597
|
+
def data_clear(
|
|
3598
|
+
force: bool = typer.Option(False, "--force", "-f", help="Skip confirmation"),
|
|
3599
|
+
):
|
|
3600
|
+
"""
|
|
3601
|
+
Clear all stories from database and storage.
|
|
3602
|
+
|
|
3603
|
+
This permanently deletes:
|
|
3604
|
+
- All stories from the SQLite database
|
|
3605
|
+
- All story files from ~/.repr/stories/
|
|
3606
|
+
- Local cache
|
|
3607
|
+
|
|
3608
|
+
Projects registry and config are preserved.
|
|
3609
|
+
|
|
3610
|
+
Example:
|
|
3611
|
+
repr data clear # With confirmation
|
|
3612
|
+
repr data clear --force # Skip confirmation
|
|
3613
|
+
"""
|
|
3614
|
+
from .db import get_db_path
|
|
3615
|
+
from .storage import STORIES_DIR
|
|
3616
|
+
from .config import get_cache_size, clear_cache
|
|
3617
|
+
import shutil
|
|
3618
|
+
|
|
3619
|
+
db_path = get_db_path()
|
|
3620
|
+
|
|
3621
|
+
# Check what exists
|
|
3622
|
+
db_exists = db_path.exists()
|
|
3623
|
+
stories_dir_exists = STORIES_DIR.exists()
|
|
3624
|
+
cache_size = get_cache_size()
|
|
3625
|
+
|
|
3626
|
+
if not db_exists and not stories_dir_exists and cache_size == 0:
|
|
3627
|
+
print_info("Nothing to clear - no database, stories, or cache found.")
|
|
3628
|
+
return
|
|
3629
|
+
|
|
3630
|
+
# Count what we're about to delete
|
|
3631
|
+
story_count = 0
|
|
3632
|
+
db_size = 0
|
|
3633
|
+
stories_file_count = 0
|
|
3634
|
+
|
|
3635
|
+
if db_exists:
|
|
3636
|
+
from .storage import get_db_stats
|
|
3637
|
+
try:
|
|
3638
|
+
stats = get_db_stats()
|
|
3639
|
+
story_count = stats.get('story_count', 0)
|
|
3640
|
+
db_size = stats.get('db_size_bytes', 0)
|
|
3641
|
+
except Exception:
|
|
3642
|
+
db_size = db_path.stat().st_size if db_path.exists() else 0
|
|
3643
|
+
|
|
3644
|
+
if stories_dir_exists:
|
|
3645
|
+
stories_file_count = len(list(STORIES_DIR.glob("*")))
|
|
3646
|
+
|
|
3647
|
+
# Show what will be deleted
|
|
3648
|
+
console.print("[bold red]This will permanently delete:[/]")
|
|
3649
|
+
console.print()
|
|
3650
|
+
if db_exists:
|
|
3651
|
+
console.print(f" • Database: {db_path}")
|
|
3652
|
+
console.print(f" {story_count} stories, {format_bytes(db_size)}")
|
|
3653
|
+
if stories_dir_exists and stories_file_count > 0:
|
|
3654
|
+
console.print(f" • Story files: {STORIES_DIR}")
|
|
3655
|
+
console.print(f" {stories_file_count} files")
|
|
3656
|
+
if cache_size > 0:
|
|
3657
|
+
console.print(f" • Cache: {format_bytes(cache_size)}")
|
|
3658
|
+
console.print()
|
|
3659
|
+
console.print("[dim]Projects registry and config will be preserved.[/]")
|
|
3660
|
+
console.print()
|
|
3661
|
+
|
|
3662
|
+
if not force:
|
|
3663
|
+
if not confirm("Are you sure you want to delete all stories and cache?"):
|
|
3664
|
+
print_info("Cancelled")
|
|
3665
|
+
raise typer.Exit()
|
|
3666
|
+
|
|
3667
|
+
# Delete database
|
|
3668
|
+
if db_exists:
|
|
3669
|
+
try:
|
|
3670
|
+
# Also delete WAL and SHM files if they exist
|
|
3671
|
+
db_path.unlink()
|
|
3672
|
+
wal_path = db_path.with_suffix(".db-wal")
|
|
3673
|
+
shm_path = db_path.with_suffix(".db-shm")
|
|
3674
|
+
if wal_path.exists():
|
|
3675
|
+
wal_path.unlink()
|
|
3676
|
+
if shm_path.exists():
|
|
3677
|
+
shm_path.unlink()
|
|
3678
|
+
except Exception as e:
|
|
3679
|
+
print_error(f"Failed to delete database: {e}")
|
|
3680
|
+
raise typer.Exit(1)
|
|
3681
|
+
|
|
3682
|
+
# Delete stories directory contents (but keep the directory)
|
|
3683
|
+
if stories_dir_exists:
|
|
3684
|
+
try:
|
|
3685
|
+
shutil.rmtree(STORIES_DIR)
|
|
3686
|
+
STORIES_DIR.mkdir(exist_ok=True)
|
|
3687
|
+
except Exception as e:
|
|
3688
|
+
print_error(f"Failed to clear stories directory: {e}")
|
|
3689
|
+
raise typer.Exit(1)
|
|
3690
|
+
|
|
3691
|
+
# Clear cache
|
|
3692
|
+
clear_cache()
|
|
3693
|
+
|
|
3694
|
+
print_success("All stories and cache cleared")
|
|
3695
|
+
if db_exists:
|
|
3696
|
+
console.print(f" Deleted: {story_count} stories from database")
|
|
3697
|
+
if stories_file_count > 0:
|
|
3698
|
+
console.print(f" Deleted: {stories_file_count} story files")
|
|
3699
|
+
if cache_size > 0:
|
|
3700
|
+
console.print(f" Cleared: {format_bytes(cache_size)} from cache")
|
|
3701
|
+
|
|
3702
|
+
|
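The database deletion above also removes SQLite's WAL-mode sidecars, which is worth noting: in WAL mode SQLite keeps `<name>-wal` and `<name>-shm` files beside the database, and deleting only the `.db` would strand them. `Path.with_suffix` happens to produce exactly the right names here, since it swaps the final `.db` suffix:

```python
from pathlib import Path

db_path = Path("stories.db")
# The ".db" -> ".db-wal" swap yields SQLite's actual sidecar filename.
assert db_path.with_suffix(".db-wal").name == "stories.db-wal"
assert db_path.with_suffix(".db-shm").name == "stories.db-shm"

for sidecar in (db_path.with_suffix(".db-wal"), db_path.with_suffix(".db-shm")):
    sidecar.unlink(missing_ok=True)  # Python 3.8+; avoids the exists() check
```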
|
2356
3703
|
# =============================================================================
|
|
2357
3704
|
# PROFILE
|
|
2358
3705
|
# =============================================================================
|
|
@@ -2536,61 +3883,451 @@ def profile_link():
 
 
 # =============================================================================
-#
+# UNPUBLISH COMMAND
 # =============================================================================
 
-@app.command()
-def
-
+@app.command("unpublish")
+def unpublish_story(
+    story_id: str = typer.Argument(..., help="Story ID to unpublish"),
 ):
     """
-
-
-    Example:
-        repr status
-    """
-    authenticated = is_authenticated()
-    user = get_current_user() if authenticated else None
-    tracked = get_tracked_repos()
-    story_count = get_story_count()
-    unpushed = len(get_unpushed_stories())
-
-    if json_output:
-        print(json.dumps({
-            "version": __version__,
-            "authenticated": authenticated,
-            "email": user.get("email") if user else None,
-            "repos_tracked": len(tracked),
-            "stories_total": story_count,
-            "stories_unpushed": unpushed,
-        }, indent=2))
-        return
-
-    print_header()
-
-    # Auth status
-    if authenticated:
-        email = user.get("email", "unknown")
-        console.print(f"Auth: [{BRAND_SUCCESS}]✓ Signed in as {email}[/]")
-    else:
-        console.print(f"Auth: [{BRAND_MUTED}]○ Not signed in[/]")
-
-    console.print()
-
-    # Stats
-    console.print(f"Tracked repos: {len(tracked)}")
-    console.print(f"Stories: {story_count} ({unpushed} unpushed)")
-
-    console.print()
-
-    # Next steps
-    if not authenticated:
-        print_info("Run `repr login` to enable cloud sync")
-    elif unpushed > 0:
-        print_info(f"Run `repr push` to publish {unpushed} stories")
+    Set story to private.
 
+    Examples:
+        repr unpublish abc123
+    """
+    from .api import set_story_visibility, AuthError
 
-
+    if not is_authenticated():
+        print_error("Not authenticated")
+        print_info("Run `repr login` first")
+        raise typer.Exit(1)
+
+    try:
+        with create_spinner("Setting story to private..."):
+            result = asyncio.run(set_story_visibility(story_id, "private"))
+        print_success("Story set to private")
+    except AuthError as e:
+        print_error(str(e))
+        raise typer.Exit(1)
+    except APIError as e:
+        print_error(f"API error: {e}")
+        raise typer.Exit(1)
+
+
+# =============================================================================
+# FRIENDS
+# =============================================================================
+
+@friends_app.command("add")
+def friends_add(
+    username: str = typer.Argument(..., help="Username to send friend request to"),
+):
+    """Send friend request.
+
+    Example:
+        repr friends add johndoe
+    """
+    if not is_authenticated():
+        print_error("Friend requests require sign-in")
+        print_info("Run `repr login` first")
+        raise typer.Exit(1)
+
+    from .api import send_friend_request, AuthError
+
+    try:
+        result = asyncio.run(send_friend_request(username))
+        print_success(f"Friend request sent to {username}")
+    except AuthError as e:
+        print_error(str(e))
+        print_info("Run `repr login` to re-authenticate")
+        raise typer.Exit(1)
+    except APIError as e:
+        print_error(str(e))
+        raise typer.Exit(1)
+
+
+@friends_app.command("list")
+def friends_list():
+    """List friends.
+
+    Example:
+        repr friends list
+    """
+    if not is_authenticated():
+        print_error("Friends list requires sign-in")
+        print_info("Run `repr login` first")
+        raise typer.Exit(1)
+
+    from .api import get_friends, AuthError
+
+    try:
+        friends = asyncio.run(get_friends())
+
+        if not friends:
+            print_info("No friends yet")
+            return
+
+        table = create_table("Friends", ["Username", "Added"])
+        for friend in friends:
+            username = friend.get("username", "N/A")
+            added_at = friend.get("created_at", "")
+            # Format the date
+            if added_at:
+                from datetime import datetime
+                try:
+                    dt = datetime.fromisoformat(added_at.replace("Z", "+00:00"))
+                    added_at = dt.strftime("%Y-%m-%d")
+                except ValueError:
+                    pass
+            table.add_row(username, added_at or "N/A")
+        console.print(table)
+    except AuthError as e:
+        print_error(str(e))
+        print_info("Run `repr login` to re-authenticate")
+        raise typer.Exit(1)
+    except APIError as e:
+        print_error(str(e))
+        raise typer.Exit(1)
+
+
+@friends_app.command("requests")
+def friends_requests():
+    """View pending friend requests.
+
+    Example:
+        repr friends requests
+    """
+    if not is_authenticated():
+        print_error("Friend requests require sign-in")
+        print_info("Run `repr login` first")
+        raise typer.Exit(1)
+
+    from .api import get_friend_requests, AuthError
+
+    try:
+        requests = asyncio.run(get_friend_requests())
+
+        if not requests:
+            print_info("No pending friend requests")
+            return
+
+        table = create_table("Pending Friend Requests", ["Request ID", "From", "Sent"])
+        for req in requests:
+            request_id = req.get("id", "N/A")
+            from_user = req.get("from_username", "N/A")
+            sent_at = req.get("created_at", "")
+            # Format the date
+            if sent_at:
+                from datetime import datetime
+                try:
+                    dt = datetime.fromisoformat(sent_at.replace("Z", "+00:00"))
+                    sent_at = dt.strftime("%Y-%m-%d")
+                except ValueError:
+                    pass
+            table.add_row(str(request_id), from_user, sent_at or "N/A")
+        console.print(table)
+
+        print_info("Use `repr friends approve <id>` or `repr friends reject <id>`")
+    except AuthError as e:
+        print_error(str(e))
+        print_info("Run `repr login` to re-authenticate")
+        raise typer.Exit(1)
+    except APIError as e:
+        print_error(str(e))
+        raise typer.Exit(1)
+
+
+@friends_app.command("approve")
+def friends_approve(
+    request_id: str = typer.Argument(..., help="ID of the friend request to approve"),
+):
+    """Approve friend request.
+
+    Example:
+        repr friends approve abc123
+    """
+    if not is_authenticated():
+        print_error("Approving friend requests requires sign-in")
+        print_info("Run `repr login` first")
+        raise typer.Exit(1)
+
+    from .api import approve_friend_request, AuthError
+
+    try:
+        result = asyncio.run(approve_friend_request(request_id))
+        print_success("Friend request approved")
+    except AuthError as e:
+        print_error(str(e))
+        print_info("Run `repr login` to re-authenticate")
+        raise typer.Exit(1)
+    except APIError as e:
+        print_error(str(e))
+        raise typer.Exit(1)
+
+
+@friends_app.command("reject")
+def friends_reject(
+    request_id: str = typer.Argument(..., help="ID of the friend request to reject"),
+):
+    """Reject friend request.
+
+    Example:
+        repr friends reject abc123
+    """
+    if not is_authenticated():
+        print_error("Rejecting friend requests requires sign-in")
+        print_info("Run `repr login` first")
+        raise typer.Exit(1)
+
+    from .api import reject_friend_request, AuthError
+
+    try:
+        result = asyncio.run(reject_friend_request(request_id))
+        print_info("Friend request rejected")
+    except AuthError as e:
+        print_error(str(e))
+        print_info("Run `repr login` to re-authenticate")
+        raise typer.Exit(1)
+    except APIError as e:
+        print_error(str(e))
+        raise typer.Exit(1)
+
+
+# =============================================================================
+# STATUS & INFO
+# =============================================================================
+
+@app.command()
+def status(
+    json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
+):
+    """
+    Show repr status and health.
+
+    Example:
+        repr status
+    """
+    authenticated = is_authenticated()
+    user = get_current_user() if authenticated else None
+    tracked = get_tracked_repos()
+    story_count = get_story_count()
+    unpushed = len(get_unpushed_stories())
+
+    if json_output:
+        print(json.dumps({
+            "version": __version__,
+            "authenticated": authenticated,
+            "email": user.get("email") if user else None,
+            "repos_tracked": len(tracked),
+            "stories_total": story_count,
+            "stories_unpushed": unpushed,
+        }, indent=2))
+        return
+
+    print_header()
+
+    # Auth status
+    if authenticated:
+        email = user.get("email", "unknown")
+        console.print(f"Auth: [{BRAND_SUCCESS}]✓ Signed in as {email}[/]")
+    else:
+        console.print(f"Auth: [{BRAND_MUTED}]○ Not signed in[/]")
+
+    console.print()
+
+    # Stats
+    console.print(f"Tracked repos: {len(tracked)}")
+    console.print(f"Stories: {story_count} ({unpushed} unpushed)")
+
+    console.print()
+
+    # Next steps
+    if not authenticated:
+        print_info("Run `repr login` to enable cloud sync")
+    elif unpushed > 0:
+        print_info(f"Run `repr push` to publish {unpushed} stories")
+
+
@app.command()
|
|
4153
|
+
def changes(
|
|
4154
|
+
path: Optional[Path] = typer.Argument(
|
|
4155
|
+
None,
|
|
4156
|
+
help="Path to repository (default: current directory)",
|
|
4157
|
+
exists=True,
|
|
4158
|
+
resolve_path=True,
|
|
4159
|
+
),
|
|
4160
|
+
explain: bool = typer.Option(False, "--explain", "-e", help="Use LLM to explain changes"),
|
|
4161
|
+
compact: bool = typer.Option(False, "--compact", "-c", help="Compact output (no diff previews)"),
|
|
4162
|
+
json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
|
|
4163
|
+
):
|
|
4164
|
+
"""
|
|
4165
|
+
Show file changes across git states with diff details.
|
|
4166
|
+
|
|
4167
|
+
Displays changes in three states:
|
|
4168
|
+
- Unstaged: Tracked files modified but not staged (with diff preview)
|
|
4169
|
+
- Staged: Changes ready to commit (with diff preview)
|
|
4170
|
+
- Unpushed: Commits not yet pushed to remote
|
|
4171
|
+
|
|
4172
|
+
Example:
|
|
4173
|
+
repr changes # Show changes with diffs
|
|
4174
|
+
repr changes --compact # Just file names
|
|
4175
|
+
repr changes --explain # LLM summary
|
|
4176
|
+
repr changes --json
|
|
4177
|
+
"""
|
|
4178
|
+
from .change_synthesis import (
|
|
4179
|
+
get_change_report,
|
|
4180
|
+
ChangeState,
|
|
4181
|
+
explain_group,
|
|
4182
|
+
)
|
|
4183
|
+
|
|
4184
|
+
target_path = path or Path.cwd()
|
|
4185
|
+
report = get_change_report(target_path)
|
|
4186
|
+
|
|
4187
|
+
if not report:
|
|
4188
|
+
print_error(f"Not a git repository: {target_path}")
|
|
4189
|
+
raise typer.Exit(1)
|
|
4190
|
+
|
|
4191
|
+
if json_output:
|
|
4192
|
+
data = {
|
|
4193
|
+
"repo_path": str(report.repo_path),
|
|
4194
|
+
"timestamp": report.timestamp.isoformat(),
|
|
4195
|
+
"unstaged": [
|
|
4196
|
+
{
|
|
4197
|
+
"path": f.path,
|
|
4198
|
+
"change_type": f.change_type,
|
|
4199
|
+
"insertions": f.insertions,
|
|
4200
|
+
"deletions": f.deletions,
|
|
4201
|
+
}
|
|
4202
|
+
for f in report.unstaged
|
|
4203
|
+
],
|
|
4204
|
+
"staged": [
|
|
4205
|
+
{
|
|
4206
|
+
"path": f.path,
|
|
4207
|
+
"change_type": f.change_type,
|
|
4208
|
+
"insertions": f.insertions,
|
|
4209
|
+
"deletions": f.deletions,
|
|
4210
|
+
}
|
|
4211
|
+
for f in report.staged
|
|
4212
|
+
],
|
|
4213
|
+
"unpushed": [
|
|
4214
|
+
{
|
|
4215
|
+
"sha": c.sha,
|
|
4216
|
+
"message": c.message,
|
|
4217
|
+
"author": c.author,
|
|
4218
|
+
"timestamp": c.timestamp.isoformat(),
|
|
4219
|
+
"files": [{"path": f.path, "change_type": f.change_type} for f in c.files],
|
|
4220
|
+
}
|
|
4221
|
+
for c in report.unpushed
|
|
4222
|
+
],
|
|
4223
|
+
}
|
|
4224
|
+
if report.summary:
|
|
4225
|
+
data["summary"] = {
|
|
4226
|
+
"hook": report.summary.hook,
|
|
4227
|
+
"what": report.summary.what,
|
|
4228
|
+
"value": report.summary.value,
|
|
4229
|
+
"problem": report.summary.problem,
|
|
4230
|
+
"insight": report.summary.insight,
|
|
4231
|
+
"show": report.summary.show,
|
|
4232
|
+
}
|
|
4233
|
+
print(json.dumps(data, indent=2))
|
|
4234
|
+
return
|
|
4235
|
+
|
|
4236
|
+
if not report.has_changes:
|
|
4237
|
+
print_info("No changes detected.")
|
|
4238
|
+
console.print(f"[{BRAND_MUTED}]Working tree clean, nothing staged, up to date with remote.[/]")
|
|
4239
|
+
raise typer.Exit()
|
|
4240
|
+
|
|
4241
|
+
# Get LLM client if explain mode
|
|
4242
|
+
client = None
|
|
4243
|
+
if explain:
|
|
4244
|
+
from .openai_analysis import get_openai_client
|
|
4245
|
+
client = get_openai_client()
|
|
4246
|
+
if not client:
|
|
4247
|
+
print_error("LLM not configured. Run `repr llm setup` first.")
|
|
4248
|
+
raise typer.Exit(1)
|
|
4249
|
+
|
|
4250
|
+
# Header
|
|
4251
|
+
console.print(f"[bold]Changes in {report.repo_path.name}[/]")
|
|
4252
|
+
console.print()
|
|
4253
|
+
|
|
4254
|
+
# Unstaged changes
|
|
4255
|
+
if report.unstaged:
|
|
4256
|
+
console.print(f"[bold][{BRAND_WARNING}]Unstaged[/][/] ({len(report.unstaged)} files)")
|
|
4257
|
+
# Explain this group right after header
|
|
4258
|
+
if client:
|
|
4259
|
+
with create_spinner("Explaining unstaged..."):
|
|
4260
|
+
explanation = asyncio.run(explain_group("unstaged", file_changes=report.unstaged, client=client))
|
|
4261
|
+
console.print()
|
|
4262
|
+
console.print(f"[{BRAND_MUTED}]{explanation}[/]")
|
|
4263
|
+
console.print()
|
|
4264
|
+
for f in report.unstaged:
|
|
4265
|
+
type_icon = {"A": "+", "M": "~", "D": "-", "R": "→"}.get(f.change_type, "?")
|
|
4266
|
+
stats = ""
|
|
4267
|
+
if f.insertions or f.deletions:
|
|
4268
|
+
stats = f" [{BRAND_SUCCESS}]+{f.insertions}[/][{BRAND_ERROR}]-{f.deletions}[/]"
|
|
4269
|
+
full_path = report.repo_path / f.path
|
|
4270
|
+
console.print(f" {type_icon} {full_path}{stats}")
|
|
4271
|
+
# Show diff preview unless compact mode
|
|
4272
|
+
if not compact and f.diff_preview:
|
|
4273
|
+
for line in f.diff_preview.split("\n")[:10]:
|
|
4274
|
+
escaped_line = rich_escape(line)
|
|
4275
|
+
if line.startswith("+"):
|
|
4276
|
+
console.print(f" [{BRAND_SUCCESS}]{escaped_line}[/]")
|
|
4277
|
+
elif line.startswith("-"):
|
|
4278
|
+
console.print(f" [{BRAND_ERROR}]{escaped_line}[/]")
|
|
4279
|
+
console.print()
|
|
4280
|
+
|
|
4281
|
+
# Staged changes
|
|
4282
|
+
if report.staged:
|
|
4283
|
+
console.print(f"[bold][{BRAND_SUCCESS}]Staged[/][/] ({len(report.staged)} files)")
|
|
4284
|
+
# Explain this group right after header
|
|
4285
|
+
if client:
|
|
4286
|
+
with create_spinner("Explaining staged..."):
|
|
4287
|
+
explanation = asyncio.run(explain_group("staged", file_changes=report.staged, client=client))
|
|
4288
|
+
console.print()
|
|
4289
|
+
console.print(f"[{BRAND_MUTED}]{explanation}[/]")
|
|
4290
|
+
console.print()
|
|
4291
|
+
for f in report.staged:
|
|
4292
|
+
type_icon = {"A": "+", "M": "~", "D": "-", "R": "→"}.get(f.change_type, "?")
|
|
4293
|
+
stats = ""
|
|
4294
|
+
if f.insertions or f.deletions:
|
|
4295
|
+
stats = f" [{BRAND_SUCCESS}]+{f.insertions}[/][{BRAND_ERROR}]-{f.deletions}[/]"
|
|
4296
|
+
full_path = report.repo_path / f.path
|
|
4297
|
+
console.print(f" {type_icon} {full_path}{stats}")
|
|
4298
|
+
# Show diff preview unless compact mode
|
|
4299
|
+
if not compact and f.diff_preview:
|
|
4300
|
+
for line in f.diff_preview.split("\n")[:10]:
|
|
4301
|
+
escaped_line = rich_escape(line)
|
|
4302
|
+
if line.startswith("+"):
|
|
4303
|
+
console.print(f" [{BRAND_SUCCESS}]{escaped_line}[/]")
|
|
4304
|
+
elif line.startswith("-"):
|
|
4305
|
+
console.print(f" [{BRAND_ERROR}]{escaped_line}[/]")
|
|
4306
|
+
console.print()
|
|
4307
|
+
|
|
4308
|
+
# Unpushed commits
|
|
4309
|
+
if report.unpushed:
|
|
4310
|
+
console.print(f"[bold][{BRAND_PRIMARY}]Unpushed[/][/] ({len(report.unpushed)} commits)")
|
|
4311
|
+
# Explain this group right after header
|
|
4312
|
+
if client:
|
|
4313
|
+
with create_spinner("Explaining unpushed..."):
|
|
4314
|
+
explanation = asyncio.run(explain_group("unpushed", commit_changes=report.unpushed, client=client))
|
|
4315
|
+
console.print()
|
|
4316
|
+
console.print(f"[{BRAND_MUTED}]{explanation}[/]")
|
|
4317
|
+
console.print()
|
|
4318
|
+
for commit in report.unpushed:
|
|
4319
|
+
console.print(f" [{BRAND_MUTED}]{commit.sha}[/] {commit.message}")
|
|
4320
|
+
# Show files changed in this commit
|
|
4321
|
+
for f in commit.files[:5]:
|
|
4322
|
+
type_icon = {"A": "+", "M": "~", "D": "-", "R": "→"}.get(f.change_type, "?")
|
|
4323
|
+
full_path = report.repo_path / f.path
|
|
4324
|
+
console.print(f" {type_icon} {full_path}")
|
|
4325
|
+
if len(commit.files) > 5:
|
|
4326
|
+
console.print(f" [{BRAND_MUTED}]... +{len(commit.files) - 5} more[/]")
|
|
4327
|
+
console.print()
|
|
4328
|
+
|
|
4329
|
+
|
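Note the `rich_escape` call on every preview line: raw diff text can contain square-bracket sequences that Rich would otherwise interpret as markup, so each line is escaped before being wrapped in color tags. A minimal demonstration of the hazard and the fix:

```python
from rich.console import Console
from rich.markup import escape

console = Console()
line = "+    labels['[bold]'] = 1"            # hostile-looking diff content
console.print(f"[green]{escape(line)}[/]")    # renders literally, in green
# Without escape(), Rich would parse "[bold]" as a style tag.
```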
+@app.command()
 def mode(
     json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
 ):
@@ -2753,6 +4490,2211 @@ def doctor():
         print_error("Issues found - see recommendations above")
 
 
+# =============================================================================
+# MCP SERVER
+# =============================================================================
+
+@mcp_app.command("serve")
+def mcp_serve(
+    sse: bool = typer.Option(False, "--sse", help="Use SSE transport instead of stdio"),
+    port: int = typer.Option(3001, "--port", "-p", help="Port for SSE mode"),
+    host: str = typer.Option("127.0.0.1", "--host", "-h", help="Host for SSE mode"),
+):
+    """
+    Start the MCP server for AI agent integration.
+
+    The MCP (Model Context Protocol) server exposes repr functionality
+    to AI agents like Claude Code, Cursor, Windsurf, and Cline.
+
+    Examples:
+        repr mcp serve              # stdio mode (default)
+        repr mcp serve --sse        # SSE mode for remote clients
+        repr mcp serve --port 3001  # Custom port for SSE
+
+    Configuration for Claude Code:
+        claude mcp add repr -- repr mcp serve
+
+    Configuration for Cursor/Windsurf (mcp.json):
+        {
+            "mcpServers": {
+                "repr": {
+                    "command": "repr",
+                    "args": ["mcp", "serve"]
+                }
+            }
+        }
+    """
+    from .mcp_server import run_server
+
+    if not sse:
+        # stdio mode - silent start, let MCP handle communication
+        run_server(sse=False)
+    else:
+        console.print(f"Starting MCP server (SSE mode) on {host}:{port}...")
+        run_server(sse=True, host=host, port=port)
+
+
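For stdio transport, the MCP client is the one that spawns `repr mcp serve` (per the `mcp.json` above) and speaks JSON-RPC over the child's pipes, which is why the server starts silently in that mode. Illustration of the spawn step only; message framing is client-specific and omitted:

```python
import subprocess

proc = subprocess.Popen(
    ["repr", "mcp", "serve"],   # matches the mcp.json command/args above
    stdin=subprocess.PIPE,      # client -> server JSON-RPC requests
    stdout=subprocess.PIPE,     # server -> client responses
)
proc.terminate()                # a real client keeps the pipes open for the session
```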
4537
|
+
@mcp_app.command("info")
|
|
4538
|
+
def mcp_info():
|
|
4539
|
+
"""
|
|
4540
|
+
Show MCP server configuration info.
|
|
4541
|
+
|
|
4542
|
+
Example:
|
|
4543
|
+
repr mcp info
|
|
4544
|
+
"""
|
|
4545
|
+
console.print("[bold]MCP Server Info[/]")
|
|
4546
|
+
console.print()
|
|
4547
|
+
console.print("The MCP server exposes repr to AI agents via the Model Context Protocol.")
|
|
4548
|
+
console.print()
|
|
4549
|
+
console.print("[bold]Available Tools:[/]")
|
|
4550
|
+
console.print(" • repr_generate — Generate stories from commits")
|
|
4551
|
+
console.print(" • repr_stories_list — List existing stories")
|
|
4552
|
+
console.print(" • repr_week — Weekly work summary")
|
|
4553
|
+
console.print(" • repr_standup — Yesterday/today summary")
|
|
4554
|
+
console.print(" • repr_profile — Get developer profile")
|
|
4555
|
+
console.print()
|
|
4556
|
+
console.print("[bold]Available Resources:[/]")
|
|
4557
|
+
console.print(" • repr://profile — Current profile")
|
|
4558
|
+
console.print(" • repr://stories/recent — Recent stories")
|
|
4559
|
+
console.print()
|
|
4560
|
+
console.print("[bold]Usage:[/]")
|
|
4561
|
+
console.print(" repr mcp serve # Start server (stdio)")
|
|
4562
|
+
console.print(" repr mcp serve --sse # SSE mode")
|
|
4563
|
+
console.print()
|
|
4564
|
+
console.print("[bold]Claude Code setup:[/]")
|
|
4565
|
+
console.print(" claude mcp add repr -- repr mcp serve")
|
|
4566
|
+
|
|
4567
|
+
|
|
4568
|
+
@mcp_app.command("install-skills")
|
|
4569
|
+
def mcp_install_skills(
|
|
4570
|
+
    force: bool = typer.Option(False, "--force", "-f", help="Overwrite existing skill files"),
):
    """
    Install repr skills to Claude Code.

    Installs a skill that teaches Claude Code how and when to use repr commands
    (init, generate, timeline, changes).

    Example:
        repr mcp install-skills
        repr mcp install-skills --force   # Overwrite existing
    """
    skill_dir = Path.home() / ".claude" / "skills"
    skill_file = skill_dir / "repr.md"

    # Check if already installed
    if skill_file.exists() and not force:
        print_warning("repr skill already installed")
        console.print(f"  Location: {skill_file}")
        console.print()
        console.print("Use --force to overwrite")
        return

    # Create directory
    skill_dir.mkdir(parents=True, exist_ok=True)

    # Write skill file
    skill_content = '''---
name: repr
description: Use this skill when the user asks to "show my changes", "what did I work on", "generate a story", "initialize repr", "show timeline", "set up repr", or needs context about their recent development work.
version: 1.0.0
---

# repr - Developer Context Layer

repr helps developers understand what they've actually worked on by analyzing git history and AI sessions.

## When to Use repr

- User asks about their recent work or changes
- User wants to generate a story or summary from commits
- User needs to set up repr for a project
- User wants to see their development timeline
- User asks "what did I work on" or similar

## Commands

### Initialize repr

```bash
# First-time setup - scan for repositories
repr init

# Scan specific directory
repr init ~/projects
```

Use when: User is setting up repr for the first time or adding new repositories.

### Show Changes

```bash
# Show current changes (unstaged, staged, unpushed)
repr changes

# Compact view (just file names)
repr changes --compact

# With LLM explanation
repr changes --explain

# JSON output
repr changes --json
```

Use when: User asks "what are my changes", "show my work", "what's uncommitted", or needs to understand current git state.

### Generate Stories

```bash
# Generate stories from recent commits
repr generate

# Generate for specific date range
repr generate --since monday
repr generate --since "2 weeks ago"

# Use local LLM (Ollama)
repr generate --local

# Dry run (preview without saving)
repr generate --dry-run
```

Use when: User wants to create narratives from their commits, document their work, or generate content for standup/weekly reports.

### Timeline

```bash
# Initialize timeline for current project
repr timeline init

# Initialize with AI session ingestion
repr timeline init --with-sessions

# Show timeline entries
repr timeline show
repr timeline show --days 14

# Filter by type
repr timeline show --type commit
repr timeline show --type session

# Show timeline status
repr timeline status

# Refresh/update timeline
repr timeline refresh

# Launch web dashboard
repr dashboard
```

Use when: User wants a unified view of commits and AI sessions, or needs to understand the full context of their development work.

## Output Interpretation

### Changes Output
- **Unstaged**: Modified files not yet staged (with diff preview)
- **Staged**: Changes ready to commit
- **Unpushed**: Commits not yet pushed to remote

### Timeline Entry Types
- **commit**: Regular git commits
- **session**: AI coding sessions (Claude Code, etc.)
- **merged**: Commits with associated AI session context

### Smart Git Workflow

```bash
# Stage files
repr add .py     # Stage *.py files
repr add .       # Stage all
repr add src/    # Stage directory

# Generate message and commit
repr commit              # AI generates message
repr commit -m "fix: x"  # Custom message
repr commit -r           # Regenerate message

# Push to remote
repr push
```

Use when: User wants to stage, commit with AI-generated message, or push.

## Tips

1. Run `repr changes` before committing to see what you're about to commit
2. Use `repr generate --dry-run` to preview stories before saving
3. Initialize timeline with `--with-sessions` to capture AI context
4. Use `repr timeline show --type session` to see AI-assisted work separately
5. Use `repr add .` followed by `repr commit` for quick commits with AI-generated messages
'''

    skill_file.write_text(skill_content)

    print_success("repr skill installed to Claude Code")
    console.print()
    console.print(f"  Skill: {skill_file}")
    console.print()
    console.print("Claude Code will now recognize repr commands.")
    console.print("Try asking: 'show my changes' or 'what did I work on'")
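A quick way to exercise the command above in a test is typer's CliRunner. A minimal sketch, assuming the module's root Typer `app` is importable and the MCP subcommands are mounted under `mcp` (both assumptions; this diff doesn't show the app wiring):

```python
# Sketch only - `app` and the "mcp" mount point are assumed names.
from typer.testing import CliRunner

from repr.cli import app  # assumed import path

runner = CliRunner()
result = runner.invoke(app, ["mcp", "install-skills", "--force"])
assert result.exit_code == 0
assert "installed" in result.output
```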

# =============================================================================
# TIMELINE
# =============================================================================

@timeline_app.command("init")
|
|
4750
|
+
def timeline_init(
|
|
4751
|
+
path: Optional[Path] = typer.Argument(
|
|
4752
|
+
None,
|
|
4753
|
+
help="Project path (default: current directory)",
|
|
4754
|
+
exists=True,
|
|
4755
|
+
dir_okay=True,
|
|
4756
|
+
resolve_path=True,
|
|
4757
|
+
),
|
|
4758
|
+
with_sessions: bool = typer.Option(
|
|
4759
|
+
False, "--with-sessions", "-s",
|
|
4760
|
+
help="Include AI session context (Claude Code, Clawdbot)",
|
|
4761
|
+
),
|
|
4762
|
+
days: int = typer.Option(
|
|
4763
|
+
90, "--days", "-d",
|
|
4764
|
+
help="Number of days to look back",
|
|
4765
|
+
),
|
|
4766
|
+
max_commits: int = typer.Option(
|
|
4767
|
+
500, "--max-commits",
|
|
4768
|
+
help="Maximum commits to include",
|
|
4769
|
+
),
|
|
4770
|
+
model: str = typer.Option(
|
|
4771
|
+
"openai/gpt-4.1-mini", "--model", "-m",
|
|
4772
|
+
help="Model for session extraction (with --with-sessions)",
|
|
4773
|
+
),
|
|
4774
|
+
force: bool = typer.Option(
|
|
4775
|
+
False, "--force", "-f",
|
|
4776
|
+
help="Overwrite existing timeline",
|
|
4777
|
+
),
|
|
4778
|
+
json_output: bool = typer.Option(
|
|
4779
|
+
False, "--json",
|
|
4780
|
+
help="Output as JSON",
|
|
4781
|
+
),
|
|
4782
|
+
):
|
|
4783
|
+
"""
|
|
4784
|
+
Initialize a project timeline from git commits and AI sessions.
|
|
4785
|
+
|
|
4786
|
+
Creates .repr/timeline.json with unified context from:
|
|
4787
|
+
- Git commits (always included)
|
|
4788
|
+
- AI session logs (with --with-sessions flag)
|
|
4789
|
+
|
|
4790
|
+
Examples:
|
|
4791
|
+
repr timeline init # Commits only
|
|
4792
|
+
repr timeline init --with-sessions # Include AI sessions
|
|
4793
|
+
repr timeline init --days 30 # Last 30 days
|
|
4794
|
+
repr timeline init ~/myproject # Specific project
|
|
4795
|
+
"""
|
|
4796
|
+
from .timeline import (
|
|
4797
|
+
detect_project_root,
|
|
4798
|
+
is_initialized,
|
|
4799
|
+
init_timeline_commits_only,
|
|
4800
|
+
init_timeline_with_sessions_sync,
|
|
4801
|
+
get_timeline_stats,
|
|
4802
|
+
)
|
|
4803
|
+
from .loaders import detect_session_source
|
|
4804
|
+
|
|
4805
|
+
# Determine project path
|
|
4806
|
+
project_path = path or Path.cwd()
|
|
4807
|
+
|
|
4808
|
+
# Check if in git repo
|
|
4809
|
+
repo_root = detect_project_root(project_path)
|
|
4810
|
+
if not repo_root:
|
|
4811
|
+
print_error(f"Not a git repository: {project_path}")
|
|
4812
|
+
print_info("Run this command inside a git repository")
|
|
4813
|
+
raise typer.Exit(1)
|
|
4814
|
+
|
|
4815
|
+
project_path = repo_root
|
|
4816
|
+
|
|
4817
|
+
# Check if already initialized
|
|
4818
|
+
if is_initialized(project_path) and not force:
|
|
4819
|
+
print_warning(f"Timeline already exists for {project_path}")
|
|
4820
|
+
print_info("Use --force to reinitialize")
|
|
4821
|
+
raise typer.Exit(1)
|
|
4822
|
+
|
|
4823
|
+
if not json_output:
|
|
4824
|
+
print_header()
|
|
4825
|
+
console.print(f"Initializing timeline for [bold]{project_path.name}[/]")
|
|
4826
|
+
console.print()
|
|
4827
|
+
|
|
4828
|
+
# Check for session sources if --with-sessions
|
|
4829
|
+
session_sources = []
|
|
4830
|
+
if with_sessions:
|
|
4831
|
+
session_sources = detect_session_source(project_path)
|
|
4832
|
+
if not session_sources:
|
|
4833
|
+
if not json_output:
|
|
4834
|
+
print_warning("No AI session sources found for this project")
|
|
4835
|
+
print_info("Supported: Claude Code (~/.claude/projects/), Clawdbot (~/.clawdbot/)")
|
|
4836
|
+
if not confirm("Continue with commits only?", default=True):
|
|
4837
|
+
raise typer.Exit(0)
|
|
4838
|
+
with_sessions = False
|
|
4839
|
+
else:
|
|
4840
|
+
if not json_output:
|
|
4841
|
+
console.print(f"Session sources: {', '.join(session_sources)}")
|
|
4842
|
+
|
|
4843
|
+
# Progress tracking
|
|
4844
|
+
progress_state = {"stage": "", "current": 0, "total": 0}
|
|
4845
|
+
|
|
4846
|
+
def progress_callback(stage: str, current: int, total: int) -> None:
|
|
4847
|
+
progress_state["stage"] = stage
|
|
4848
|
+
progress_state["current"] = current
|
|
4849
|
+
progress_state["total"] = total
|
|
4850
|
+
|
|
4851
|
+
try:
|
|
4852
|
+
if with_sessions:
|
|
4853
|
+
# Get API key for extraction
|
|
4854
|
+
# Priority: BYOK OpenAI > env OPENAI_API_KEY > LiteLLM (cloud)
|
|
4855
|
+
api_key = None
|
|
4856
|
+
byok_config = get_byok_config("openai")
|
|
4857
|
+
if byok_config:
|
|
4858
|
+
api_key = byok_config.get("api_key")
|
|
4859
|
+
|
|
4860
|
+
if not api_key:
|
|
4861
|
+
# Try environment variable (direct OpenAI access)
|
|
4862
|
+
api_key = os.environ.get("OPENAI_API_KEY")
|
|
4863
|
+
|
|
4864
|
+
if not api_key:
|
|
4865
|
+
# Try LiteLLM config (cloud mode - needs LiteLLM proxy URL)
|
|
4866
|
+
_, litellm_key = get_litellm_config()
|
|
4867
|
+
api_key = litellm_key
|
|
4868
|
+
|
|
4869
|
+
if not api_key:
|
|
4870
|
+
if not json_output:
|
|
4871
|
+
print_warning("No API key configured for session extraction")
|
|
4872
|
+
print_info("Configure with: repr llm add openai")
|
|
4873
|
+
print_info("Or set OPENAI_API_KEY environment variable")
|
|
4874
|
+
if not confirm("Continue with commits only?", default=True):
|
|
4875
|
+
raise typer.Exit(0)
|
|
4876
|
+
with_sessions = False
|
|
4877
|
+
|
|
4878
|
+
if with_sessions:
|
|
4879
|
+
if not json_output:
|
|
4880
|
+
with create_spinner() as progress:
|
|
4881
|
+
task = progress.add_task("Initializing...", total=None)
|
|
4882
|
+
timeline = init_timeline_with_sessions_sync(
|
|
4883
|
+
project_path,
|
|
4884
|
+
days=days,
|
|
4885
|
+
max_commits=max_commits,
|
|
4886
|
+
session_sources=session_sources,
|
|
4887
|
+
api_key=api_key,
|
|
4888
|
+
model=model,
|
|
4889
|
+
progress_callback=progress_callback,
|
|
4890
|
+
)
|
|
4891
|
+
else:
|
|
4892
|
+
timeline = init_timeline_with_sessions_sync(
|
|
4893
|
+
project_path,
|
|
4894
|
+
days=days,
|
|
4895
|
+
max_commits=max_commits,
|
|
4896
|
+
session_sources=session_sources,
|
|
4897
|
+
api_key=api_key,
|
|
4898
|
+
model=model,
|
|
4899
|
+
)
|
|
4900
|
+
else:
|
|
4901
|
+
if not json_output:
|
|
4902
|
+
with create_spinner() as progress:
|
|
4903
|
+
task = progress.add_task("Scanning commits...", total=None)
|
|
4904
|
+
timeline = init_timeline_commits_only(
|
|
4905
|
+
project_path,
|
|
4906
|
+
days=days,
|
|
4907
|
+
max_commits=max_commits,
|
|
4908
|
+
progress_callback=progress_callback,
|
|
4909
|
+
)
|
|
4910
|
+
else:
|
|
4911
|
+
timeline = init_timeline_commits_only(
|
|
4912
|
+
project_path,
|
|
4913
|
+
days=days,
|
|
4914
|
+
max_commits=max_commits,
|
|
4915
|
+
)
|
|
4916
|
+
|
|
4917
|
+
# Get stats
|
|
4918
|
+
stats = get_timeline_stats(timeline)
|
|
4919
|
+
|
|
4920
|
+
if json_output:
|
|
4921
|
+
print(json.dumps({
|
|
4922
|
+
"success": True,
|
|
4923
|
+
"project": str(project_path),
|
|
4924
|
+
"timeline_path": str(project_path / ".repr" / "timeline.json"),
|
|
4925
|
+
"stats": stats,
|
|
4926
|
+
}, indent=2))
|
|
4927
|
+
else:
|
|
4928
|
+
console.print()
|
|
4929
|
+
print_success(f"Timeline initialized!")
|
|
4930
|
+
console.print()
|
|
4931
|
+
console.print(f" Location: .repr/timeline.json")
|
|
4932
|
+
console.print(f" Entries: {stats['total_entries']}")
|
|
4933
|
+
console.print(f" Commits: {stats['commit_count']}")
|
|
4934
|
+
if stats['merged_count'] > 0:
|
|
4935
|
+
console.print(f" Merged (commit + session): {stats['merged_count']}")
|
|
4936
|
+
if stats['session_count'] > 0:
|
|
4937
|
+
console.print(f" Sessions only: {stats['session_count']}")
|
|
4938
|
+
if stats['date_range']['first']:
|
|
4939
|
+
console.print(f" Date range: {stats['date_range']['first'][:10]} to {stats['date_range']['last'][:10]}")
|
|
4940
|
+
|
|
4941
|
+
console.print()
|
|
4942
|
+
print_next_steps([
|
|
4943
|
+
"repr timeline status View timeline status",
|
|
4944
|
+
"repr timeline show Browse timeline entries",
|
|
4945
|
+
])
|
|
4946
|
+
|
|
4947
|
+
except Exception as e:
|
|
4948
|
+
if json_output:
|
|
4949
|
+
print(json.dumps({"success": False, "error": str(e)}, indent=2))
|
|
4950
|
+
else:
|
|
4951
|
+
print_error(f"Failed to initialize timeline: {e}")
|
|
4952
|
+
raise typer.Exit(1)
|
|
4953
|
+
|
|
4954
|
+
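The key lookup above (BYOK > OPENAI_API_KEY > LiteLLM) reappears in `ingest-session` below, though there the env var is checked last. A sketch of the init-order lookup pulled out as a helper; `resolve_extraction_key` is hypothetical, and the config-function import location is assumed:

```python
# Hypothetical helper (not in the package): the key-resolution order used
# by `timeline init` above - BYOK OpenAI, then env var, then LiteLLM.
import os
from typing import Optional

from repr.config import get_byok_config, get_litellm_config  # assumed location

def resolve_extraction_key() -> Optional[str]:
    byok = get_byok_config("openai")
    if byok and byok.get("api_key"):
        return byok["api_key"]
    env_key = os.environ.get("OPENAI_API_KEY")  # direct OpenAI access
    if env_key:
        return env_key
    _, litellm_key = get_litellm_config()       # cloud/proxy mode
    return litellm_key
```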

@timeline_app.command("status")
def timeline_status(
    path: Optional[Path] = typer.Argument(
        None,
        help="Project path (default: current directory)",
        exists=True,
        dir_okay=True,
        resolve_path=True,
    ),
    json_output: bool = typer.Option(
        False, "--json",
        help="Output as JSON",
    ),
):
    """
    Show timeline status for a project.

    Example:
        repr timeline status
    """
    from .timeline import (
        detect_project_root,
        is_initialized,
        load_timeline,
        get_timeline_stats,
    )

    # Determine project path
    project_path = path or Path.cwd()
    repo_root = detect_project_root(project_path)

    if not repo_root:
        print_error(f"Not a git repository: {project_path}")
        raise typer.Exit(1)

    project_path = repo_root

    if not is_initialized(project_path):
        if json_output:
            print(json.dumps({"initialized": False, "project": str(project_path)}, indent=2))
        else:
            print_warning(f"Timeline not initialized for {project_path.name}")
            print_info("Run: repr timeline init")
        raise typer.Exit(1)

    timeline = load_timeline(project_path)
    if not timeline:
        print_error("Failed to load timeline")
        raise typer.Exit(1)

    stats = get_timeline_stats(timeline)

    if json_output:
        print(json.dumps({
            "initialized": True,
            "project": str(project_path),
            "stats": stats,
            "last_updated": timeline.last_updated.isoformat() if timeline.last_updated else None,
        }, indent=2))
    else:
        print_header()
        console.print(f"Timeline: [bold]{project_path.name}[/]")
        console.print()
        console.print(f"  Entries: {stats['total_entries']}")
        console.print(f"  Commits: {stats['commit_count']}")
        console.print(f"  Merged: {stats['merged_count']}")
        console.print(f"  Sessions: {stats['session_count']}")
        if stats['date_range']['first']:
            console.print(f"  Date range: {stats['date_range']['first'][:10]} to {stats['date_range']['last'][:10]}")
        if stats['session_sources']:
            console.print(f"  Session sources: {', '.join(stats['session_sources'])}")
        if timeline.last_updated:
            last_updated_str = timeline.last_updated.isoformat() if hasattr(timeline.last_updated, 'isoformat') else str(timeline.last_updated)
            console.print(f"  Last updated: {format_relative_time(last_updated_str)}")
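Since the command emits the JSON payload shown above, scripts can consume it directly. A small sketch (field names taken from the `json.dumps` call above; everything else is illustrative):

```python
# Sketch: consuming `repr timeline status --json` from a script.
import json
import subprocess

proc = subprocess.run(
    ["repr", "timeline", "status", "--json"],
    capture_output=True, text=True,
)
status = json.loads(proc.stdout)
if status.get("initialized"):
    print(status["stats"]["total_entries"], "entries")
else:
    print("timeline not initialized:", status.get("project"))
```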

@timeline_app.command("show")
def timeline_show(
    path: Optional[str] = typer.Argument(
        None,
        help="Project path (use '.' for current repo, omit for all repos)",
    ),
    days: int = typer.Option(
        7, "--days", "-d",
        help="Show entries from last N days",
    ),
    entry_type: Optional[str] = typer.Option(
        None, "--type", "-t",
        help="Filter by type: commit, session, merged",
    ),
    limit: int = typer.Option(
        20, "--limit", "-n",
        help="Maximum entries to show",
    ),
    json_output: bool = typer.Option(
        False, "--json",
        help="Output as JSON",
    ),
    public: bool = typer.Option(
        True, "--public/--no-public",
        help="Build-in-public feed format (default)",
    ),
    internal: bool = typer.Option(
        False, "--internal",
        help="Show technical details in feed",
    ),
    raw: bool = typer.Option(
        False, "--raw",
        help="Show raw timeline entries (commits/sessions)",
    ),
    all_repos: bool = typer.Option(
        False, "--all", "-a",
        help="Show stories from all tracked repos (default when no path given)",
    ),
    group: bool = typer.Option(
        False, "--group", "-g",
        help="Group stories by project (presentation view)",
    ),
):
    """
    Show timeline entries.

    Examples:
        repr timeline show                # All tracked repos from database
        repr timeline show .              # Current repo only
        repr timeline show /path/to/repo  # Specific repo
        repr timeline show --group        # All repos, grouped by project
        repr timeline show . --days 30    # Current repo, last 30 days
        repr timeline show --internal     # Feed format with tech details
    """
    from datetime import timezone
    from .timeline import (
        detect_project_root,
        is_initialized,
        load_timeline,
        query_timeline,
    )
    from .models import TimelineEntryType
    from .db import get_db

    db = get_db()
    since = datetime.now(timezone.utc) - timedelta(days=days)

    # Determine mode: all repos (no path) vs specific repo (path given)
    show_all_repos = path is None and not raw

    # If --all flag is given, always show all repos
    if all_repos:
        show_all_repos = True

    project_path = None
    timeline = None
    entries = []

    if not show_all_repos:
        # Specific repo mode - resolve path
        resolved_path = Path(path) if path else Path.cwd()
        if not resolved_path.is_absolute():
            resolved_path = Path.cwd() / resolved_path
        resolved_path = resolved_path.resolve()

        repo_root = detect_project_root(resolved_path)

        if not repo_root:
            print_error(f"Not a git repository: {resolved_path}")
            raise typer.Exit(1)

        project_path = repo_root

        if not is_initialized(project_path):
            print_warning(f"Timeline not initialized for {project_path.name}")
            print_info("Run: repr timeline init")
            raise typer.Exit(1)

        timeline = load_timeline(project_path)
        if not timeline:
            print_error("Failed to load timeline")
            raise typer.Exit(1)

        # Parse entry type filter
        entry_types = None
        if entry_type:
            try:
                entry_types = [TimelineEntryType(entry_type)]
            except ValueError:
                print_error(f"Invalid type: {entry_type}")
                print_info("Valid types: commit, session, merged")
                raise typer.Exit(1)

        # Query entries
        entries = query_timeline(timeline, since=since, entry_types=entry_types)
        entries = entries[-limit:]  # Take most recent

    if json_output:
        if show_all_repos:
            # JSON output for all repos
            stories = db.list_stories(since=since, limit=limit)
            print(json.dumps({
                "mode": "all_repos",
                "stories": [{"id": s.id, "title": s.title, "project_id": s.project_id} for s in stories],
            }, indent=2))
        else:
            from .timeline import _serialize_entry
            print(json.dumps({
                "project": str(project_path),
                "entries": [_serialize_entry(e) for e in entries],
            }, indent=2))
        return

    if not show_all_repos and not entries:
        print_info(f"No entries in the last {days} days")
        return

    # Feed format (default, unless --raw)
    if not raw:
        # Build project lookup
        projects = {p["id"]: p["name"] for p in db.list_projects()}

        if show_all_repos:
            # Show stories from all repos
            stories = db.list_stories(since=since, limit=limit)
            header_name = "all repos"
        else:
            # Show stories from current repo only
            project = db.get_project_by_path(project_path)

            if not project:
                print_info("No stories found. Run 'repr generate' first.")
                return

            stories = db.list_stories(project_id=project["id"], since=since, limit=limit)
            header_name = project_path.name

        if not stories:
            print_info(f"No stories in the last {days} days. Run 'repr generate' first.")
            return

        def format_rel_time(story_time):
            """Format timestamp as relative time."""
            local_time = story_time.astimezone()
            now = datetime.now(local_time.tzinfo)
            delta = now - local_time

            if delta.days == 0:
                if delta.seconds < 3600:
                    return f"{delta.seconds // 60}m ago"
                else:
                    return f"{delta.seconds // 3600}h ago"
            elif delta.days == 1:
                return "Yesterday"
            elif delta.days < 7:
                return f"{delta.days} days ago"
            else:
                return local_time.strftime("%b %d")

        def render_story(story, show_repo=False):
            """Render a single story entry using Tripartite Codex structure."""
            story_time = story.started_at or story.created_at
            rel_time = format_rel_time(story_time)
            repo_name = projects.get(story.project_id, "unknown")

            # Header with time and optional repo
            if show_repo:
                console.print(f"[{BRAND_PRIMARY}]{repo_name}[/] · [{BRAND_MUTED}]{rel_time}[/]")
            else:
                console.print(f"[{BRAND_MUTED}]{rel_time}[/]")

            # Use structured fields if available, fall back to legacy
            if story.hook:
                # New Tripartite Codex format
                console.print(f"[bold]{story.hook}[/]")
                console.print()
                what_text = story.what.rstrip(".").rstrip()
                value_text = story.value.lstrip(".").lstrip()
                console.print(f"{what_text}. {value_text}")

                # Show block if present
                if story.show:
                    console.print()
                    console.print(f"```\n{story.show}\n```")

                # Diagram if present
                if story.diagram:
                    console.print()
                    console.print(f"[{BRAND_MUTED}]Diagram:[/]")
                    console.print(f"```\n{story.diagram}\n```")

                # Internal mode: show problem and how
                if internal:
                    if story.problem:
                        console.print()
                        console.print(f"[{BRAND_MUTED}]Problem:[/] {story.problem}")

                    if story.implementation_details:
                        console.print()
                        console.print(f"[{BRAND_MUTED}]How:[/]")
                        for detail in story.implementation_details[:5]:
                            console.print(f"  [{BRAND_MUTED}]›[/] {detail}")

                # Insight
                if story.insight:
                    console.print()
                    console.print(f"[{BRAND_MUTED}]Insight:[/] {story.insight}")

            else:
                # Legacy format fallback
                post_text = story.public_post or story.title
                if internal:
                    post_text = story.internal_post or post_text
                console.print(post_text)

                show_block = story.show or story.public_show
                if internal:
                    show_block = show_block or story.internal_show
                if show_block:
                    console.print()
                    console.print(f"```\n{show_block}\n```")

                if internal and story.internal_details:
                    console.print()
                    console.print(f"[{BRAND_MUTED}]Implementation:[/]")
                    for detail in story.internal_details[:5]:
                        console.print(f"  [{BRAND_MUTED}]›[/] {detail}")

            # Recall data (internal mode only) - file changes and snippets
            if internal:
                # File changes summary
                if story.file_changes:
                    total_changes = f"+{story.total_insertions}/-{story.total_deletions}" if (story.total_insertions or story.total_deletions) else ""
                    console.print()
                    console.print(f"[{BRAND_MUTED}]Files changed ({len(story.file_changes)})[/] [{BRAND_MUTED}]{total_changes}[/]")
                    for fc in story.file_changes[:8]:  # Show up to 8 files
                        change_indicator = {"added": "+", "deleted": "-", "modified": "~"}.get(fc.change_type, "~")
                        stats = f"[green]+{fc.insertions}[/][{BRAND_MUTED}]/[/][red]-{fc.deletions}[/]" if (fc.insertions or fc.deletions) else ""
                        console.print(f"  [{BRAND_MUTED}]{change_indicator}[/] {fc.file_path} {stats}")
                    if len(story.file_changes) > 8:
                        console.print(f"  [{BRAND_MUTED}]... and {len(story.file_changes) - 8} more files[/]")

                # Key code snippets
                if story.key_snippets:
                    console.print()
                    console.print(f"[{BRAND_MUTED}]Snippets:[/]")
                    for snippet in story.key_snippets[:2]:  # Show up to 2 snippets
                        lang = snippet.language or ""
                        console.print(f"  [{BRAND_MUTED}]{snippet.file_path}[/]")
                        console.print(f"```{lang}\n{snippet.content}\n```")

            console.print()
            console.print(f"[{BRAND_MUTED}]{'─' * 60}[/]")
            console.print()

        print_header()

        if group and all_repos:
            # Grouped view: organize by project
            console.print("[bold]@all repos[/] · build log [dim](grouped)[/]")
            console.print()

            # Group stories by project
            from collections import defaultdict
            stories_by_project = defaultdict(list)
            for story in stories:
                stories_by_project[story.project_id].append(story)

            # Render each project group
            for project_id, project_stories in stories_by_project.items():
                project_name = projects.get(project_id, "unknown")
                console.print(f"[bold]── {project_name} ──[/]")
                console.print()

                for story in project_stories:
                    render_story(story, show_repo=False)

        else:
            # Timeline view (default)
            console.print(f"[bold]@{header_name}[/] · build log")
            console.print()

            for story in stories:
                render_story(story, show_repo=all_repos)

        return

    # Default format (unchanged)
    print_header()
    console.print(f"Timeline: [bold]{project_path.name}[/] (last {days} days)")
    console.print()

    for entry in reversed(entries):  # Show newest first
        # Format timestamp (convert to local timezone)
        local_ts = entry.timestamp.astimezone()
        ts = local_ts.strftime("%Y-%m-%d %H:%M")

        # Entry type indicator
        if entry.type == TimelineEntryType.COMMIT:
            type_icon = "📝"
        elif entry.type == TimelineEntryType.SESSION:
            type_icon = "💬"
        else:  # MERGED
            type_icon = "🔗"

        # Build description
        if entry.commit:
            # Show first line of commit message
            msg = entry.commit.message.split("\n")[0][:60]
            if len(entry.commit.message.split("\n")[0]) > 60:
                msg += "..."
            desc = f"{msg}"
            sha = entry.commit.sha[:8]
            console.print(f"{type_icon} [{BRAND_MUTED}]{ts}[/] [{BRAND_PRIMARY}]{sha}[/] {desc}")
        elif entry.session_context:
            # Show problem from session
            problem = entry.session_context.problem[:60]
            if len(entry.session_context.problem) > 60:
                problem += "..."
            console.print(f"{type_icon} [{BRAND_MUTED}]{ts}[/] {problem}")

        # Show session context if merged
        if entry.type == TimelineEntryType.MERGED and entry.session_context:
            console.print(f"   [{BRAND_MUTED}]→ {entry.session_context.problem[:70]}[/]")

    console.print()
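For the raw path above, the same `query_timeline` filter can be used programmatically. A sketch over the package's own names (import paths mirror the relative imports in the function; the current directory is assumed to be an initialized repo root):

```python
# Sketch: fetch only merged (commit + session) entries from the last 30 days,
# mirroring the `--raw --type merged` path above.
from datetime import datetime, timedelta, timezone
from pathlib import Path

from repr.timeline import load_timeline, query_timeline
from repr.models import TimelineEntryType

timeline = load_timeline(Path.cwd())
since = datetime.now(timezone.utc) - timedelta(days=30)
merged = query_timeline(timeline, since=since,
                        entry_types=[TimelineEntryType.MERGED])
for entry in merged[-10:]:  # ten most recent (list is oldest-first, as above)
    print(entry.timestamp, entry.commit.sha[:8])
```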

@timeline_app.command("refresh")
def timeline_refresh(
    path: Optional[str] = typer.Argument(
        None,
        help="Project path (use '.' for current repo, omit for all repos)",
    ),
    limit: int = typer.Option(
        50, "--limit", "-n",
        help="Maximum stories to refresh",
    ),
    json_output: bool = typer.Option(
        False, "--json",
        help="Output as JSON",
    ),
):
    """
    Regenerate posts for existing stories.

    This updates the public_post and internal_post fields without
    re-analyzing commits. Useful after prompt improvements.

    Examples:
        repr timeline refresh             # Refresh all repos
        repr timeline refresh .           # Refresh current repo only
        repr timeline refresh --limit 10  # Refresh last 10 stories
    """
    from .db import get_db
    from .story_synthesis import (
        transform_story_for_feed_sync,
        _build_fallback_post,
        extract_file_changes_from_commits,
        extract_key_snippets_from_commits,
    )

    db = get_db()

    # Determine scope
    if path is None:
        # All repos
        stories = db.list_stories(limit=limit)
        scope_name = "all repos"
    else:
        # Specific repo
        from .timeline import detect_project_root
        resolved_path = Path(path) if path else Path.cwd()
        if not resolved_path.is_absolute():
            resolved_path = Path.cwd() / resolved_path
        resolved_path = resolved_path.resolve()

        repo_root = detect_project_root(resolved_path)
        if not repo_root:
            print_error(f"Not a git repository: {resolved_path}")
            raise typer.Exit(1)

        project = db.get_project_by_path(repo_root)
        if not project:
            print_error(f"No stories found for {repo_root.name}")
            raise typer.Exit(1)

        stories = db.list_stories(project_id=project["id"], limit=limit)
        scope_name = repo_root.name

    if not stories:
        print_info("No stories to refresh")
        return

    print_header()
    console.print(f"Refreshing {len(stories)} stories from {scope_name}...")
    console.print()

    refreshed = 0
    failed = 0

    # Get project paths for file extraction
    project_paths = {}
    for p in db.list_projects():
        project_paths[p["id"]] = p["path"]

    for story in stories:
        try:
            # Extract file changes and snippets from git
            project_path = project_paths.get(story.project_id)
            if story.commit_shas and project_path:
                file_changes, total_ins, total_del = extract_file_changes_from_commits(
                    story.commit_shas, project_path
                )
                story.file_changes = file_changes
                story.total_insertions = total_ins
                story.total_deletions = total_del
                story.key_snippets = extract_key_snippets_from_commits(
                    story.commit_shas, project_path, max_snippets=3
                )

            # Regenerate Tripartite Codex content
            result = transform_story_for_feed_sync(story, mode="internal")

            # Store structured fields
            story.hook = result.hook
            story.what = result.what
            story.value = result.value
            story.insight = result.insight
            story.show = result.show
            story.post_body = result.post_body

            # Internal-specific fields
            if hasattr(result, 'problem') and result.problem:
                story.problem = result.problem
            if hasattr(result, 'how') and result.how:
                story.implementation_details = result.how

            # Legacy fields for backward compatibility
            what_clean = result.what.rstrip(".").rstrip()
            value_clean = result.value.lstrip(".").lstrip()
            story.public_post = f"{result.hook}\n\n{what_clean}. {value_clean}\n\nInsight: {result.insight}"
            story.internal_post = story.public_post
            story.public_show = result.show
            story.internal_show = result.show

            # Update in database
            db.save_story(story, story.project_id)
            refreshed += 1

            if not json_output:
                console.print(f"  [green]✓[/] {story.title[:60]}")

        except Exception as e:
            # Use fallback
            fallback_post = _build_fallback_post(story)
            story.public_post = fallback_post
            story.internal_post = fallback_post
            db.save_story(story, story.project_id)
            failed += 1

            if not json_output:
                console.print(f"  [yellow]![/] {story.title[:60]} (fallback)")

    console.print()

    if json_output:
        print(json.dumps({"refreshed": refreshed, "failed": failed}))
    else:
        print_success(f"Refreshed {refreshed} stories" + (f" ({failed} used fallback)" if failed else ""))
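Note the error handling in the loop above: a failed regeneration downgrades that one story to `_build_fallback_post` output instead of aborting the whole run. The same pattern in isolation, as a generic sketch (not package code):

```python
# Generic per-item fallback loop, as used by timeline_refresh above.
def refresh_all(items, regenerate, fallback):
    refreshed, failed = 0, 0
    for item in items:
        try:
            regenerate(item)   # may raise (e.g. LLM/API errors)
            refreshed += 1
        except Exception:
            fallback(item)     # degrade this item only
            failed += 1
    return refreshed, failed
```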

@timeline_app.command("ingest-session")
def timeline_ingest_session(
    file: Path = typer.Option(
        ..., "--file", "-f",
        help="Path to session file (JSONL)",
        exists=True,
        file_okay=True,
        resolve_path=True,
    ),
    project: Optional[Path] = typer.Option(
        None, "--project", "-p",
        help="Project path (default: detected from session cwd)",
        exists=True,
        dir_okay=True,
        resolve_path=True,
    ),
    source: Optional[str] = typer.Option(
        None, "--source", "-s",
        help="Session source: claude_code, clawdbot (default: auto-detect)",
    ),
    model: str = typer.Option(
        "openai/gpt-4.1-mini", "--model", "-m",
        help="Model for context extraction",
    ),
    json_output: bool = typer.Option(
        False, "--json",
        help="Output as JSON",
    ),
):
    """
    Ingest a completed AI session into the timeline.

    Called by SessionEnd hooks to capture context from AI coding sessions.

    Examples:
        repr timeline ingest-session --file ~/.claude/projects/.../session.jsonl
        repr timeline ingest-session --file /path/to/session.jsonl --project ~/myproject
    """
    from datetime import timezone
    from .timeline import (
        detect_project_root,
        is_initialized,
        load_timeline,
        save_timeline,
        extract_commits_from_git,
    )
    from .models import (
        TimelineEntry,
        TimelineEntryType,
        match_commits_to_sessions,
    )
    from .loaders import ClaudeCodeLoader, ClawdbotLoader
    from .session_extractor import SessionExtractor

    # Determine source
    if source is None:
        if ".claude" in str(file):
            source = "claude_code"
        elif ".clawdbot" in str(file):
            source = "clawdbot"
        else:
            # Try both loaders
            source = "claude_code"

    # Load session
    if source == "claude_code":
        loader = ClaudeCodeLoader()
    elif source == "clawdbot":
        loader = ClawdbotLoader()
    else:
        print_error(f"Unknown source: {source}")
        raise typer.Exit(1)

    session = loader.load_session(file)
    if not session:
        if json_output:
            print(json.dumps({"success": False, "error": "Failed to load session"}))
        else:
            print_error(f"Failed to load session from {file}")
        raise typer.Exit(1)

    # Determine project path
    if project is None:
        if session.cwd:
            project = detect_project_root(Path(session.cwd))
        if project is None:
            if json_output:
                print(json.dumps({"success": False, "error": "Could not detect project path"}))
            else:
                print_error("Could not detect project path from session")
                print_info("Specify with --project /path/to/repo")
            raise typer.Exit(1)

    project_path = project

    # Check if timeline exists
    if not is_initialized(project_path):
        if json_output:
            print(json.dumps({"success": False, "error": f"Timeline not initialized for {project_path}"}))
        else:
            print_warning(f"Timeline not initialized for {project_path.name}")
            print_info("Run: repr timeline init")
        raise typer.Exit(1)

    # Load existing timeline
    timeline = load_timeline(project_path)
    if not timeline:
        print_error("Failed to load timeline")
        raise typer.Exit(1)

    # Check if session already ingested
    for entry in timeline.entries:
        if entry.session_context and entry.session_context.session_id == session.id:
            if json_output:
                print(json.dumps({"success": True, "skipped": True, "reason": "Session already ingested"}))
            else:
                print_info(f"Session {session.id[:8]} already ingested")
            return

    # Get API key for extraction
    api_key = None
    byok_config = get_byok_config("openai")
    if byok_config:
        api_key = byok_config.get("api_key")

    if not api_key:
        _, litellm_key = get_litellm_config()
        api_key = litellm_key

    if not api_key:
        api_key = os.environ.get("OPENAI_API_KEY")

    if not api_key:
        if json_output:
            print(json.dumps({"success": False, "error": "No API key for extraction"}))
        else:
            print_error("No API key configured for session extraction")
            print_info("Configure with: repr llm add openai")
        raise typer.Exit(1)

    # Extract context from session
    async def _extract():
        extractor = SessionExtractor(api_key=api_key, model=model)
        return await extractor.extract_context(session)

    if not json_output:
        with create_spinner() as progress:
            task = progress.add_task("Extracting context...", total=None)
            context = asyncio.run(_extract())
    else:
        context = asyncio.run(_extract())

    # Get recent commits to potentially link
    recent_commits = extract_commits_from_git(
        project_path,
        days=1,  # Just last day for linking
        max_commits=50,
    )

    # Match session to commits
    if recent_commits:
        matches = match_commits_to_sessions(recent_commits, [session])
        linked_commits = [m.commit_sha for m in matches if m.session_id == session.id]
        context.linked_commits = linked_commits

    # Create timeline entry
    entry_type = TimelineEntryType.SESSION
    if context.linked_commits:
        # Find and upgrade matching commit entries to MERGED
        for commit_sha in context.linked_commits:
            for entry in timeline.entries:
                if entry.commit and entry.commit.sha == commit_sha:
                    entry.session_context = context
                    entry.type = TimelineEntryType.MERGED
                    entry_type = None  # Don't create standalone entry
                    break
            if entry_type is None:
                break

    # Add standalone session entry if not merged
    if entry_type is not None:
        entry = TimelineEntry(
            timestamp=context.timestamp,
            type=entry_type,
            commit=None,
            session_context=context,
            story=None,
        )
        timeline.add_entry(entry)

    # Save timeline
    save_timeline(timeline, project_path)

    if json_output:
        print(json.dumps({
            "success": True,
            "session_id": session.id,
            "project": str(project_path),
            "problem": context.problem[:100],
            "linked_commits": context.linked_commits,
            "entry_type": entry_type.value if entry_type else "merged",
        }, indent=2))
    else:
        print_success("Session ingested!")
        console.print()
        console.print(f"  Session: {session.id[:8]}")
        console.print(f"  Problem: {context.problem[:60]}...")
        if context.linked_commits:
            console.print(f"  Linked to commits: {', '.join(c[:8] for c in context.linked_commits)}")
        console.print(f"  Entry type: {entry_type.value if entry_type else 'merged'}")
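The merge rule above is worth calling out: a session that links to a commit already on the timeline upgrades that commit entry to MERGED, and only unlinked sessions get a standalone entry. Condensed into one function as a sketch over the same models (`attach_session` itself is hypothetical):

```python
# Sketch of the upgrade-or-append rule used by ingest-session above.
from repr.models import TimelineEntry, TimelineEntryType

def attach_session(timeline, context):
    for entry in timeline.entries:
        if entry.commit and entry.commit.sha in (context.linked_commits or []):
            entry.session_context = context
            entry.type = TimelineEntryType.MERGED  # commit + session
            return entry
    entry = TimelineEntry(
        timestamp=context.timestamp,
        type=TimelineEntryType.SESSION,            # standalone session
        commit=None,
        session_context=context,
        story=None,
    )
    timeline.add_entry(entry)
    return entry
```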

@timeline_app.command("serve", hidden=True)
def timeline_serve(
    port: int = typer.Option(
        8787, "--port", "-p",
        help="Port to serve on",
    ),
    host: str = typer.Option(
        "127.0.0.1", "--host",
        help="Host to bind to",
    ),
    open_browser: bool = typer.Option(
        True, "--open/--no-open",
        help="Auto-open browser (default: enabled)",
    ),
):
    """
    [Deprecated] Use 'repr dashboard' instead.

    Alias for backward compatibility - calls 'repr dashboard'.
    """
    print_info("Note: 'repr timeline serve' is deprecated. Use 'repr dashboard' instead.")
    dashboard(port=port, host=host, open_browser=open_browser)


# =============================================================================
# GIT WORKFLOW COMMANDS (add, commit, push)
# =============================================================================

# File-based cache for commit message (persists between commands)
_COMMIT_MSG_CACHE_FILE = Path.home() / ".repr" / ".commit_message_cache"


COMMIT_MESSAGE_SYSTEM = """You generate git commit messages and branch names. Given staged changes, output JSON with:

{
  "branch": "<type>/<short-description>",
  "message": "<type>: <description>"
}

Types: feat, fix, refactor, docs, style, test, chore

Rules:
- Branch: lowercase, hyphens, no spaces (e.g., feat/add-user-auth)
- Message: under 72 chars, imperative mood, no period at end

Output only valid JSON."""

COMMIT_MESSAGE_USER = """Generate branch name and commit message for these staged changes:

{changes}"""
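The prompt pair above expects a single JSON object back. A sketch of the shape `create_branch` parses below (the literal values are illustrative only):

```python
# The model reply is parsed with json.loads; this is the expected shape.
import json

reply = '{"branch": "feat/add-user-auth", "message": "feat: add user auth"}'
data = json.loads(reply)
assert data["branch"].split("/", 1)[0] in (
    "feat", "fix", "refactor", "docs", "style", "test", "chore"
)
assert len(data["message"]) <= 72  # per the rules in the system prompt
```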

def _get_cached_commit_info() -> Optional[dict]:
    """Get cached commit info (branch + message) if it exists."""
    if _COMMIT_MSG_CACHE_FILE.exists():
        try:
            return json.loads(_COMMIT_MSG_CACHE_FILE.read_text())
        except Exception:
            return None
    return None


def _set_cached_commit_info(branch: str, message: str):
    """Cache the commit info."""
    _COMMIT_MSG_CACHE_FILE.parent.mkdir(parents=True, exist_ok=True)
    _COMMIT_MSG_CACHE_FILE.write_text(json.dumps({"branch": branch, "message": message}))


def _clear_cached_commit_info():
    """Clear the cached commit info."""
    if _COMMIT_MSG_CACHE_FILE.exists():
        _COMMIT_MSG_CACHE_FILE.unlink()
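These three helpers form the handoff between `repr branch`, `repr commit`, and `repr add` (which write, read, and clear the cache respectively). A round-trip sketch in this module's context; note it touches the real cache file under `~/.repr`:

```python
# Round-trip of the cache helpers above (writes to ~/.repr).
_set_cached_commit_info("feat/demo", "feat: demo change")
cached = _get_cached_commit_info() or {}
assert cached.get("branch") == "feat/demo"
assert cached.get("message") == "feat: demo change"
_clear_cached_commit_info()
assert _get_cached_commit_info() is None
```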

@app.command("clear")
def clear_cache():
    """
    Clear cached branch name and commit message.

    Examples:
        repr clear
    """
    cached = _get_cached_commit_info()
    if cached:
        console.print(f"[{BRAND_MUTED}]Clearing cached:[/]")
        if cached.get("branch"):
            console.print(f"  Branch: {cached['branch']}")
        if cached.get("message"):
            console.print(f"  Message: {cached['message']}")
        _clear_cached_commit_info()
        print_success("Cache cleared")
    else:
        print_info("No cached branch/message")

@app.command("add")
|
|
5831
|
+
def add_files(
|
|
5832
|
+
pattern: str = typer.Argument(..., help="File pattern to stage (glob pattern)"),
|
|
5833
|
+
force: bool = typer.Option(False, "--force", "-f", help="Force add ignored files"),
|
|
5834
|
+
):
|
|
5835
|
+
"""
|
|
5836
|
+
Stage files matching pattern.
|
|
5837
|
+
|
|
5838
|
+
Run `repr commit` to generate a message and commit.
|
|
5839
|
+
|
|
5840
|
+
Examples:
|
|
5841
|
+
repr add cli # Stage files containing "cli"
|
|
5842
|
+
repr add .py # Stage files containing ".py"
|
|
5843
|
+
repr add . # Stage all
|
|
5844
|
+
repr add cli -f # Force add ignored files
|
|
5845
|
+
"""
|
|
5846
|
+
import subprocess
|
|
5847
|
+
from .change_synthesis import get_repo, get_staged_changes
|
|
5848
|
+
|
|
5849
|
+
repo = get_repo(Path.cwd())
|
|
5850
|
+
if not repo:
|
|
5851
|
+
print_error("Not a git repository")
|
|
5852
|
+
raise typer.Exit(1)
|
|
5853
|
+
|
|
5854
|
+
# Special case: "." means all
|
|
5855
|
+
if pattern == ".":
|
|
5856
|
+
try:
|
|
5857
|
+
cmd = ["git", "add", "."]
|
|
5858
|
+
if force:
|
|
5859
|
+
cmd.insert(2, "-f")
|
|
5860
|
+
result = subprocess.run(cmd, cwd=repo.working_dir, capture_output=True, text=True)
|
|
5861
|
+
if result.returncode != 0:
|
|
5862
|
+
print_error(f"Failed to stage files: {result.stderr}")
|
|
5863
|
+
raise typer.Exit(1)
|
|
5864
|
+
except Exception as e:
|
|
5865
|
+
print_error(f"Failed to stage files: {e}")
|
|
5866
|
+
raise typer.Exit(1)
|
|
5867
|
+
else:
|
|
5868
|
+
# Grep-style match: find files containing pattern
|
|
5869
|
+
# Get modified/untracked files
|
|
5870
|
+
status_result = subprocess.run(
|
|
5871
|
+
["git", "status", "--porcelain"],
|
|
5872
|
+
cwd=repo.working_dir,
|
|
5873
|
+
capture_output=True,
|
|
5874
|
+
text=True,
|
|
5875
|
+
)
|
|
5876
|
+
if status_result.returncode != 0:
|
|
5877
|
+
print_error(f"Failed to get status: {status_result.stderr}")
|
|
5878
|
+
raise typer.Exit(1)
|
|
5879
|
+
|
|
5880
|
+
# Parse status output and filter by pattern (grep-style regex)
|
|
5881
|
+
import re
|
|
5882
|
+
matching_files = []
|
|
5883
|
+
try:
|
|
5884
|
+
regex = re.compile(pattern, re.IGNORECASE)
|
|
5885
|
+
except re.error:
|
|
5886
|
+
# Fall back to literal match if invalid regex
|
|
5887
|
+
regex = re.compile(re.escape(pattern), re.IGNORECASE)
|
|
5888
|
+
|
|
5889
|
+
for line in status_result.stdout.strip().split("\n"):
|
|
5890
|
+
if not line:
|
|
5891
|
+
continue
|
|
5892
|
+
# Format: XY filename or XY orig -> renamed (XY is 2 chars, then optional space)
|
|
5893
|
+
file_path = line[2:].lstrip().split(" -> ")[-1].strip()
|
|
5894
|
+
if regex.search(file_path):
|
|
5895
|
+
matching_files.append(file_path)
|
|
5896
|
+
|
|
5897
|
+
if not matching_files:
|
|
5898
|
+
print_warning(f"No changed files matching '{pattern}'")
|
|
5899
|
+
raise typer.Exit(0)
|
|
5900
|
+
|
|
5901
|
+
# Stage matching files
|
|
5902
|
+
try:
|
|
5903
|
+
cmd = ["git", "add"]
|
|
5904
|
+
if force:
|
|
5905
|
+
cmd.append("-f")
|
|
5906
|
+
cmd.extend(matching_files)
|
|
5907
|
+
result = subprocess.run(cmd, cwd=repo.working_dir, capture_output=True, text=True)
|
|
5908
|
+
if result.returncode != 0:
|
|
5909
|
+
print_error(f"Failed to stage files: {result.stderr}")
|
|
5910
|
+
raise typer.Exit(1)
|
|
5911
|
+
except Exception as e:
|
|
5912
|
+
print_error(f"Failed to stage files: {e}")
|
|
5913
|
+
raise typer.Exit(1)
|
|
5914
|
+
|
|
5915
|
+
staged = get_staged_changes(repo)
|
|
5916
|
+
if not staged:
|
|
5917
|
+
print_warning("No files matched pattern or nothing to stage")
|
|
5918
|
+
raise typer.Exit(0)
|
|
5919
|
+
|
|
5920
|
+
# Clear cached branch/message so next commit generates fresh
|
|
5921
|
+
_clear_cached_commit_info()
|
|
5922
|
+
|
|
5923
|
+
# Show staged files
|
|
5924
|
+
console.print(f"[bold]Staged {len(staged)} files[/]")
|
|
5925
|
+
for f in staged:
|
|
5926
|
+
type_icon = {"A": "+", "M": "~", "D": "-", "R": "→"}.get(f.change_type, "?")
|
|
5927
|
+
stats = ""
|
|
5928
|
+
if f.insertions or f.deletions:
|
|
5929
|
+
stats = f" [{BRAND_SUCCESS}]+{f.insertions}[/][{BRAND_ERROR}]-{f.deletions}[/]"
|
|
5930
|
+
console.print(f" {type_icon} {f.path}{stats}")
|
|
5931
|
+
console.print()
|
|
5932
|
+
|
|
5933
|
+
# Suggest branch if on main/master, otherwise commit
|
|
5934
|
+
try:
|
|
5935
|
+
branch_name = repo.active_branch.name
|
|
5936
|
+
if branch_name in ("main", "master"):
|
|
5937
|
+
print_info("Run `repr branch` to create a new feature branch")
|
|
5938
|
+
else:
|
|
5939
|
+
print_info("Run `repr commit` to generate message and commit, or `repr branch` to create a new feature branch")
|
|
5940
|
+
except Exception:
|
|
5941
|
+
print_info("Run `repr commit` to generate message and commit")
|
|
5942
|
+
|
|
5943
|
+
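The pattern matching above is a case-insensitive regex over `git status --porcelain` paths (with a literal-match fallback for invalid regexes), handling the two status characters and rename arrows. The same parsing as a standalone sketch:

```python
# Sketch of the porcelain filtering used by `repr add`.
import re
import subprocess

def changed_files_matching(pattern: str, cwd: str) -> list:
    try:
        regex = re.compile(pattern, re.IGNORECASE)
    except re.error:
        regex = re.compile(re.escape(pattern), re.IGNORECASE)
    out = subprocess.run(
        ["git", "status", "--porcelain"],
        cwd=cwd, capture_output=True, text=True,
    ).stdout
    files = []
    for line in out.strip().split("\n"):
        if not line:
            continue
        # "XY path" or "XY old -> new"; keep the post-rename path
        path = line[2:].lstrip().split(" -> ")[-1].strip()
        if regex.search(path):
            files.append(path)
    return files
```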

@app.command("unstage")
def unstage_files(
    pattern: str = typer.Argument(..., help="File pattern to unstage (grep-style match)"),
):
    """
    Unstage files matching pattern.

    Examples:
        repr unstage cli   # Unstage files containing "cli"
        repr unstage .py   # Unstage files containing ".py"
        repr unstage .     # Unstage all staged files
    """
    import subprocess
    from .change_synthesis import get_repo, get_staged_changes

    repo = get_repo(Path.cwd())
    if not repo:
        print_error("Not a git repository")
        raise typer.Exit(1)

    # Get currently staged files
    staged = get_staged_changes(repo)
    if not staged:
        print_warning("No staged files")
        raise typer.Exit(0)

    # Special case: "." means all
    if pattern == ".":
        matching_files = [f.path for f in staged]
    else:
        # Grep-style regex match
        import re
        try:
            regex = re.compile(pattern, re.IGNORECASE)
        except re.error:
            regex = re.compile(re.escape(pattern), re.IGNORECASE)
        matching_files = [f.path for f in staged if regex.search(f.path)]

    if not matching_files:
        print_warning(f"No staged files matching '{pattern}'")
        raise typer.Exit(0)

    # Unstage matching files
    try:
        cmd = ["git", "restore", "--staged"] + matching_files
        result = subprocess.run(cmd, cwd=repo.working_dir, capture_output=True, text=True)
        if result.returncode != 0:
            print_error(f"Failed to unstage files: {result.stderr}")
            raise typer.Exit(1)
    except Exception as e:
        print_error(f"Failed to unstage files: {e}")
        raise typer.Exit(1)

    # Show what was unstaged
    console.print(f"[bold]Unstaged {len(matching_files)} files[/]")
    for f in matching_files:
        console.print(f"  - {f}")

@app.command("branch")
|
|
6004
|
+
def create_branch(
|
|
6005
|
+
name: Optional[str] = typer.Argument(None, help="Branch name (optional, AI generates if omitted)"),
|
|
6006
|
+
regenerate: bool = typer.Option(False, "--regenerate", "-r", help="Regenerate branch name"),
|
|
6007
|
+
):
|
|
6008
|
+
"""
|
|
6009
|
+
Create and switch to a new branch.
|
|
6010
|
+
|
|
6011
|
+
If no name given, generates one from staged or unpushed changes.
|
|
6012
|
+
|
|
6013
|
+
Examples:
|
|
6014
|
+
repr branch # AI generates name
|
|
6015
|
+
repr branch feat/my-feature # Use explicit name
|
|
6016
|
+
repr branch -r # Regenerate name
|
|
6017
|
+
"""
|
|
6018
|
+
import subprocess
|
|
6019
|
+
from .change_synthesis import get_repo, get_staged_changes, get_unpushed_commits, format_file_changes, format_commit_changes
|
|
6020
|
+
|
|
6021
|
+
repo = get_repo(Path.cwd())
|
|
6022
|
+
if not repo:
|
|
6023
|
+
print_error("Not a git repository")
|
|
6024
|
+
raise typer.Exit(1)
|
|
6025
|
+
|
|
6026
|
+
branch_name = name
|
|
6027
|
+
|
|
6028
|
+
if not branch_name:
|
|
6029
|
+
# Check cache first
|
|
6030
|
+
cached = _get_cached_commit_info()
|
|
6031
|
+
if cached and cached.get("branch") and not regenerate:
|
|
6032
|
+
branch_name = cached["branch"]
|
|
6033
|
+
console.print(f"[{BRAND_MUTED}](cached)[/]")
|
|
6034
|
+
else:
|
|
6035
|
+
# Generate from staged or unpushed changes
|
|
6036
|
+
staged = get_staged_changes(repo)
|
|
6037
|
+
unpushed = get_unpushed_commits(repo)
|
|
6038
|
+
|
|
6039
|
+
if not staged and not unpushed:
|
|
6040
|
+
print_error("No staged or unpushed changes to generate branch name from")
|
|
6041
|
+
print_info("Run `repr add <pattern>` first, or provide a name")
|
|
6042
|
+
raise typer.Exit(1)
|
|
6043
|
+
|
|
6044
|
+
from .openai_analysis import get_openai_client
|
|
6045
|
+
|
|
6046
|
+
client = get_openai_client()
|
|
6047
|
+
if not client:
|
|
6048
|
+
print_error("LLM not configured. Run `repr llm setup` first, or provide a name")
|
|
6049
|
+
raise typer.Exit(1)
|
|
6050
|
+
|
|
6051
|
+
# Build context from staged and/or unpushed
|
|
6052
|
+
changes_str = ""
|
|
6053
|
+
if staged:
|
|
6054
|
+
changes_str += "Staged changes:\n" + format_file_changes(staged) + "\n\n"
|
|
6055
|
+
if unpushed:
|
|
6056
|
+
changes_str += "Unpushed commits:\n" + format_commit_changes(unpushed)
|
|
6057
|
+
|
|
6058
|
+
prompt = COMMIT_MESSAGE_USER.format(changes=changes_str)
|
|
6059
|
+
|
|
6060
|
+
with create_spinner("Generating branch name..."):
|
|
6061
|
+
response = asyncio.run(client.chat.completions.create(
|
|
6062
|
+
model="gpt-4o-mini",
|
|
6063
|
+
messages=[
|
|
6064
|
+
{"role": "system", "content": COMMIT_MESSAGE_SYSTEM},
|
|
6065
|
+
{"role": "user", "content": prompt},
|
|
6066
|
+
],
|
|
6067
|
+
response_format={"type": "json_object"},
|
|
6068
|
+
temperature=0.3,
|
|
6069
|
+
))
|
|
6070
|
+
data = json.loads(response.choices[0].message.content)
|
|
6071
|
+
branch_name = data.get("branch", "")
|
|
6072
|
+
commit_msg = data.get("message", "")
|
|
6073
|
+
|
|
6074
|
+
# Cache both
|
|
6075
|
+
_set_cached_commit_info(branch_name, commit_msg)
|
|
6076
|
+
|
|
6077
|
+
console.print(f"[bold]Branch:[/] {branch_name}")
|
|
6078
|
+
|
|
6079
|
+
try:
|
|
6080
|
+
result = subprocess.run(
|
|
6081
|
+
["git", "checkout", "-b", branch_name],
|
|
6082
|
+
cwd=repo.working_dir,
|
|
6083
|
+
capture_output=True,
|
|
6084
|
+
text=True,
|
|
6085
|
+
)
|
|
6086
|
+
if result.returncode != 0:
|
|
6087
|
+
if "already exists" in result.stderr:
|
|
6088
|
+
# Switch to existing branch
|
|
6089
|
+
result = subprocess.run(
|
|
6090
|
+
["git", "checkout", branch_name],
|
|
6091
|
+
cwd=repo.working_dir,
|
|
6092
|
+
capture_output=True,
|
|
6093
|
+
text=True,
|
|
6094
|
+
)
|
|
6095
|
+
if result.returncode == 0:
|
|
6096
|
+
print_success(f"Switched to {branch_name}")
|
|
6097
|
+
return
|
|
6098
|
+
print_error(f"Failed: {result.stderr}")
|
|
6099
|
+
raise typer.Exit(1)
|
|
6100
|
+
|
|
6101
|
+
print_success(f"Created and switched to {branch_name}")
|
|
6102
|
+
|
|
6103
|
+
# Check if there are staged changes and suggest next step
|
|
6104
|
+
from .change_synthesis import get_staged_changes
|
|
6105
|
+
staged = get_staged_changes(repo)
|
|
6106
|
+
if staged:
|
|
6107
|
+
print_info("Run `repr commit` to commit your staged changes")
|
|
6108
|
+
|
|
6109
|
+
except Exception as e:
|
|
6110
|
+
print_error(f"Failed: {e}")
|
|
6111
|
+
raise typer.Exit(1)
|
|
6112
|
+
|
|
6113
|
+
|
|
6114
|
+
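`repr branch` and `repr commit` share a single generation pass: the model returns a branch name and a commit message together, and whichever command runs first caches the pair for the other via `_set_cached_commit_info`. Those cache helpers are defined elsewhere in `cli.py`; only the `branch`/`message` dict keys are visible from the calls above. A minimal sketch of what such a cache could look like, assuming a JSON file under `~/.repr/` (hypothetical path and implementation, not the shipped code):

```python
import json
from pathlib import Path
from typing import Optional

# Assumed location; the real helpers may store this elsewhere.
_CACHE_FILE = Path.home() / ".repr" / "commit_cache.json"

def get_cached_commit_info() -> Optional[dict]:
    """Return the cached {"branch": ..., "message": ...} dict, or None."""
    try:
        return json.loads(_CACHE_FILE.read_text())
    except (FileNotFoundError, json.JSONDecodeError):
        return None

def set_cached_commit_info(branch: str, message: str) -> None:
    """Persist the generated pair so the sibling command can reuse it."""
    _CACHE_FILE.parent.mkdir(parents=True, exist_ok=True)
    _CACHE_FILE.write_text(json.dumps({"branch": branch, "message": message}))
```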
+@app.command("commit")
+def commit_staged(
+    message: Optional[str] = typer.Option(None, "--message", "-m", help="Custom message (skip AI)"),
+    regenerate: bool = typer.Option(False, "--regenerate", "-r", help="Regenerate message"),
+    yes: bool = typer.Option(False, "--yes", "-y", help="Skip confirmation on main/master"),
+):
+    """
+    Generate a commit message and commit staged changes.
+
+    Examples:
+        repr commit                 # Generate and commit
+        repr commit -m "fix: typo"  # Custom message
+        repr commit -r              # Regenerate message
+    """
+    import subprocess
+    from .change_synthesis import get_repo, get_staged_changes, format_file_changes
+    from .config import get_config_value, set_config_value
+
+    repo = get_repo(Path.cwd())
+    if not repo:
+        print_error("Not a git repository")
+        raise typer.Exit(1)
+
+    # Check if on main/master
+    current_branch = subprocess.run(
+        ["git", "branch", "--show-current"],
+        cwd=repo.working_dir,
+        capture_output=True,
+        text=True,
+    ).stdout.strip()
+
+    if current_branch in ("main", "master") and not yes:
+        allow_main = get_config_value("allow_commit_to_main")
+        if allow_main is None:
+            # Ask the user and optionally persist the answer
+            print_warning(f"You're about to commit directly to {current_branch}")
+            console.print()
+            response = typer.prompt(
+                "Allow commits to main/master? [y]es / [n]o / [a]lways / [never]",
+                default="n",
+            ).lower()
+
+            if response in ("a", "always"):
+                set_config_value("allow_commit_to_main", True)
+                console.print(f"[{BRAND_MUTED}]Preference saved. Use `repr config set allow_commit_to_main false` to reset.[/]")
+            elif response == "never":
+                set_config_value("allow_commit_to_main", False)
+                console.print(f"[{BRAND_MUTED}]Preference saved. Use `repr config set allow_commit_to_main true` to reset.[/]")
+                print_info("Create a branch first: repr branch")
+                raise typer.Exit(0)
+            elif response not in ("y", "yes"):
+                print_info("Create a branch first: repr branch")
+                raise typer.Exit(0)
+        elif allow_main is False:
+            print_warning(f"Commits to {current_branch} are disabled")
+            print_info("Create a branch first: repr branch")
+            print_info("Or use: repr config set allow_commit_to_main true")
+            raise typer.Exit(0)
+
+    staged = get_staged_changes(repo)
+    if not staged:
+        print_warning("Nothing staged to commit")
+        print_info("Run `repr add <pattern>` first")
+        raise typer.Exit(0)
+
+    # Show staged files
+    console.print(f"[bold]Staged {len(staged)} files[/]")
+    for f in staged:
+        type_icon = {"A": "+", "M": "~", "D": "-", "R": "→"}.get(f.change_type, "?")
+        stats = ""
+        if f.insertions or f.deletions:
+            stats = f" [{BRAND_SUCCESS}]+{f.insertions}[/][{BRAND_ERROR}]-{f.deletions}[/]"
+        console.print(f"  {type_icon} {f.path}{stats}")
+    console.print()
+
+    # Get the commit message
+    commit_msg = None
+
+    if message:
+        commit_msg = message
+    else:
+        # Check cache
+        cached = _get_cached_commit_info()
+        if cached and cached.get("message") and not regenerate:
+            commit_msg = cached["message"]
+            console.print(f"[{BRAND_MUTED}](cached)[/]")
+        else:
+            # Generate with the LLM
+            from .openai_analysis import get_openai_client
+
+            client = get_openai_client()
+            if not client:
+                print_error("LLM not configured. Run `repr llm setup` first, or use -m")
+                raise typer.Exit(1)
+
+            changes_str = format_file_changes(staged)
+            prompt = COMMIT_MESSAGE_USER.format(changes=changes_str)
+
+            with create_spinner("Generating commit message..."):
+                response = asyncio.run(client.chat.completions.create(
+                    model="gpt-4o-mini",
+                    messages=[
+                        {"role": "system", "content": COMMIT_MESSAGE_SYSTEM},
+                        {"role": "user", "content": prompt},
+                    ],
+                    response_format={"type": "json_object"},
+                    temperature=0.3,
+                ))
+            data = json.loads(response.choices[0].message.content)
+            branch_name = data.get("branch", "")
+            commit_msg = data.get("message", "")
+
+            _set_cached_commit_info(branch_name, commit_msg)
+
+    console.print(f"[bold]Message:[/] {commit_msg}")
+    console.print()
+
+    try:
+        result = subprocess.run(
+            ["git", "commit", "-m", commit_msg],
+            cwd=repo.working_dir,
+            capture_output=True,
+            text=True,
+        )
+        if result.returncode != 0:
+            print_error(f"Commit failed: {result.stderr}")
+            raise typer.Exit(1)
+
+        _clear_cached_commit_info()
+        print_success("Committed")
+
+        sha_result = subprocess.run(
+            ["git", "rev-parse", "--short", "HEAD"],
+            cwd=repo.working_dir,
+            capture_output=True,
+            text=True,
+        )
+        if sha_result.returncode == 0:
+            console.print(f"  [{BRAND_MUTED}]{sha_result.stdout.strip()}[/]")
+
+    except typer.Exit:
+        # Re-raise intentional exits so they are not reported as failures
+        raise
+    except Exception as e:
+        print_error(f"Commit failed: {e}")
+        raise typer.Exit(1)
+
+
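The commit path trusts the model to honor the JSON contract in `COMMIT_MESSAGE_SYSTEM` (defined earlier in `cli.py`); `json.loads` raises on anything else. A defensive variant of the parsing above, shown only as a sketch of how the reply could be validated:

```python
import json

def parse_commit_payload(raw: str) -> tuple:
    """Return (branch, message) from the model reply, empty on bad JSON."""
    try:
        data = json.loads(raw)
    except json.JSONDecodeError:
        return "", ""
    if not isinstance(data, dict):
        return "", ""
    return data.get("branch", ""), data.get("message", "")

# parse_commit_payload('{"branch": "fix/typo", "message": "fix: typo"}')
# -> ("fix/typo", "fix: typo")
```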
+@app.command("push")
+def push_commits():
+    """
+    Push commits to the remote.
+
+    Examples:
+        repr push
+    """
+    import subprocess
+    from .change_synthesis import get_repo
+
+    repo = get_repo(Path.cwd())
+    if not repo:
+        print_error("Not a git repository")
+        raise typer.Exit(1)
+
+    # Check for unpushed commits
+    try:
+        result = subprocess.run(
+            ["git", "log", "@{u}..", "--oneline"],
+            cwd=repo.working_dir,
+            capture_output=True,
+            text=True,
+        )
+        if result.returncode != 0:
+            # No upstream configured; just push
+            pass
+        elif not result.stdout.strip():
+            print_info("Nothing to push")
+            raise typer.Exit(0)
+        else:
+            commits = result.stdout.strip().split("\n")
+            console.print(f"[bold]Pushing {len(commits)} commits[/]")
+            for c in commits[:5]:
+                console.print(f"  [{BRAND_MUTED}]{c}[/]")
+            if len(commits) > 5:
+                console.print(f"  [{BRAND_MUTED}]... +{len(commits) - 5} more[/]")
+            console.print()
+    except typer.Exit:
+        # typer.Exit subclasses Exception; without this clause the bare
+        # handler below would swallow "Nothing to push" and push anyway
+        raise
+    except Exception:
+        pass
+
+    try:
+        with create_spinner("Pushing..."):
+            result = subprocess.run(
+                ["git", "push"],
+                cwd=repo.working_dir,
+                capture_output=True,
+                text=True,
+            )
+        if result.returncode != 0:
+            if "no upstream branch" in result.stderr:
+                # Retry with -u to set the upstream
+                branch = subprocess.run(
+                    ["git", "branch", "--show-current"],
+                    cwd=repo.working_dir,
+                    capture_output=True,
+                    text=True,
+                ).stdout.strip()
+                with create_spinner(f"Setting upstream and pushing {branch}..."):
+                    result = subprocess.run(
+                        ["git", "push", "-u", "origin", branch],
+                        cwd=repo.working_dir,
+                        capture_output=True,
+                        text=True,
+                    )
+                if result.returncode != 0:
+                    print_error(f"Push failed: {result.stderr}")
+                    raise typer.Exit(1)
+            else:
+                print_error(f"Push failed: {result.stderr}")
+                raise typer.Exit(1)
+
+        print_success("Pushed")
+
+    except typer.Exit:
+        raise
+    except Exception as e:
+        print_error(f"Push failed: {e}")
+        raise typer.Exit(1)
+
+
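The `@{u}..` range does the real work here: it lists commits reachable from HEAD but not from the configured upstream, and `git log` exits non-zero when no upstream exists. The same check as a standalone helper, for illustration only:

```python
import subprocess
from typing import List, Optional

def unpushed_oneline(repo_dir: str) -> Optional[List[str]]:
    """Return unpushed commit summaries, or None when no upstream is set."""
    result = subprocess.run(
        ["git", "log", "@{u}..", "--oneline"],
        cwd=repo_dir, capture_output=True, text=True,
    )
    if result.returncode != 0:
        return None  # no upstream configured for this branch
    out = result.stdout.strip()
    return out.split("\n") if out else []
```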
+# ==================== SKILL COMMANDS ====================
+
+# Repr skill content for AI agents
+REPR_SKILL = '''---
+name: repr
+description: Use repr to extract developer context from git history for interviews, reviews, and AI agents
+---
+
+# repr - Developer Context Layer
+
+Use `repr` to capture and surface developer context from git history. Generate stories, prepare for interviews, create performance review material, and provide context to AI agents.
+
+## Quick Start
+
+```bash
+# Initialize (scan repos)
+repr init ~/code
+
+# Generate stories from recent commits
+repr generate --local
+
+# View generated stories
+repr stories
+repr story view <id>
+```
+
+## Common Commands
+
+| Command | Description |
+|---------|-------------|
+| `repr init <path>` | Scan and track repositories |
+| `repr generate --local` | Generate stories using local LLM |
+| `repr generate --days 30` | Generate from last 30 days |
+| `repr stories` | List all stories |
+| `repr story view <id>` | View a specific story |
+| `repr commits --days 7` | Show recent commits |
+| `repr dashboard` | Open web dashboard |
+| `repr mcp serve` | Start MCP server for AI agents |
+
+## Story Generation
+
+```bash
+# Generate with local LLM (Ollama)
+repr generate --local
+
+# Generate from a specific timeframe
+repr generate --days 30 --local
+repr generate --since "2 weeks ago" --local
+
+# Generate interview stories (STAR format)
+repr generate --template interview --local
+```
+
+## LLM Configuration
+
+```bash
+# Configure local LLM
+repr llm configure
+
+# Add API keys (stored in OS keychain)
+repr llm add openai
+repr llm add anthropic
+
+# Test LLM connection
+repr llm test
+```
+
+## Use Cases
+
+- **Interview Prep**: Generate STAR-format stories from commits
+- **Performance Reviews**: Summarize months of work with impact
+- **Sprint Demos**: Quick changelogs for stakeholders
+- **AI Context**: MCP server provides work history to Claude/Cursor
+- **Weekly Reflection**: See what you accomplished
+
+## Privacy
+
+- Local-first: data stays in ~/.repr/
+- Air-gapped ready: works fully offline
+- BYOK: use your own API keys
+- Privacy audit: `repr privacy audit`
+'''
+
+
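A SKILL.md file pairs YAML front matter (the `name`/`description` block between the `---` fences) with markdown instructions. A rough sketch of how a consumer could pull out that front matter; agent runtimes presumably do something more robust, so treat this as illustrative only:

```python
def parse_front_matter(text: str) -> dict:
    """Extract key/value pairs between the leading '---' fences."""
    lines = text.strip().splitlines()
    if not lines or lines[0] != "---":
        return {}
    meta = {}
    for line in lines[1:]:
        if line.strip() == "---":
            break  # end of front matter
        key, _, value = line.partition(":")
        meta[key.strip()] = value.strip()
    return meta

# parse_front_matter(REPR_SKILL)["name"] -> "repr"
```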
+def _get_skill_path(provider: str) -> Path:
+    """Get the skill installation path for a provider."""
+    home = Path.home()
+    if provider == "claude":
+        return home / ".claude" / "skills" / "repr"
+    elif provider == "gemini":
+        return home / ".gemini" / "skills" / "repr"
+    else:
+        raise ValueError(f"Unknown provider: {provider}")
+
+
+def _is_skill_installed(provider: str) -> bool:
+    """Check if the skill is installed for a provider."""
+    skill_path = _get_skill_path(provider) / "SKILL.md"
+    return skill_path.exists()
+
+
+def _install_skill_to(provider: str) -> Path:
+    """Install the repr skill to a provider's skills directory."""
+    skill_dir = _get_skill_path(provider)
+    skill_dir.mkdir(parents=True, exist_ok=True)
+    skill_file = skill_dir / "SKILL.md"
+    skill_file.write_text(REPR_SKILL)
+    return skill_dir
+
+
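Taken together, the three helpers give an idempotent install: `mkdir(parents=True, exist_ok=True)` plus `write_text` means a re-run simply overwrites the file in place. Illustrative usage (the function names are the ones above; the session itself is hypothetical):

```python
dest = _install_skill_to("claude")   # writes ~/.claude/skills/repr/SKILL.md
assert _is_skill_installed("claude")
print(f"installed: {dest / 'SKILL.md'}")

# Reinstalling is safe: the file is overwritten in place.
_install_skill_to("claude")
```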
+@skill_app.callback(invoke_without_command=True)
+def skill_default(ctx: typer.Context):
+    """View the repr skill for AI agents."""
+    if ctx.invoked_subcommand is None:
+        # Show skill content
+        console.print()
+        console.print(f"[bold {BRAND_PRIMARY}]/repr[/] - [{BRAND_MUTED}]LLM instructions for using repr[/]")
+        console.print()
+
+        # Check installation status
+        claude_installed = _is_skill_installed("claude")
+        gemini_installed = _is_skill_installed("gemini")
+
+        if claude_installed or gemini_installed:
+            console.print(f"[{BRAND_SUCCESS}]Installed:[/]")
+            if claude_installed:
+                console.print(f"  [{BRAND_MUTED}]~/.claude/skills/repr/SKILL.md[/]")
+            if gemini_installed:
+                console.print(f"  [{BRAND_MUTED}]~/.gemini/skills/repr/SKILL.md[/]")
+            console.print()
+
+        console.print(f"[{BRAND_MUTED}]{'─' * 60}[/]")
+        console.print(REPR_SKILL)
+        console.print(f"[{BRAND_MUTED}]{'─' * 60}[/]")
+
+        if not claude_installed and not gemini_installed:
+            console.print(f"\n[{BRAND_MUTED}]Install with:[/] repr skill install")
+        console.print()
+
+
+@skill_app.command("install")
+def skill_install(
+    target: Optional[str] = typer.Argument(
+        None,
+        help="Target provider: claude, gemini, or all (default: all)",
+    ),
+):
+    """Install the repr skill to AI agent providers."""
+    console.print()
+    console.print(f"[bold {BRAND_PRIMARY}]Install repr skill[/]")
+    console.print()
+
+    if not target or target == "all":
+        installed = 0
+
+        # Try Claude
+        try:
+            dest = _install_skill_to("claude")
+            print_success(f"Installed to {dest}")
+            installed += 1
+        except Exception as e:
+            print_warning(f"Could not install to Claude: {e}")
+
+        # Try Gemini
+        try:
+            dest = _install_skill_to("gemini")
+            print_success(f"Installed to {dest}")
+            installed += 1
+        except Exception as e:
+            print_warning(f"Could not install to Gemini: {e}")
+
+        if installed == 0:
+            print_warning("No providers found.")
+        else:
+            console.print(f"\n[{BRAND_MUTED}]LLMs can now use /repr to learn how to run repr commands.[/]\n")
+
+    elif target in ("claude", "gemini"):
+        try:
+            dest = _install_skill_to(target)
+            print_success(f"Installed to {dest}")
+            console.print(f"\n[{BRAND_MUTED}]LLMs can now use /repr to learn how to run repr commands.[/]\n")
+        except Exception as e:
+            print_error(f"Installation failed: {e}")
+            raise typer.Exit(1)
+    else:
+        print_error(f"Unknown target: {target}")
+        console.print(f"[{BRAND_MUTED}]Usage: repr skill install [claude|gemini|all][/]")
+        raise typer.Exit(1)
+
+
+# PR generation prompts
+PR_SYSTEM = """You generate GitHub PR titles and descriptions. Given the commits, output JSON:
+
+{
+  "title": "<type>: <short description>",
+  "body": "## Summary\\n<bullet points>\\n\\n## Changes\\n<bullet points>"
+}
+
+Rules:
+- Title: under 72 chars, conventional commit style
+- Body: markdown, concise bullet points
+- Focus on what and why, not how
+
+Output only valid JSON."""
+
+PR_USER = """Generate PR title and description for these commits:
+
+{commits}"""
+
+
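For reference, a reply that satisfies the `PR_SYSTEM` contract parses into something like the following (values invented for illustration):

```python
example = {
    "title": "feat: add skill install command",  # conventional-commit style
    "body": "## Summary\n- add repr skill\n\n## Changes\n- new skill_app commands",
}
assert len(example["title"]) <= 72  # the prompt asks for titles under 72 chars
```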
+@app.command("pr")
+def create_pr(
+    title: Optional[str] = typer.Option(None, "--title", "-t", help="Custom PR title"),
+    draft: bool = typer.Option(False, "--draft", "-d", help="Create as draft PR"),
+    regenerate: bool = typer.Option(False, "--regenerate", "-r", help="Regenerate title/body"),
+):
+    """
+    Create a pull request with an AI-generated title and description.
+
+    Examples:
+        repr pr                   # AI generates title/body
+        repr pr -t "feat: add X"  # Custom title
+        repr pr --draft           # Create draft PR
+    """
+    import shutil
+    import subprocess
+    from .change_synthesis import get_repo, get_unpushed_commits, format_commit_changes
+
+    repo = get_repo(Path.cwd())
+    if not repo:
+        print_error("Not a git repository")
+        raise typer.Exit(1)
+
+    # Check gh is installed (shutil.which is portable, unlike `which`)
+    if shutil.which("gh") is None:
+        print_error("GitHub CLI (gh) not installed")
+        print_info("Install: brew install gh")
+        raise typer.Exit(1)
+
+    # Get current branch
+    current_branch = subprocess.run(
+        ["git", "branch", "--show-current"],
+        cwd=repo.working_dir,
+        capture_output=True,
+        text=True,
+    ).stdout.strip()
+
+    if current_branch in ("main", "master"):
+        print_error(f"Cannot create a PR from {current_branch}")
+        print_info("Create a branch first: repr branch")
+        raise typer.Exit(1)
+
+    # Check for unpushed commits
+    unpushed = get_unpushed_commits(repo)
+
+    # Get commits for this branch vs main/master
+    base_branch = "main"
+    check_main = subprocess.run(
+        ["git", "rev-parse", "--verify", "main"],
+        cwd=repo.working_dir,
+        capture_output=True,
+    )
+    if check_main.returncode != 0:
+        base_branch = "master"
+
+    log_result = subprocess.run(
+        ["git", "log", f"{base_branch}..HEAD", "--oneline"],
+        cwd=repo.working_dir,
+        capture_output=True,
+        text=True,
+    )
+    commits_text = log_result.stdout.strip()
+
+    if not commits_text:
+        print_warning("No commits to create a PR from")
+        raise typer.Exit(0)
+
+    commits_list = commits_text.split("\n")
+    console.print(f"[bold]PR for {len(commits_list)} commits[/]")
+    for c in commits_list[:5]:
+        console.print(f"  [{BRAND_MUTED}]{c}[/]")
+    if len(commits_list) > 5:
+        console.print(f"  [{BRAND_MUTED}]... +{len(commits_list) - 5} more[/]")
+    console.print()
+
+    # Push if needed
+    if unpushed:
+        console.print(f"[{BRAND_MUTED}]Pushing {len(unpushed)} unpushed commits...[/]")
+        push_result = subprocess.run(
+            ["git", "push", "-u", "origin", current_branch],
+            cwd=repo.working_dir,
+            capture_output=True,
+            text=True,
+        )
+        if push_result.returncode != 0:
+            print_error(f"Push failed: {push_result.stderr}")
+            raise typer.Exit(1)
+
+    # Generate or use the provided title/body
+    pr_title = title
+    pr_body = None
+
+    if not pr_title:
+        from .openai_analysis import get_openai_client
+
+        client = get_openai_client()
+        if not client:
+            print_error("LLM not configured. Run `repr llm setup` first, or use -t")
+            raise typer.Exit(1)
+
+        prompt = PR_USER.format(commits=commits_text)
+
+        with create_spinner("Generating PR..."):
+            response = asyncio.run(client.chat.completions.create(
+                model="gpt-4o-mini",
+                messages=[
+                    {"role": "system", "content": PR_SYSTEM},
+                    {"role": "user", "content": prompt},
+                ],
+                response_format={"type": "json_object"},
+                temperature=0.3,
+            ))
+        data = json.loads(response.choices[0].message.content)
+        pr_title = data.get("title", current_branch)
+        pr_body = data.get("body", "")
+
+    console.print(f"[bold]Title:[/] {pr_title}")
+    if pr_body:
+        console.print("[bold]Body:[/]")
+        for line in pr_body.split("\n")[:5]:
+            console.print(f"  [{BRAND_MUTED}]{line}[/]")
+    console.print()
+
+    # Create the PR
+    cmd = ["gh", "pr", "create", "--title", pr_title, "--base", base_branch]
+    if pr_body:
+        cmd.extend(["--body", pr_body])
+    if draft:
+        cmd.append("--draft")
+
+    try:
+        result = subprocess.run(
+            cmd,
+            cwd=repo.working_dir,
+            capture_output=True,
+            text=True,
+        )
+        if result.returncode != 0:
+            print_error(f"PR creation failed: {result.stderr}")
+            raise typer.Exit(1)
+
+        pr_url = result.stdout.strip()
+        print_success("PR created")
+        console.print(f"  {pr_url}")
+
+    except typer.Exit:
+        raise
+    except Exception as e:
+        print_error(f"PR creation failed: {e}")
+        raise typer.Exit(1)
+
+
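The final argv is assembled incrementally; with a generated body and `--draft` both present, the call is equivalent to the shell command in the trailing comment (all values invented for illustration):

```python
cmd = ["gh", "pr", "create", "--title", "feat: add X", "--base", "main"]
cmd.extend(["--body", "## Summary\n- add X"])  # only when a body was generated
cmd.append("--draft")                          # only with --draft
# Roughly: gh pr create --title "feat: add X" --base main --body "..." --draft
```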
# Entry point
if __name__ == "__main__":
    app()