repr-cli 0.2.16__py3-none-any.whl → 0.2.17__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. repr/__init__.py +1 -1
  2. repr/api.py +363 -62
  3. repr/auth.py +47 -38
  4. repr/change_synthesis.py +478 -0
  5. repr/cli.py +4099 -280
  6. repr/config.py +119 -11
  7. repr/configure.py +889 -0
  8. repr/cron.py +419 -0
  9. repr/dashboard/__init__.py +9 -0
  10. repr/dashboard/build.py +126 -0
  11. repr/dashboard/dist/assets/index-BYFVbEev.css +1 -0
  12. repr/dashboard/dist/assets/index-BrrhyJFO.css +1 -0
  13. repr/dashboard/dist/assets/index-CcEg74ts.js +270 -0
  14. repr/dashboard/dist/assets/index-Cerc-iA_.js +377 -0
  15. repr/dashboard/dist/assets/index-CjVcBW2L.css +1 -0
  16. repr/dashboard/dist/assets/index-Dfl3mR5E.js +377 -0
  17. repr/dashboard/dist/favicon.svg +4 -0
  18. repr/dashboard/dist/index.html +14 -0
  19. repr/dashboard/manager.py +234 -0
  20. repr/dashboard/server.py +1298 -0
  21. repr/db.py +980 -0
  22. repr/hooks.py +3 -2
  23. repr/loaders/__init__.py +22 -0
  24. repr/loaders/base.py +156 -0
  25. repr/loaders/claude_code.py +287 -0
  26. repr/loaders/clawdbot.py +313 -0
  27. repr/loaders/gemini_antigravity.py +381 -0
  28. repr/mcp_server.py +1196 -0
  29. repr/models.py +503 -0
  30. repr/openai_analysis.py +25 -0
  31. repr/session_extractor.py +481 -0
  32. repr/storage.py +328 -0
  33. repr/story_synthesis.py +1296 -0
  34. repr/templates.py +68 -4
  35. repr/timeline.py +710 -0
  36. repr/tools.py +17 -8
  37. {repr_cli-0.2.16.dist-info → repr_cli-0.2.17.dist-info}/METADATA +48 -10
  38. repr_cli-0.2.17.dist-info/RECORD +52 -0
  39. {repr_cli-0.2.16.dist-info → repr_cli-0.2.17.dist-info}/WHEEL +1 -1
  40. {repr_cli-0.2.16.dist-info → repr_cli-0.2.17.dist-info}/entry_points.txt +1 -0
  41. repr_cli-0.2.16.dist-info/RECORD +0 -26
  42. {repr_cli-0.2.16.dist-info → repr_cli-0.2.17.dist-info}/licenses/LICENSE +0 -0
  43. {repr_cli-0.2.16.dist-info → repr_cli-0.2.17.dist-info}/top_level.txt +0 -0
repr/cli.py CHANGED
@@ -21,10 +21,11 @@ import os
21
21
  import sys
22
22
  from datetime import datetime, timedelta
23
23
  from pathlib import Path
24
- from typing import Optional, List, Dict, Callable
24
+ from typing import Optional, List, Dict, Callable, Any, Any
25
25
  from collections import defaultdict
26
26
 
27
27
  import typer
28
+ from rich.markup import escape as rich_escape
28
29
  from rich.prompt import Confirm, Prompt
29
30
  from rich.table import Table
30
31
 
@@ -86,9 +87,6 @@ from .config import (
86
87
  )
87
88
  from .storage import (
88
89
  save_story,
89
- load_story,
90
- delete_story,
91
- list_stories,
92
90
  get_story_count,
93
91
  get_unpushed_stories,
94
92
  mark_story_pushed,
@@ -98,9 +96,245 @@ from .storage import (
98
96
  restore_from_backup,
99
97
  get_storage_stats,
100
98
  )
99
+ from .db import get_db
101
100
  from .auth import AuthFlow, AuthError, logout as auth_logout, get_current_user, migrate_plaintext_auth
102
101
  from .api import APIError
103
102
 
103
+
104
+ # Database-backed story listing (replaces JSON storage)
105
+ def list_stories(
106
+ repo_name: str | None = None,
107
+ since: datetime | None = None,
108
+ needs_review: bool = False,
109
+ limit: int | None = None,
110
+ ) -> list[dict[str, Any]]:
111
+ """
112
+ List all stories from the database.
113
+
114
+ Args:
115
+ repo_name: Filter by repository name
116
+ since: Filter by creation date
117
+ needs_review: Only show stories needing review
118
+ limit: Maximum stories to return
119
+
120
+ Returns:
121
+ List of story metadata dicts (sorted by creation date, newest first)
122
+ """
123
+ db = get_db()
124
+
125
+ # Build query with project join
126
+ conditions = []
127
+ params = []
128
+
129
+ if since:
130
+ iso_since = since.isoformat()
131
+ conditions.append("s.created_at >= ?")
132
+ params.append(iso_since)
133
+
134
+ where_clause = " AND ".join(conditions) if conditions else "1=1"
135
+
136
+ query = f"""
137
+ SELECT
138
+ s.id,
139
+ s.project_id,
140
+ s.created_at,
141
+ s.updated_at,
142
+ s.title,
143
+ s.problem,
144
+ s.approach,
145
+ s.tradeoffs,
146
+ s.outcome,
147
+ s.category,
148
+ s.scope,
149
+ s.technologies,
150
+ s.started_at,
151
+ s.ended_at,
152
+ s.implementation_details,
153
+ s.decisions,
154
+ s.lessons,
155
+ s.hook,
156
+ s.what,
157
+ s.value,
158
+ s.insight,
159
+ s.show,
160
+ s.diagram,
161
+ s.post_body,
162
+ p.name as repo_name,
163
+ p.path as repo_path
164
+ FROM stories s
165
+ JOIN projects p ON s.project_id = p.id
166
+ WHERE {where_clause}
167
+ ORDER BY s.created_at DESC
168
+ """
169
+
170
+ if limit:
171
+ query += " LIMIT ?"
172
+ params.append(limit)
173
+
174
+ stories = []
175
+ with db.connect() as conn:
176
+ for row in conn.execute(query, params).fetchall():
177
+ story = {
178
+ "id": row["id"],
179
+ "repo_name": row["repo_name"],
180
+ "repo_path": row["repo_path"],
181
+ "summary": row["title"],
182
+ "created_at": row["created_at"],
183
+ "updated_at": row["updated_at"],
184
+ "problem": row["problem"],
185
+ "approach": row["approach"],
186
+ "tradeoffs": row["tradeoffs"],
187
+ "outcome": row["outcome"],
188
+ "category": row["category"],
189
+ "scope": row["scope"],
190
+ "started_at": row["started_at"],
191
+ "ended_at": row["ended_at"],
192
+ "hook": row["hook"],
193
+ "what": row["what"],
194
+ "value": row["value"],
195
+ "insight": row["insight"],
196
+ "show": row["show"],
197
+ "diagram": row["diagram"],
198
+ "post_body": row["post_body"],
199
+ }
200
+
201
+ # Deserialize JSON fields
202
+ if row["technologies"]:
203
+ try:
204
+ story["technologies"] = json.loads(row["technologies"])
205
+ except json.JSONDecodeError:
206
+ story["technologies"] = []
207
+ else:
208
+ story["technologies"] = []
209
+
210
+ if row["implementation_details"]:
211
+ try:
212
+ story["implementation_details"] = json.loads(row["implementation_details"])
213
+ except json.JSONDecodeError:
214
+ story["implementation_details"] = []
215
+ else:
216
+ story["implementation_details"] = []
217
+
218
+ if row["decisions"]:
219
+ try:
220
+ story["decisions"] = json.loads(row["decisions"])
221
+ except json.JSONDecodeError:
222
+ story["decisions"] = []
223
+ else:
224
+ story["decisions"] = []
225
+
226
+ if row["lessons"]:
227
+ try:
228
+ story["lessons"] = json.loads(row["lessons"])
229
+ except json.JSONDecodeError:
230
+ story["lessons"] = []
231
+ else:
232
+ story["lessons"] = []
233
+
234
+ # Apply filters that couldn't be in SQL
235
+ if repo_name and story["repo_name"] != repo_name:
236
+ continue
237
+
238
+ if needs_review and not story.get("needs_review", False):
239
+ continue
240
+
241
+ stories.append(story)
242
+
243
+ return stories
244
+
245
+
246
+ def load_story(story_id: str) -> tuple[str, dict[str, Any]] | None:
247
+ """
248
+ Load a story by ID from the database.
249
+
250
+ Args:
251
+ story_id: Story ULID
252
+
253
+ Returns:
254
+ Tuple of (content, metadata) or None if not found
255
+ """
256
+ db = get_db()
257
+ story = db.get_story(story_id)
258
+
259
+ if not story:
260
+ return None
261
+
262
+ # Convert Story model to markdown content
263
+ content = f"""# {story.title}
264
+
265
+ ## Problem
266
+ {story.problem or "No problem specified."}
267
+
268
+ ## Approach
269
+ {story.approach or "No approach specified."}
270
+
271
+ ## Tradeoffs
272
+ {story.tradeoffs or "No tradeoffs specified."}
273
+
274
+ ## Outcome
275
+ {story.outcome or "No outcome specified."}
276
+
277
+ ## Implementation Details
278
+ {chr(10).join(f"- {d}" for d in story.implementation_details) if story.implementation_details else "None"}
279
+
280
+ ## Decisions
281
+ {chr(10).join(f"- {d}" for d in story.decisions) if story.decisions else "None"}
282
+
283
+ ## Lessons
284
+ {chr(10).join(f"- {l}" for l in story.lessons) if story.lessons else "None"}
285
+
286
+ ## Technologies
287
+ {chr(10).join(f"- {t}" for t in story.technologies) if story.technologies else "None"}
288
+ """
289
+
290
+ # Build metadata dict (for backward compatibility)
291
+ metadata = {
292
+ "id": story.id,
293
+ "title": story.title,
294
+ "summary": story.title, # For compatibility
295
+ "problem": story.problem,
296
+ "approach": story.approach,
297
+ "tradeoffs": story.tradeoffs,
298
+ "outcome": story.outcome,
299
+ "category": story.category,
300
+ "scope": story.scope,
301
+ "created_at": story.created_at.isoformat() if story.created_at else None,
302
+ "updated_at": story.updated_at.isoformat() if story.updated_at else None,
303
+ "started_at": story.started_at.isoformat() if story.started_at else None,
304
+ "ended_at": story.ended_at.isoformat() if story.ended_at else None,
305
+ "technologies": story.technologies,
306
+ "implementation_details": story.implementation_details,
307
+ "decisions": story.decisions,
308
+ "lessons": story.lessons,
309
+ "files": story.files,
310
+ "commit_shas": story.commit_shas,
311
+ "session_ids": story.session_ids,
312
+ "hook": story.hook,
313
+ "what": story.what,
314
+ "value": story.value,
315
+ "insight": story.insight,
316
+ "show": story.show,
317
+ "diagram": story.diagram,
318
+ "post_body": story.post_body,
319
+ }
320
+
321
+ return content, metadata
322
+
323
+
324
+ def delete_story(story_id: str) -> bool:
325
+ """
326
+ Delete a story by ID from the database.
327
+
328
+ Args:
329
+ story_id: Story ULID
330
+
331
+ Returns:
332
+ True if deleted, False if not found
333
+ """
334
+ db = get_db()
335
+ return db.delete_story(story_id)
336
+
337
+
104
338
  # Create Typer app
105
339
  app = typer.Typer(
106
340
  name="repr",
@@ -111,18 +345,30 @@ app = typer.Typer(
111
345
 
112
346
  # Sub-apps for command groups
113
347
  hooks_app = typer.Typer(help="Manage git post-commit hooks")
348
+ cron_app = typer.Typer(help="Scheduled story generation (every 4h)")
114
349
  llm_app = typer.Typer(help="Configure LLM (local/cloud/BYOK)")
115
350
  privacy_app = typer.Typer(help="Privacy audit and controls")
116
351
  config_app = typer.Typer(help="View and modify configuration")
117
352
  data_app = typer.Typer(help="Backup, restore, and manage data")
118
353
  profile_app = typer.Typer(help="View and manage profile")
354
+ mcp_app = typer.Typer(help="MCP server for AI agent integration")
355
+ timeline_app = typer.Typer(help="Unified timeline of commits + AI sessions")
356
+ friends_app = typer.Typer(help="Manage friends")
357
+ skill_app = typer.Typer(help="Manage repr skill for AI agents")
358
+ configure_app = typer.Typer(help="Configure repr (LLM, repos, schedule)")
119
359
 
120
360
  app.add_typer(hooks_app, name="hooks")
361
+ app.add_typer(cron_app, name="cron")
121
362
  app.add_typer(llm_app, name="llm")
122
363
  app.add_typer(privacy_app, name="privacy")
123
364
  app.add_typer(config_app, name="config")
124
365
  app.add_typer(data_app, name="data")
125
366
  app.add_typer(profile_app, name="profile")
367
+ app.add_typer(mcp_app, name="mcp")
368
+ app.add_typer(timeline_app, name="timeline")
369
+ app.add_typer(friends_app, name="friends")
370
+ app.add_typer(skill_app, name="skill")
371
+ app.add_typer(configure_app, name="configure")
126
372
 
127
373
 
128
374
  def version_callback(value: bool):
@@ -153,18 +399,147 @@ def main(
153
399
  ),
154
400
  ):
155
401
  """repr - understand what you've actually worked on.
156
-
402
+
157
403
  Cloud features require sign-in. Local generation always works offline.
158
404
  """
159
405
  # Migrate plaintext auth tokens on startup
160
406
  migrate_plaintext_auth()
161
-
407
+
408
+ # First-run detection: trigger wizard on first use
409
+ # Skip for: configure, --help, mcp (automated), CI, non-interactive
410
+ skip_first_run_commands = {"configure", "mcp", None}
411
+ if ctx.invoked_subcommand not in skip_first_run_commands:
412
+ import os
413
+ is_ci = os.getenv("CI") or os.getenv("GITHUB_ACTIONS") or os.getenv("REPR_CI")
414
+ is_interactive = sys.stdin.isatty() if hasattr(sys.stdin, 'isatty') else False
415
+ if is_interactive and not is_ci:
416
+ from .configure import is_first_run, run_full_wizard
417
+ if is_first_run():
418
+ run_full_wizard()
419
+
162
420
  # Track command usage (if telemetry enabled)
163
421
  from .telemetry import track_command
164
422
  if ctx.invoked_subcommand:
165
423
  track_command(ctx.invoked_subcommand)
166
424
 
167
425
 
426
+ # =============================================================================
427
+ # DASHBOARD
428
+ # =============================================================================
429
+
430
+ @app.command("dashboard")
431
+ def dashboard(
432
+ port: int = typer.Option(
433
+ 8787, "--port", "-p",
434
+ help="Port to serve on",
435
+ ),
436
+ host: str = typer.Option(
437
+ "127.0.0.1", "--host",
438
+ help="Host to bind to",
439
+ ),
440
+ open_browser: bool = typer.Option(
441
+ True, "--open/--no-open",
442
+ help="Auto-open browser (default: enabled)",
443
+ ),
444
+ ):
445
+ """
446
+ Launch web dashboard for exploring your stories.
447
+
448
+ Starts a local web server to browse and search through your
449
+ stories with rich context visualization.
450
+
451
+ Works from any directory - reads from central SQLite database.
452
+
453
+ Examples:
454
+ repr dashboard # localhost:8787, auto-opens browser
455
+ repr dashboard --port 8080 # custom port
456
+ repr dashboard --no-open # don't auto-open browser
457
+ rp dashboard # using the 'rp' alias
458
+ """
459
+ import webbrowser
460
+ from .dashboard import run_server
461
+ from .db import get_db_path, get_db
462
+
463
+ # Check if SQLite database exists and has stories
464
+ if not get_db_path().exists():
465
+ print_error("No stories database found")
466
+ print_info("Run `repr generate` in a git repository first")
467
+ raise typer.Exit(1)
468
+
469
+ db = get_db()
470
+ stats = db.get_stats()
471
+ story_count = stats.get("story_count", 0)
472
+ project_count = stats.get("project_count", 0)
473
+
474
+ if story_count == 0:
475
+ print_error("No stories in database")
476
+ print_info("Run `repr generate` to create stories from commits")
477
+ raise typer.Exit(1)
478
+
479
+ console.print(f"Starting dashboard with [bold]{story_count} stories[/] from [bold]{project_count} repositories[/]")
480
+
481
+ # Ensure dashboard is built (build on the fly if needed)
482
+ _ensure_dashboard_built()
483
+
484
+ url = f"http://{host}:{port}"
485
+
486
+ print_header()
487
+ console.print(f" URL: [bold blue]{url}[/]")
488
+ console.print()
489
+ console.print("[dim]Press Ctrl+C to stop[/]")
490
+ console.print()
491
+
492
+ if open_browser:
493
+ webbrowser.open(url)
494
+
495
+ try:
496
+ run_server(port, host)
497
+ except KeyboardInterrupt:
498
+ console.print()
499
+ print_info("Server stopped")
500
+ except OSError as e:
501
+ if "Address already in use" in str(e):
502
+ print_error(f"Port {port} is already in use")
503
+ print_info(f"Try: repr dashboard --port {port + 1}")
504
+ raise
505
+
506
+
507
+ def _ensure_dashboard_built():
508
+ """Build dashboard if index.html doesn't exist or source files are newer."""
509
+ from pathlib import Path
510
+
511
+ dashboard_dir = Path(__file__).parent / "dashboard"
512
+ src_dir = dashboard_dir / "src"
513
+ index_html = dashboard_dir / "index.html"
514
+
515
+ # If no source directory, can't build
516
+ if not src_dir.exists():
517
+ return
518
+
519
+ # Check if we need to build
520
+ needs_build = False
521
+
522
+ if not index_html.exists():
523
+ needs_build = True
524
+ reason = "index.html not found"
525
+ else:
526
+ # Check if any source file is newer than index.html
527
+ index_mtime = index_html.stat().st_mtime
528
+
529
+ for src_file in src_dir.rglob("*"):
530
+ if src_file.is_file() and src_file.stat().st_mtime > index_mtime:
531
+ needs_build = True
532
+ reason = f"{src_file.relative_to(dashboard_dir)} is newer than index.html"
533
+ break
534
+
535
+ if needs_build:
536
+ from .dashboard.build import build
537
+ print_info(f"Building dashboard ({reason})...")
538
+ result = build()
539
+ if result != 0:
540
+ print_warning("Dashboard build failed, using cached version if available")
541
+
542
+
168
543
  # =============================================================================
169
544
  # INIT
170
545
  # =============================================================================
@@ -236,7 +611,65 @@ def init(
236
611
  console.print(f"Local LLM: detected {llm_info.name} at {llm_info.url}")
237
612
  else:
238
613
  console.print(f"[{BRAND_MUTED}]Local LLM: not detected (install Ollama for offline generation)[/]")
239
-
614
+
615
+ # Ask about automatic story generation
616
+ console.print()
617
+ console.print("[bold]Automatic Story Generation[/]")
618
+ console.print()
619
+ console.print("How should repr generate stories from your commits?")
620
+ console.print()
621
+ console.print(f" [bold]1.[/] Scheduled (recommended) - Every 4 hours via cron")
622
+ console.print(f" [{BRAND_MUTED}]Predictable, batches work, never interrupts[/]")
623
+ console.print()
624
+ console.print(f" [bold]2.[/] On commit - After every 5 commits via git hook")
625
+ console.print(f" [{BRAND_MUTED}]Real-time, but needs LLM running during commits[/]")
626
+ console.print()
627
+ console.print(f" [bold]3.[/] Manual only - Run `repr generate` yourself")
628
+ console.print(f" [{BRAND_MUTED}]Full control, no automation[/]")
629
+ console.print()
630
+
631
+ schedule_choice = Prompt.ask(
632
+ "Choose",
633
+ choices=["1", "2", "3"],
634
+ default="1",
635
+ )
636
+
637
+ from .hooks import install_hook
638
+ from .cron import install_cron
639
+
640
+ if schedule_choice == "1":
641
+ # Scheduled via cron
642
+ result = install_cron(interval_hours=4, min_commits=3)
643
+ if result["success"]:
644
+ print_success("Cron job installed (every 4h)")
645
+ # Install hooks for queue tracking (but disable auto-generate)
646
+ config = load_config()
647
+ config["generation"]["auto_generate_on_hook"] = False
648
+ save_config(config)
649
+ for repo in repos:
650
+ install_hook(Path(repo.path))
651
+ set_repo_hook_status(str(repo.path), True)
652
+ else:
653
+ print_warning(f"Could not install cron: {result['message']}")
654
+ print_info("You can set it up later with `repr cron install`")
655
+
656
+ elif schedule_choice == "2":
657
+ # On-commit via hooks
658
+ config = load_config()
659
+ config["generation"]["auto_generate_on_hook"] = True
660
+ save_config(config)
661
+ for repo in repos:
662
+ install_hook(Path(repo.path))
663
+ set_repo_hook_status(str(repo.path), True)
664
+ print_success(f"Hooks installed in {len(repos)} repos (generates after 5 commits)")
665
+
666
+ else:
667
+ # Manual only - disable auto-generation
668
+ config = load_config()
669
+ config["generation"]["auto_generate_on_hook"] = False
670
+ save_config(config)
671
+ print_info("Manual mode - run `repr generate` when you want stories")
672
+
240
673
  console.print()
241
674
  print_next_steps([
242
675
  "repr week See what you worked on this week",
@@ -249,6 +682,90 @@ def init(
249
682
  # GENERATE
250
683
  # =============================================================================
251
684
 
685
+ # Technology detection from file extensions
686
+ _TECH_EXTENSIONS = {
687
+ ".py": "Python",
688
+ ".ts": "TypeScript",
689
+ ".tsx": "TypeScript",
690
+ ".js": "JavaScript",
691
+ ".jsx": "JavaScript",
692
+ ".go": "Go",
693
+ ".rs": "Rust",
694
+ ".java": "Java",
695
+ ".kt": "Kotlin",
696
+ ".swift": "Swift",
697
+ ".c": "C",
698
+ ".cpp": "C++",
699
+ ".h": "C",
700
+ ".hpp": "C++",
701
+ ".rb": "Ruby",
702
+ ".php": "PHP",
703
+ ".cs": "C#",
704
+ ".scala": "Scala",
705
+ ".vue": "Vue",
706
+ ".svelte": "Svelte",
707
+ ".sql": "SQL",
708
+ ".sh": "Shell",
709
+ ".bash": "Shell",
710
+ ".yaml": "YAML",
711
+ ".yml": "YAML",
712
+ ".json": "JSON",
713
+ ".graphql": "GraphQL",
714
+ ".prisma": "Prisma",
715
+ }
716
+
717
+ # Special file name patterns that indicate technologies
718
+ _TECH_FILES = {
719
+ "Dockerfile": "Docker",
720
+ "docker-compose": "Docker",
721
+ "package.json": "Node.js",
722
+ "tsconfig.json": "TypeScript",
723
+ "pyproject.toml": "Python",
724
+ "requirements.txt": "Python",
725
+ "Cargo.toml": "Rust",
726
+ "go.mod": "Go",
727
+ "Gemfile": "Ruby",
728
+ "pom.xml": "Maven",
729
+ "build.gradle": "Gradle",
730
+ ".eslintrc": "ESLint",
731
+ "tailwind.config": "Tailwind CSS",
732
+ "next.config": "Next.js",
733
+ "vite.config": "Vite",
734
+ "webpack.config": "Webpack",
735
+ }
736
+
737
+
738
+ def _detect_technologies_from_files(files: list[str]) -> list[str]:
739
+ """
740
+ Detect technologies from file paths/extensions.
741
+
742
+ Args:
743
+ files: List of file paths
744
+
745
+ Returns:
746
+ Sorted list of detected technology names
747
+ """
748
+ tech = set()
749
+
750
+ for f in files:
751
+ # Handle files as either dict with 'path' or string
752
+ if isinstance(f, dict):
753
+ f = f.get("path", "")
754
+
755
+ # Check extensions
756
+ for ext, name in _TECH_EXTENSIONS.items():
757
+ if f.endswith(ext):
758
+ tech.add(name)
759
+ break
760
+
761
+ # Check special file names
762
+ for fname, name in _TECH_FILES.items():
763
+ if fname in f:
764
+ tech.add(name)
765
+
766
+ return sorted(tech)
767
+
768
+
252
769
  def _parse_date_reference(date_str: str) -> str | None:
253
770
  """
254
771
  Parse a date reference string into an ISO date string.
@@ -372,10 +889,18 @@ def generate(
372
889
  False, "--json",
373
890
  help="Output as JSON",
374
891
  ),
892
+ force: bool = typer.Option(
893
+ False, "--force", "-f",
894
+ help="Reprocess commits even if already in existing stories",
895
+ ),
896
+ with_sessions: bool = typer.Option(
897
+ True, "--with-sessions/--no-sessions", "-s",
898
+ help="Include AI session context (Claude Code, Clawdbot)",
899
+ ),
375
900
  ):
376
901
  """
377
902
  Generate stories from commits.
378
-
903
+
379
904
  Examples:
380
905
  repr generate --local
381
906
  repr generate --cloud
@@ -383,8 +908,16 @@ def generate(
383
908
  repr generate --days 30
384
909
  repr generate --template changelog
385
910
  repr generate --commits abc123,def456
911
+ repr generate --force # Reprocess all commits
386
912
  """
913
+ import asyncio
914
+ from .timeline import extract_commits_from_git, detect_project_root, get_session_contexts_for_commits
915
+ from .story_synthesis import synthesize_stories
916
+ from .db import get_db
387
917
  from .privacy import check_cloud_permission, log_cloud_operation
918
+
919
+ def synthesize_stories_sync(*args, **kwargs):
920
+ return asyncio.run(synthesize_stories(*args, **kwargs))
388
921
 
389
922
  # Determine mode
390
923
  if cloud:
@@ -401,10 +934,16 @@ def generate(
401
934
  raise typer.Exit(1)
402
935
 
403
936
  if not local and not cloud:
404
- # Default: local if not signed in, cloud if signed in
405
- if is_authenticated() and is_cloud_allowed():
937
+ # Check config for default mode
938
+ llm_config = get_llm_config()
939
+ default_mode = llm_config.get("default", "local")
940
+
941
+ if default_mode == "local":
942
+ local = True
943
+ elif default_mode == "cloud" and is_authenticated() and is_cloud_allowed():
406
944
  cloud = True
407
945
  else:
946
+ # Fallback: local if not signed in or cloud not allowed
408
947
  local = True
409
948
 
410
949
  if not json_output:
@@ -451,201 +990,193 @@ def generate(
451
990
  from .tools import get_commits_with_diffs, get_commits_by_shas
452
991
  from .discovery import analyze_repo
453
992
 
454
- total_stories = 0
455
- all_stories = [] # Collect all generated stories for JSON output
456
-
993
+ all_generated_stories = []
994
+
457
995
  for repo_path in repo_paths:
458
- repo_info = analyze_repo(repo_path)
459
996
  if not json_output:
460
- console.print(f"[bold]{repo_info.name}[/]")
461
-
997
+ console.print(f"[bold]{repo_path.name}[/]")
998
+
999
+ # Determine commit range
1000
+ repo_commits = []
462
1001
  if commits:
463
- # Specific commits
464
- if not json_output:
465
- console.print(f" Collecting specified commits...")
466
- commit_shas = [s.strip() for s in commits.split(",")]
467
- commit_list = get_commits_by_shas(repo_path, commit_shas)
1002
+ # Specific SHA list filtering
1003
+ repo_commits = extract_commits_from_git(repo_path, days=90)
1004
+ target_shas = [s.strip() for s in commits.split(",")]
1005
+ repo_commits = [c for c in repo_commits if any(c.sha.startswith(t) for t in target_shas)]
1006
+ elif since_date:
1007
+ # Parse date (rough approximation)
1008
+ filter_days = 30
1009
+ if "week" in since_date: filter_days = 14
1010
+ if "month" in since_date: filter_days = 30
1011
+ repo_commits = extract_commits_from_git(repo_path, days=filter_days)
468
1012
  else:
469
- # Determine timeframe
470
- timeframe_days = days if days is not None else 90 # Default 90 days
471
- since_str = None
472
-
473
- # Parse natural language date if provided
474
- if since_date:
475
- since_str = _parse_date_reference(since_date)
476
-
477
- # Recent commits within timeframe
478
- if not json_output:
479
- console.print(f" Scanning commits...")
480
- commit_list = get_commits_with_diffs(
481
- repo_path,
482
- count=500, # Higher limit when filtering by time
483
- days=timeframe_days,
484
- since=since_str,
485
- )
486
- if not json_output and commit_list:
487
- console.print(f" Found {len(commit_list)} commits")
488
-
489
- if not commit_list:
1013
+ filter_days = days if days else 90
1014
+ repo_commits = extract_commits_from_git(repo_path, days=filter_days)
1015
+
1016
+ if not repo_commits:
490
1017
  if not json_output:
491
- console.print(f" [{BRAND_MUTED}]No commits found[/]")
1018
+ console.print(f" No matching commits found")
492
1019
  continue
493
-
494
- # Filter out already-processed commits
495
- from .storage import get_processed_commit_shas
496
- processed_shas = get_processed_commit_shas(repo_name=repo_info.name)
497
-
498
- original_count = len(commit_list)
499
- commit_list = [c for c in commit_list if c["full_sha"] not in processed_shas]
500
-
501
- if not json_output and processed_shas:
502
- skipped_count = original_count - len(commit_list)
503
- if skipped_count > 0:
504
- console.print(f" [{BRAND_MUTED}]Skipping {skipped_count} already-processed commits[/]")
505
-
506
- if not commit_list:
1020
+
1021
+ # Filter out commits that are already part of existing stories (unless --force)
1022
+ from .db import get_db
1023
+ db = get_db()
1024
+ project_id = db.register_project(repo_path, repo_path.name)
1025
+ processed_shas = db.get_processed_commits(project_id)
1026
+ if processed_shas and not force:
1027
+ original_count = len(repo_commits)
1028
+ repo_commits = [c for c in repo_commits if c.sha not in processed_shas]
1029
+ skipped = original_count - len(repo_commits)
1030
+ if skipped > 0 and not json_output:
1031
+ console.print(f" [{BRAND_MUTED}]Skipping {skipped} already-processed commits[/]")
1032
+
1033
+ if not repo_commits:
507
1034
  if not json_output:
508
- console.print(f" [{BRAND_MUTED}]All commits already processed[/]")
1035
+ console.print(f" No new commits to process")
509
1036
  continue
510
-
511
- # Dry run: show what would be sent
512
- if dry_run:
513
- from .openai_analysis import estimate_tokens, get_batch_size
514
- from .config import load_config
515
-
516
- config = load_config()
517
- max_commits = config.get("generation", {}).get("max_commits_per_batch", 50)
518
- token_limit = config.get("generation", {}).get("token_limit", 100000)
519
1037
 
520
- console.print(f" [bold]Dry Run Preview[/]")
521
- console.print(f" Commits to analyze: {len(commit_list)}")
522
- console.print(f" Template: {template}")
523
- console.print()
1038
+ if not json_output:
1039
+ console.print(f" Analyzing {len(repo_commits)} commits...")
524
1040
 
525
- # Estimate tokens
526
- estimated_tokens = estimate_tokens(commit_list)
527
- console.print(f" Estimated tokens: ~{estimated_tokens:,}")
1041
+ # Load sessions if requested
1042
+ repo_sessions = None
1043
+ if with_sessions:
1044
+ if not json_output:
1045
+ console.print(f" Looking for AI sessions...")
1046
+
1047
+ # Use same days lookback as commits
1048
+ session_days = days if days else 90
1049
+
1050
+ def session_progress(stage: str, current: int, total: int) -> None:
1051
+ if not json_output:
1052
+ if stage == "extracting":
1053
+ console.print(f" Extracting session {current}/{total}...", end="\r")
1054
+ elif stage == "sessions_loaded" and current > 0:
1055
+ console.print(f" Found {current} sessions")
1056
+
1057
+ try:
1058
+ # Run async extraction in sync context
1059
+ repo_sessions = asyncio.run(get_session_contexts_for_commits(
1060
+ repo_path,
1061
+ repo_commits,
1062
+ days=session_days,
1063
+ progress_callback=session_progress
1064
+ ))
1065
+ if not json_output and repo_sessions:
1066
+ console.print(f"\n Enriched with {len(repo_sessions)} AI sessions")
1067
+ except Exception as e:
1068
+ if not json_output:
1069
+ print_warning(f" Failed to load sessions: {e}")
528
1070
 
529
- # Check if we need to split into batches
530
- if len(commit_list) > max_commits:
531
- num_batches = (len(commit_list) + max_commits - 1) // max_commits
532
- console.print()
533
- console.print(f" ⚠ {len(commit_list)} commits exceeds {max_commits}-commit limit")
534
- console.print()
535
- console.print(f" Will split into {num_batches} batches:")
536
- for batch_num in range(num_batches):
537
- start = batch_num * max_commits + 1
538
- end = min((batch_num + 1) * max_commits, len(commit_list))
539
- batch_commits = commit_list[batch_num * max_commits:end]
540
- batch_tokens = estimate_tokens(batch_commits)
541
- console.print(f" Batch {batch_num + 1}: commits {start}-{end} (est. {batch_tokens // 1000}k tokens)")
1071
+ # Progress callback
1072
+ def progress(current: int, total: int) -> None:
1073
+ if not json_output:
1074
+ console.print(f" Batch {current}/{total}")
542
1075
 
543
- console.print()
544
- console.print(" Sample commits:")
545
- for c in commit_list[:5]:
546
- console.print(f" • {c['sha'][:7]} {c['message'][:50]}")
547
- if len(commit_list) > 5:
548
- console.print(f" ... and {len(commit_list) - 5} more")
549
- continue
550
-
551
- # Check token limits and prompt user if needed
552
- from .openai_analysis import estimate_tokens
553
- from .config import load_config
1076
+ # Determine model based on mode (respect config default if no flag given)
1077
+ llm_config = get_llm_config()
1078
+ use_local = local or (not cloud and llm_config.get("default") == "local")
1079
+ if use_local:
1080
+ model = llm_config.get("local_model") or "llama3.2"
1081
+ else:
1082
+ model = None # Use default cloud model
554
1083
 
555
- config = load_config()
556
- max_commits = config.get("generation", {}).get("max_commits_per_batch", 50)
557
- token_limit = config.get("generation", {}).get("token_limit", 100000)
1084
+ try:
1085
+ # Run synthesis sync
1086
+ stories, index = synthesize_stories_sync(
1087
+ commits=repo_commits,
1088
+ sessions=repo_sessions,
1089
+ model=model,
1090
+ batch_size=batch_size,
1091
+ progress_callback=progress,
1092
+ )
558
1093
 
559
- # Check if we exceed limits (only warn for cloud generation)
560
- if cloud and len(commit_list) > max_commits:
561
- num_batches = (len(commit_list) + max_commits - 1) // max_commits
562
- console.print()
563
- console.print(f" ⚠ {len(commit_list)} commits exceeds {max_commits}-commit limit")
564
- console.print()
565
- console.print(f" Will split into {num_batches} batches:")
566
- for batch_num in range(num_batches):
567
- start = batch_num * max_commits + 1
568
- end = min((batch_num + 1) * max_commits, len(commit_list))
569
- batch_commits = commit_list[batch_num * max_commits:end]
570
- batch_tokens = estimate_tokens(batch_commits)
571
- console.print(f" Batch {batch_num + 1}: commits {start}-{end} (est. {batch_tokens // 1000}k tokens)")
572
- console.print()
1094
+ # Generate public/internal posts for each story
1095
+ if stories and not dry_run:
1096
+ from .story_synthesis import transform_story_for_feed_sync, _build_fallback_post
1097
+
1098
+ if not json_output:
1099
+ console.print(f" Generating build log posts...")
1100
+
1101
+ for story in stories:
1102
+ try:
1103
+ # Generate Tripartite Codex content (internal includes all fields)
1104
+ result = transform_story_for_feed_sync(story, mode="internal")
1105
+
1106
+ # Store structured fields
1107
+ story.hook = result.hook
1108
+ story.what = result.what
1109
+ story.value = result.value
1110
+ story.insight = result.insight
1111
+ story.show = result.show
1112
+ story.post_body = result.post_body
1113
+
1114
+ # Internal-specific fields
1115
+ if hasattr(result, 'problem') and result.problem:
1116
+ story.problem = result.problem
1117
+ if hasattr(result, 'how') and result.how:
1118
+ story.implementation_details = result.how
1119
+
1120
+ # Legacy fields for backward compatibility
1121
+ what_clean = result.what.rstrip(".").rstrip()
1122
+ value_clean = result.value.lstrip(".").lstrip()
1123
+ story.public_post = f"{result.hook}\n\n{what_clean}. {value_clean}\n\nInsight: {result.insight}"
1124
+ story.internal_post = story.public_post
1125
+ story.public_show = result.show
1126
+ story.internal_show = result.show
1127
+ except Exception as e:
1128
+ # Fallback: build from story data
1129
+ from .story_synthesis import _build_fallback_codex
1130
+ result = _build_fallback_codex(story, "internal")
1131
+ story.hook = result.hook
1132
+ story.what = result.what
1133
+ story.value = result.value
1134
+ story.insight = result.insight
1135
+ story.post_body = result.post_body
1136
+ what_clean = result.what.rstrip(".").rstrip()
1137
+ value_clean = result.value.lstrip(".").lstrip()
1138
+ story.public_post = f"{result.hook}\n\n{what_clean}. {value_clean}\n\nInsight: {result.insight}"
1139
+ story.internal_post = story.public_post
1140
+
1141
+ # Save to SQLite
1142
+ if not dry_run and stories:
1143
+ db = get_db()
1144
+ project_id = db.register_project(repo_path, repo_path.name)
1145
+
1146
+ for story in stories:
1147
+ db.save_story(story, project_id)
1148
+
1149
+ # Update freshness with latest commit
1150
+ if repo_commits:
1151
+ latest_commit = repo_commits[0] # Already sorted by date desc
1152
+ db.update_freshness(
1153
+ project_id,
1154
+ latest_commit.sha,
1155
+ latest_commit.timestamp,
1156
+ )
1157
+
1158
+ if not json_output:
1159
+ print_success(f"Saved {len(stories)} stories to SQLite")
1160
+ else:
1161
+ if not json_output:
1162
+ print_info("Dry run - not saved")
573
1163
 
574
- if not confirm("Continue with generation?"):
575
- console.print(f" [{BRAND_MUTED}]Skipped {repo_info.name}[/]")
576
- continue
1164
+ all_generated_stories.extend(stories)
577
1165
 
578
- # Calculate number of batches for progress
579
- num_batches = (len(commit_list) + batch_size - 1) // batch_size
580
-
581
- # Generate stories with progress tracking
582
- if not json_output and num_batches > 1:
583
- # Use progress bar for multiple batches
584
- with BatchProgress(num_batches, f"Analyzing {repo_info.name}") as progress:
585
- def on_progress(batch_num, total, status):
586
- if status == "complete":
587
- progress.update(1, f"batch {batch_num}/{total}")
588
-
589
- stories = _generate_stories(
590
- commits=commit_list,
591
- repo_info=repo_info,
592
- batch_size=batch_size,
593
- local=local,
594
- template=template,
595
- custom_prompt=prompt,
596
- progress_callback=on_progress,
597
- )
598
- else:
599
- # Single batch or JSON mode - no progress bar needed
600
- if not json_output and num_batches == 1:
601
- console.print(f" Analyzing {len(commit_list)} commits...")
602
-
603
- stories = _generate_stories(
604
- commits=commit_list,
605
- repo_info=repo_info,
606
- batch_size=batch_size,
607
- local=local,
608
- template=template,
609
- custom_prompt=prompt,
610
- )
611
-
612
- for story in stories:
1166
+ except Exception as e:
613
1167
  if not json_output:
614
- console.print(f" {story['summary']}")
615
- total_stories += 1
616
- all_stories.append(story)
617
-
618
- # Log cloud operation if using cloud
619
- if cloud and stories:
620
- log_cloud_operation(
621
- operation="cloud_generation",
622
- destination="repr.dev",
623
- payload_summary={
624
- "repo": repo_info.name,
625
- "commits": len(commit_list),
626
- "stories_generated": len(stories),
627
- },
628
- bytes_sent=len(str(commit_list)) // 2, # Rough estimate
629
- )
630
-
631
- if not json_output:
632
- console.print()
633
-
1168
+ print_error(f"Failed to generate for {repo_path.name}: {e}")
1169
+
634
1170
  if json_output:
635
- print(json.dumps({"generated": total_stories, "stories": all_stories}, indent=2, default=str))
636
- return
637
-
638
- if dry_run:
639
- console.print()
640
- console.print("Continue with generation? (run without --dry-run)")
641
- else:
642
- print_success(f"Generated {total_stories} stories")
1171
+ print(json.dumps({
1172
+ "success": True,
1173
+ "stories_count": len(all_generated_stories),
1174
+ "stories": [s.model_dump(mode="json") for s in all_generated_stories]
1175
+ }, default=str))
1176
+ elif all_generated_stories:
643
1177
  console.print()
644
- console.print(f"Stories saved to: {STORIES_DIR}")
645
- print_next_steps([
646
- "repr stories View your stories",
647
- "repr push Publish to repr.dev (requires login)",
648
- ])
1178
+ print_success(f"Generated {len(all_generated_stories)} stories")
1179
+ print_info("Run `repr dashboard` to view")
649
1180
 
650
1181
 
651
1182
  async def _generate_stories_async(
@@ -748,6 +1279,15 @@ async def _generate_stories_async(
748
1279
  if not content or content.startswith("[Batch"):
749
1280
  continue
750
1281
 
1282
+ # Get technologies from LLM output, fallback to file-based detection
1283
+ technologies = story_output.technologies or []
1284
+ if not technologies:
1285
+ # Detect from files in this batch
1286
+ all_files = []
1287
+ for c in batch:
1288
+ all_files.extend(c.get("files", []))
1289
+ technologies = _detect_technologies_from_files(all_files)
1290
+
751
1291
  metadata = {
752
1292
  "summary": summary,
753
1293
  "repo_name": repo_info.name,
@@ -761,6 +1301,8 @@ async def _generate_stories_async(
761
1301
  "generated_locally": local,
762
1302
  "template": template,
763
1303
  "needs_review": False,
1304
+ # Technologies
1305
+ "technologies": technologies,
764
1306
  # Categories
765
1307
  "category": story_output.category,
766
1308
  "scope": story_output.scope,
@@ -815,49 +1357,286 @@ def _generate_stories(
815
1357
  # STORIES MANAGEMENT
816
1358
  # =============================================================================
817
1359
 
818
- @app.command()
819
- def stories(
820
- repo: Optional[str] = typer.Option(None, "--repo", help="Filter by repository"),
821
- category: Optional[str] = typer.Option(None, "--category", "-c", help="Filter by category (feature, bugfix, refactor, perf, infra, docs, test, chore)"),
822
- scope: Optional[str] = typer.Option(None, "--scope", "-s", help="Filter by scope (user-facing, internal, platform, ops)"),
823
- stack: Optional[str] = typer.Option(None, "--stack", help="Filter by stack (frontend, backend, database, infra, mobile, fullstack)"),
824
- needs_review: bool = typer.Option(False, "--needs-review", help="Show only stories needing review"),
1360
+ @app.command("week")
1361
+ def week(
825
1362
  json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
826
1363
  ):
827
1364
  """
828
- List all stories.
1365
+ Weekly summary — what you built in the last 7 days.
829
1366
 
830
- Example:
831
- repr stories
832
- repr stories --repo myproject
833
- repr stories --category feature
834
- repr stories --scope user-facing
835
- repr stories --stack backend
836
- repr stories --needs-review
1367
+ Provides a quick overview of your work from the past week,
1368
+ including commits and generated stories.
837
1369
  """
838
- story_list = list_stories(repo_name=repo, needs_review=needs_review)
1370
+ from .timeline import extract_commits_from_git
1371
+ from .db import get_db
839
1372
 
840
- # Apply category filters (local filtering since storage doesn't support these yet)
841
- if category:
842
- story_list = [s for s in story_list if s.get("category") == category]
843
- if scope:
844
- story_list = [s for s in story_list if s.get("scope") == scope]
845
- if stack:
846
- story_list = [s for s in story_list if s.get("stack") == stack]
1373
+ week_ago = datetime.now() - timedelta(days=7)
1374
+
1375
+ # Get stories from DB
1376
+ db = get_db()
1377
+ story_list = list_stories()
1378
+ recent_stories = []
1379
+ for s in story_list:
1380
+ try:
1381
+ created_at = s.get("created_at")
1382
+ if isinstance(created_at, str):
1383
+ dt = datetime.fromisoformat(created_at.replace("Z", "+00:00")).replace(tzinfo=None)
1384
+ else:
1385
+ dt = created_at.replace(tzinfo=None)
1386
+
1387
+ if dt > week_ago:
1388
+ recent_stories.append(s)
1389
+ except Exception:
1390
+ continue
1391
+
1392
+ # Get commits from tracked repos
1393
+ tracked = get_tracked_repos()
1394
+ all_commits = []
1395
+ for repo in tracked:
1396
+ path = Path(repo["path"])
1397
+ if path.exists():
1398
+ repo_commits = extract_commits_from_git(path, days=7)
1399
+ for c in repo_commits:
1400
+ c_dict = {
1401
+ "sha": c.sha,
1402
+ "message": c.message,
1403
+ "date": c.timestamp.isoformat(),
1404
+ "repo_name": path.name,
1405
+ "insertions": c.insertions,
1406
+ "deletions": c.deletions,
1407
+ }
1408
+ all_commits.append(c_dict)
847
1409
 
848
1410
  if json_output:
849
- print(json.dumps(story_list, indent=2, default=str))
1411
+ print(json.dumps({
1412
+ "stories": recent_stories,
1413
+ "commits": all_commits,
1414
+ "period": "7 days",
1415
+ }, indent=2, default=str))
850
1416
  return
1417
+
1418
+ print_header()
1419
+ console.print(f"[bold]Weekly Summary[/] (since {week_ago.strftime('%Y-%m-%d')})")
1420
+ console.print()
851
1421
 
852
- if not story_list:
853
- print_info("No stories found.")
854
- print_info("Run `repr generate` to create stories from your commits.")
855
- raise typer.Exit()
1422
+ # Stats
1423
+ total_commits = len(all_commits)
1424
+ repos = set(c.get("repo_name") for c in all_commits)
1425
+ total_adds = sum(c.get("insertions", 0) for c in all_commits)
1426
+ total_dels = sum(c.get("deletions", 0) for c in all_commits)
856
1427
 
857
- console.print(f"[bold]Stories[/] ({len(story_list)} total)")
1428
+ console.print(f" {total_commits} commits across {len(repos)} repos")
1429
+ console.print(f" [{BRAND_SUCCESS}]+{total_adds}[/] / [{BRAND_ERROR}]-{total_dels}[/] lines changed")
858
1430
  console.print()
859
-
860
- # Group stories by repository
1431
+
1432
+ # Recent stories
1433
+ if recent_stories:
1434
+ console.print("[bold]Stories Generated[/]")
1435
+ for s in recent_stories[:10]:
1436
+ summary = s.get("summary", s.get("title", "Untitled"))
1437
+ repo = s.get("repo_name", "unknown")
1438
+ console.print(f" • {summary} [{BRAND_MUTED}]({repo})[/]")
1439
+ console.print()
1440
+
1441
+ # Commits by repo
1442
+ console.print("[bold]Recent Activity[/]")
1443
+ by_repo = defaultdict(list)
1444
+ for c in all_commits:
1445
+ by_repo[c["repo_name"]].append(c)
1446
+
1447
+ for repo_name, repo_commits in sorted(by_repo.items(), key=lambda x: -len(x[1])):
1448
+ console.print(f" [bold]{repo_name}[/] ({len(repo_commits)} commits)")
1449
+ for c in repo_commits[:3]:
1450
+ msg = c["message"].split("\n")[0][:60]
1451
+ console.print(f" - {msg}")
1452
+ if len(repo_commits) > 3:
1453
+ console.print(f" [{BRAND_MUTED}]... and {len(repo_commits) - 3} more[/]")
1454
+
1455
+ console.print()
1456
+ print_info("Run `repr generate` to turn recent commits into stories.")
1457
+
1458
+
1459
+ @app.command("standup")
1460
+ def standup(
1461
+ json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
1462
+ ):
1463
+ """
1464
+ Quick standup — what you did yesterday and today.
1465
+ """
1466
+ from .timeline import extract_commits_from_git
1467
+
1468
+ today = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
1469
+ yesterday = today - timedelta(days=1)
1470
+
1471
+ tracked = get_tracked_repos()
1472
+ all_commits = []
1473
+ for repo in tracked:
1474
+ path = Path(repo["path"])
1475
+ if path.exists():
1476
+ repo_commits = extract_commits_from_git(path, days=2)
1477
+ for c in repo_commits:
1478
+ c_dict = {
1479
+ "sha": c.sha,
1480
+ "message": c.message,
1481
+ "date": c.timestamp.isoformat(),
1482
+ "repo_name": path.name,
1483
+ }
1484
+ all_commits.append(c_dict)
1485
+
1486
+ today_commits = []
1487
+ yesterday_commits = []
1488
+
1489
+ for c in all_commits:
1490
+ try:
1491
+ commit_date = datetime.fromisoformat(c["date"].replace("Z", "+00:00")).replace(tzinfo=None)
1492
+ if commit_date >= today:
1493
+ today_commits.append(c)
1494
+ elif commit_date >= yesterday:
1495
+ yesterday_commits.append(c)
1496
+ except (ValueError, TypeError):
1497
+ yesterday_commits.append(c)
1498
+
1499
+ if json_output:
1500
+ print(json.dumps({
1501
+ "today": today_commits,
1502
+ "yesterday": yesterday_commits,
1503
+ }, indent=2, default=str))
1504
+ return
1505
+
1506
+ print_header()
1507
+ console.print("[bold]Standup Summary[/]")
1508
+ console.print()
1509
+
1510
+ if yesterday_commits:
1511
+ console.print("[bold]Yesterday[/]")
1512
+ for c in yesterday_commits:
1513
+ msg = c["message"].split("\n")[0][:70]
1514
+ console.print(f" • {msg} [{BRAND_MUTED}]({c['repo_name']})[/]")
1515
+ console.print()
1516
+
1517
+ if today_commits:
1518
+ console.print("[bold]Today[/]")
1519
+ for c in today_commits:
1520
+ msg = c["message"].split("\n")[0][:70]
1521
+ console.print(f" • {msg} [{BRAND_MUTED}]({c['repo_name']})[/]")
1522
+ console.print()
1523
+
1524
+ if not yesterday_commits and not today_commits:
1525
+ print_info("No activity found in the last 2 days.")
1526
+ else:
1527
+ print_info("Generated from local git history.")
1528
+
1529
+
1530
+ @app.command("since")
1531
+ def since(
1532
+ date_ref: str = typer.Argument(..., help="Date reference (e.g., 'monday', '2024-01-01', '3 days ago')"),
1533
+ json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
1534
+ ):
1535
+ """
1536
+ Reflection summary since a specific date.
1537
+ """
1538
+ from .timeline import extract_commits_from_git
1539
+
1540
+ parsed_date_str = _parse_date_reference(date_ref)
1541
+ if not parsed_date_str:
1542
+ print_error(f"Could not parse date: {date_ref}")
1543
+ print_info("Try: 'monday', 'yesterday', '2024-01-01', '3 days ago'")
1544
+ raise typer.Exit(1)
1545
+
1546
+ since_date = datetime.fromisoformat(parsed_date_str)
1547
+ now = datetime.now()
1548
+ days_back = (now - since_date).days + 1
1549
+
1550
+ tracked = get_tracked_repos()
1551
+ all_commits = []
1552
+ for repo in tracked:
1553
+ path = Path(repo["path"])
1554
+ if path.exists():
1555
+ repo_commits = extract_commits_from_git(path, days=days_back)
1556
+ for c in repo_commits:
1557
+ if c.timestamp.replace(tzinfo=None) >= since_date:
1558
+ c_dict = {
1559
+ "sha": c.sha,
1560
+ "message": c.message,
1561
+ "date": c.timestamp.isoformat(),
1562
+ "repo_name": path.name,
1563
+ }
1564
+ all_commits.append(c_dict)
1565
+
1566
+ if json_output:
1567
+ print(json.dumps({
1568
+ "since": parsed_date_str,
1569
+ "commits": all_commits,
1570
+ }, indent=2, default=str))
1571
+ return
1572
+
1573
+ print_header()
1574
+ console.print(f"[bold]Work since {date_ref}[/] ({since_date.strftime('%Y-%m-%d')})")
1575
+ console.print()
1576
+
1577
+ if not all_commits:
1578
+ print_info(f"No commits found since {date_ref}.")
1579
+ return
1580
+
1581
+ by_repo = defaultdict(list)
1582
+ for c in all_commits:
1583
+ by_repo[c["repo_name"]].append(c)
1584
+
1585
+ for repo_name, repo_commits in sorted(by_repo.items(), key=lambda x: -len(x[1])):
1586
+ console.print(f" [bold]{repo_name}[/] ({len(repo_commits)} commits)")
1587
+ for c in repo_commits[:5]:
1588
+ msg = c["message"].split("\n")[0][:60]
1589
+ console.print(f" - {msg}")
1590
+ if len(repo_commits) > 5:
1591
+ console.print(f" [{BRAND_MUTED}]... and {len(repo_commits) - 5} more[/]")
1592
+
1593
+ console.print()
1594
+ print_info(f"Summary based on {len(all_commits)} commits.")
1595
+
1596
+
1597
+ @app.command()
1598
+ def stories(
1599
+ repo: Optional[str] = typer.Option(None, "--repo", help="Filter by repository"),
1600
+ category: Optional[str] = typer.Option(None, "--category", "-c", help="Filter by category (feature, bugfix, refactor, perf, infra, docs, test, chore)"),
1601
+ scope: Optional[str] = typer.Option(None, "--scope", "-s", help="Filter by scope (user-facing, internal, platform, ops)"),
1602
+ stack: Optional[str] = typer.Option(None, "--stack", help="Filter by stack (frontend, backend, database, infra, mobile, fullstack)"),
1603
+ needs_review: bool = typer.Option(False, "--needs-review", help="Show only stories needing review"),
1604
+ json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
1605
+ ):
1606
+ """
1607
+ List all stories.
1608
+
1609
+ Example:
1610
+ repr stories
1611
+ repr stories --repo myproject
1612
+ repr stories --category feature
1613
+ repr stories --scope user-facing
1614
+ repr stories --stack backend
1615
+ repr stories --needs-review
1616
+ """
1617
+ story_list = list_stories(repo_name=repo, needs_review=needs_review)
1618
+
1619
+ # Apply category filters (local filtering since storage doesn't support these yet)
1620
+ if category:
1621
+ story_list = [s for s in story_list if s.get("category") == category]
1622
+ if scope:
1623
+ story_list = [s for s in story_list if s.get("scope") == scope]
1624
+ if stack:
1625
+ story_list = [s for s in story_list if s.get("stack") == stack]
1626
+
1627
+ if json_output:
1628
+ print(json.dumps(story_list, indent=2, default=str))
1629
+ return
1630
+
1631
+ if not story_list:
1632
+ print_info("No stories found.")
1633
+ print_info("Run `repr generate` to create stories from your commits.")
1634
+ raise typer.Exit()
1635
+
1636
+ console.print(f"[bold]Stories[/] ({len(story_list)} total)")
1637
+ console.print()
1638
+
1639
+ # Group stories by repository
861
1640
  by_repo = defaultdict(list)
862
1641
  for story in story_list[:20]:
863
1642
  r_name = story.get("repo_name", "unknown")
@@ -1100,93 +1879,106 @@ def stories_review():
1100
1879
 
1101
1880
  @app.command()
1102
1881
  def push(
1103
- story_id: Optional[str] = typer.Option(None, "--story", help="Push specific story"),
1104
- all_stories: bool = typer.Option(False, "--all", help="Push all unpushed stories"),
1882
+ visibility: str = typer.Option("friends", "--visibility", help="Visibility setting: public, friends, private"),
1883
+ force: bool = typer.Option(False, "--force", help="Re-push all stories, even if already pushed"),
1105
1884
  dry_run: bool = typer.Option(False, "--dry-run", help="Preview what would be pushed"),
1106
1885
  ):
1107
1886
  """
1108
- Publish stories to repr.dev.
1109
-
1887
+ Sync local stories to repr.dev backend.
1888
+
1110
1889
  Examples:
1111
1890
  repr push
1112
- repr push --story 01ARYZ6S41TSV4RRFFQ69G5FAV
1891
+ repr push --visibility public
1892
+ repr push --force
1113
1893
  """
1114
1894
  from .privacy import check_cloud_permission, log_cloud_operation
1115
-
1895
+ from .api import push_stories_batch, APIError, AuthError
1896
+ from .db import get_db
1897
+
1898
+ # Check authentication
1899
+ if not get_access_token():
1900
+ print_error("Not authenticated")
1901
+ print_info("Run 'repr login' to authenticate")
1902
+ raise typer.Exit(1)
1903
+
1116
1904
  allowed, reason = check_cloud_permission("push")
1117
1905
  if not allowed:
1118
1906
  print_error("Publishing blocked")
1119
1907
  print_info(reason)
1120
1908
  raise typer.Exit(1)
1121
-
1122
- # Get stories to push
1123
- if story_id:
1124
- result = load_story(story_id)
1125
- if not result:
1126
- print_error(f"Story not found: {story_id}")
1127
- raise typer.Exit(1)
1128
- content, metadata = result
1129
- to_push = [{"id": story_id, "content": content, **metadata}]
1130
- else:
1131
- to_push = get_unpushed_stories()
1132
-
1133
- if not to_push:
1134
- print_success("All stories already synced!")
1909
+
1910
+ # Get all stories from database
1911
+ db = get_db()
1912
+ all_stories = db.list_stories(limit=10000)
1913
+
1914
+ if not all_stories:
1915
+ print_info("No stories to push")
1135
1916
  raise typer.Exit()
1136
-
1137
- console.print(f"Publishing {len(to_push)} story(ies) to repr.dev...")
1917
+
1918
+ console.print(f"Found {len(all_stories)} story(ies) in local database")
1138
1919
  console.print()
1139
-
1920
+
1140
1921
  if dry_run:
1141
- for s in to_push:
1142
- console.print(f" • {s.get('summary', s.get('id'))}")
1922
+ for story in all_stories:
1923
+ console.print(f" • {story.title[:60]}")
1143
1924
  console.print()
1144
- console.print("Run without --dry-run to publish")
1925
+ console.print("Run without --dry-run to push")
1145
1926
  raise typer.Exit()
1146
-
1927
+
1147
1928
  # Build batch payload
1148
- from .api import push_stories_batch
1149
-
1929
+ console.print(f"Preparing to push with visibility: {visibility}...")
1930
+
1150
1931
  stories_payload = []
1151
- for s in to_push:
1152
- content, meta = load_story(s["id"])
1153
- # Use local story ID as client_id for sync
1154
- payload = {**meta, "content": content, "client_id": s["id"]}
1932
+ for story in all_stories:
1933
+ # Convert Story model to dict and ensure author_name is included
1934
+ payload = story.model_dump(mode="json")
1935
+ payload["visibility"] = visibility
1936
+ payload["client_id"] = story.id # Use story ID for sync tracking
1155
1937
  stories_payload.append(payload)
1156
-
1157
- # Push all stories in a single batch request
1158
- try:
1159
- result = asyncio.run(push_stories_batch(stories_payload))
1160
- pushed = result.get("pushed", 0)
1161
- results = result.get("results", [])
1162
-
1163
- # Mark successful stories as pushed and display results
1164
- for i, story_result in enumerate(results):
1165
- story_id_local = to_push[i]["id"]
1166
- summary = to_push[i].get("summary", story_id_local)[:50]
1167
-
1168
- if story_result.get("success"):
1169
- mark_story_pushed(story_id_local)
1170
- console.print(f" [{BRAND_SUCCESS}][/] {summary}")
1171
- else:
1172
- error_msg = story_result.get("error", "Unknown error")
1173
- console.print(f" [{BRAND_ERROR}][/] {summary}: {error_msg}")
1174
-
1175
- except (APIError, AuthError) as e:
1176
- print_error(f"Batch push failed: {e}")
1177
- raise typer.Exit(1)
1178
-
1938
+
1939
+ # Push all stories in batch with progress
1940
+ console.print(f"Pushing {len(stories_payload)} stories...")
1941
+ console.print()
1942
+
1943
+ with BatchProgress() as progress:
1944
+ try:
1945
+ result = asyncio.run(push_stories_batch(stories_payload))
1946
+ pushed = result.get("pushed", 0)
1947
+ failed = result.get("failed", 0)
1948
+ results = result.get("results", [])
1949
+
1950
+ # Display results
1951
+ for i, story_result in enumerate(results):
1952
+ story_title = all_stories[i].title[:50] if i < len(all_stories) else "Unknown"
1953
+
1954
+ if story_result.get("success"):
1955
+ console.print(f" [{BRAND_SUCCESS}][/] {story_title}")
1956
+ else:
1957
+ error_msg = story_result.get("error", "Unknown error")
1958
+ console.print(f" [{BRAND_ERROR}]✗[/] {story_title}: {error_msg}")
1959
+
1960
+ except (APIError, AuthError) as e:
1961
+ print_error(f"Batch push failed: {e}")
1962
+ raise typer.Exit(1)
1963
+
1179
1964
  # Log operation
1180
1965
  if pushed > 0:
1181
1966
  log_cloud_operation(
1182
1967
  operation="push",
1183
1968
  destination="repr.dev",
1184
- payload_summary={"stories_pushed": pushed},
1969
+ payload_summary={
1970
+ "stories_pushed": pushed,
1971
+ "visibility": visibility,
1972
+ "force": force,
1973
+ },
1185
1974
  bytes_sent=0,
1186
1975
  )
1187
-
1976
+
1188
1977
  console.print()
1189
- print_success(f"Pushed {pushed}/{len(to_push)} stories")
1978
+ if failed > 0:
1979
+ print_warning(f"Pushed {pushed}/{len(stories_payload)} stories ({failed} failed)")
1980
+ else:
1981
+ print_success(f"Pushed {pushed}/{len(stories_payload)} stories")
1190
1982
 
1191
1983
 
1192
1984
  @app.command()
@@ -1733,6 +2525,138 @@ def hooks_queue(
1733
2525
  pass
1734
2526
 
1735
2527
 
2528
+ # =============================================================================
2529
+ # CRON SCHEDULING
2530
+ # =============================================================================
2531
+
2532
+ @cron_app.command("install")
2533
+ def cron_install(
2534
+ interval: int = typer.Option(4, "--interval", "-i", help="Hours between runs (default: 4)"),
2535
+ min_commits: int = typer.Option(3, "--min-commits", "-m", help="Minimum commits to trigger generation"),
2536
+ ):
2537
+ """
2538
+ Install cron job for automatic story generation.
2539
+
2540
+ Runs every 4 hours by default, only generating if there are
2541
+ enough commits in the queue.
2542
+
2543
+ Example:
2544
+ repr cron install
2545
+ repr cron install --interval 6 --min-commits 5
2546
+ """
2547
+ from .cron import install_cron
2548
+
2549
+ result = install_cron(interval, min_commits)
2550
+
2551
+ if result["success"]:
2552
+ if result["already_installed"]:
2553
+ print_success(result["message"])
2554
+ else:
2555
+ print_success(result["message"])
2556
+ console.print()
2557
+ console.print(f"[{BRAND_MUTED}]Stories will generate every {interval}h when queue has ≥{min_commits} commits[/]")
2558
+ console.print(f"[{BRAND_MUTED}]Logs: ~/.repr/logs/cron.log[/]")
2559
+ else:
2560
+ print_error(result["message"])
2561
+ raise typer.Exit(1)
2562
+
2563
+
2564
+ @cron_app.command("remove")
2565
+ def cron_remove():
2566
+ """
2567
+ Remove cron job for story generation.
2568
+
2569
+ Example:
2570
+ repr cron remove
2571
+ """
2572
+ from .cron import remove_cron
2573
+
2574
+ result = remove_cron()
2575
+
2576
+ if result["success"]:
2577
+ print_success(result["message"])
2578
+ else:
2579
+ print_error(result["message"])
2580
+ raise typer.Exit(1)
2581
+
2582
+
2583
+ @cron_app.command("pause")
2584
+ def cron_pause():
2585
+ """
2586
+ Pause cron job without removing it.
2587
+
2588
+ Example:
2589
+ repr cron pause
2590
+ """
2591
+ from .cron import pause_cron
2592
+
2593
+ result = pause_cron()
2594
+
2595
+ if result["success"]:
2596
+ print_success(result["message"])
2597
+ console.print(f"[{BRAND_MUTED}]Use `repr cron resume` to re-enable[/]")
2598
+ else:
2599
+ print_error(result["message"])
2600
+ raise typer.Exit(1)
2601
+
2602
+
2603
+ @cron_app.command("resume")
2604
+ def cron_resume():
2605
+ """
2606
+ Resume paused cron job.
2607
+
2608
+ Example:
2609
+ repr cron resume
2610
+ """
2611
+ from .cron import resume_cron
2612
+
2613
+ result = resume_cron()
2614
+
2615
+ if result["success"]:
2616
+ print_success(result["message"])
2617
+ else:
2618
+ print_error(result["message"])
2619
+ raise typer.Exit(1)
2620
+
2621
+
2622
+ @cron_app.command("status")
2623
+ def cron_status(
2624
+ json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
2625
+ ):
2626
+ """
2627
+ Show cron job status.
2628
+
2629
+ Example:
2630
+ repr cron status
2631
+ """
2632
+ from .cron import get_cron_status
2633
+
2634
+ status = get_cron_status()
2635
+
2636
+ if json_output:
2637
+ print(json.dumps(status, indent=2))
2638
+ return
2639
+
2640
+ console.print("[bold]Cron Status[/]")
2641
+ console.print()
2642
+
2643
+ if not status["installed"]:
2644
+ console.print(f"[{BRAND_MUTED}]○[/] Not installed")
2645
+ console.print()
2646
+ console.print(f"[{BRAND_MUTED}]Run `repr cron install` to enable scheduled generation[/]")
2647
+ return
2648
+
2649
+ if status["paused"]:
2650
+ console.print(f"[{BRAND_WARNING}]⏸[/] Paused")
2651
+ console.print(f" [{BRAND_MUTED}]Interval: every {status['interval_hours']}h[/]")
2652
+ console.print()
2653
+ console.print(f"[{BRAND_MUTED}]Run `repr cron resume` to re-enable[/]")
2654
+ else:
2655
+ console.print(f"[{BRAND_SUCCESS}]✓[/] Active")
2656
+ console.print(f" [{BRAND_MUTED}]Interval: every {status['interval_hours']}h[/]")
2657
+ console.print(f" [{BRAND_MUTED}]Logs: ~/.repr/logs/cron.log[/]")
2658
+
2659
+
1736
2660
  # =============================================================================
1737
2661
  # LLM CONFIGURATION
1738
2662
  # =============================================================================
@@ -1743,7 +2667,7 @@ def llm_add(
1743
2667
  ):
1744
2668
  """
1745
2669
  Configure a BYOK provider.
1746
-
2670
+
1747
2671
  Example:
1748
2672
  repr llm add openai
1749
2673
  """
@@ -1948,6 +2872,77 @@ def llm_test():
1948
2872
  console.print(f" [{BRAND_SUCCESS}]✓[/] {provider}")
1949
2873
 
1950
2874
 
2875
+ # =============================================================================
2876
+ # CONFIGURE (unified setup wizard)
2877
+ # =============================================================================
2878
+
2879
+ @configure_app.callback(invoke_without_command=True)
2880
+ def configure_main(ctx: typer.Context):
2881
+ """
2882
+ Configure repr settings (LLM, repos, schedule).
2883
+
2884
+ Run without arguments to see an interactive menu.
2885
+ Use subcommands for direct configuration:
2886
+
2887
+ repr configure llm Configure LLM provider
2888
+ repr configure repos Configure tracked repositories
2889
+ repr configure schedule Configure automatic generation
2890
+
2891
+ Example:
2892
+ repr configure
2893
+ """
2894
+ if ctx.invoked_subcommand is None:
2895
+ from .configure import run_configure_menu
2896
+ run_configure_menu()
2897
+
2898
+
2899
+ @configure_app.command("llm")
2900
+ def configure_llm():
2901
+ """
2902
+ Configure LLM provider interactively.
2903
+
2904
+ Supports:
2905
+ - Local: Ollama, LM Studio
2906
+ - API: OpenAI, Anthropic, Gemini, Groq, Together, OpenRouter
2907
+
2908
+ Example:
2909
+ repr configure llm
2910
+ """
2911
+ from .configure import wizard_llm
2912
+ wizard_llm()
2913
+
2914
+
2915
+ @configure_app.command("repos")
2916
+ def configure_repos():
2917
+ """
2918
+ Configure tracked repositories.
2919
+
2920
+ Scans for git repositories and lets you select which to track.
2921
+
2922
+ Example:
2923
+ repr configure repos
2924
+ """
2925
+ from .configure import wizard_repos
2926
+ wizard_repos()
2927
+
2928
+
2929
+ @configure_app.command("schedule")
2930
+ def configure_schedule():
2931
+ """
2932
+ Configure automatic story generation schedule.
2933
+
2934
+ Options:
2935
+ - Scheduled (cron) - Every N hours
2936
+ - On commit (hooks) - After N commits
2937
+ - Manual - Run `repr generate` yourself
2938
+
2939
+ Example:
2940
+ repr configure schedule
2941
+ """
2942
+ from .configure import wizard_schedule
2943
+ wizard_schedule()
2944
+
2945
+
1951
2946
  # =============================================================================
1952
2947
  # PRIVACY
1953
2948
  # =============================================================================
@@ -2340,12 +3335,12 @@ def data_restore(
2340
3335
  def data_clear_cache():
2341
3336
  """
2342
3337
  Clear local cache.
2343
-
3338
+
2344
3339
  Example:
2345
3340
  repr data clear-cache
2346
3341
  """
2347
3342
  from .config import clear_cache, get_cache_size
2348
-
3343
+
2349
3344
  size = get_cache_size()
2350
3345
  clear_cache()
2351
3346
  print_success(f"Cache cleared ({format_bytes(size)} freed)")
@@ -2353,7 +3348,200 @@ def data_clear_cache():
2353
3348
  console.print(" Config preserved")
2354
3349
 
2355
3350
 
2356
- # =============================================================================
3351
@data_app.command("migrate-db")
def data_migrate_db(
    dry_run: bool = typer.Option(False, "--dry-run", help="Show what would be migrated"),
    project: Optional[Path] = typer.Option(None, "--project", "-p", help="Migrate specific project"),
):
    """
    Migrate store.json files to central SQLite database.

    This command imports existing .repr/store.json files into the central
    SQLite database at ~/.repr/stories.db for faster queries.

    Example:
        repr data migrate-db # Migrate all tracked repos
        repr data migrate-db --dry-run # Preview migration
        repr data migrate-db -p /path/to/repo
    """
    from .storage import migrate_stores_to_db, get_db_stats

    # A single --project narrows the migration; None means "all tracked repos".
    selected_paths = [project] if project else None

    with create_spinner("Migrating stories to SQLite...") as progress:
        migrate_task = progress.add_task("migrating", total=None)

        if dry_run:
            console.print("[bold]Dry run mode - no changes will be made[/]\n")

        migration = migrate_stores_to_db(project_paths=selected_paths, dry_run=dry_run)

        progress.update(migrate_task, completed=True)

    console.print()
    # Headline counters, one per line.
    for label, key in (
        ("Projects scanned", "projects_scanned"),
        ("Projects migrated", "projects_migrated"),
        ("Stories imported", "stories_imported"),
    ):
        console.print(f"{label}: {migration[key]}")

    errors = migration['errors']
    if errors:
        console.print()
        print_warning(f"{len(errors)} errors:")
        # Cap the error listing at five entries to keep output readable.
        for error in errors[:5]:
            console.print(f" • {error}")
        if len(errors) > 5:
            console.print(f" ... and {len(errors) - 5} more")

    # After a real (non-dry-run) import, surface the resulting DB footprint.
    if not dry_run and migration['stories_imported'] > 0:
        console.print()
        db_stats = get_db_stats()
        console.print(f"Database: {db_stats['db_path']}")
        console.print(f"Total stories: {db_stats['story_count']}")
        console.print(f"Database size: {format_bytes(db_stats['db_size_bytes'])}")
3400
+
3401
+
3402
@data_app.command("db-stats")
def data_db_stats():
    """
    Show SQLite database statistics.

    Example:
        repr data db-stats
    """
    from .storage import get_db_stats
    from .db import get_db_path

    # Bail out early when the database has never been created.
    if not get_db_path().exists():
        print_info("No SQLite database yet.")
        print_info("Run `repr data migrate-db` to create one.")
        return

    db_stats = get_db_stats()

    console.print("[bold]SQLite Database Stats[/]")
    console.print()
    console.print(f"Path: {db_stats['db_path']}")
    console.print(f"Size: {format_bytes(db_stats['db_size_bytes'])}")
    console.print()
    console.print(f"Stories: {db_stats['story_count']}")
    console.print(f"Projects: {db_stats['project_count']}")
    console.print(f"Unique files: {db_stats['unique_files']}")
    console.print(f"Unique commits: {db_stats['unique_commits']}")

    categories = db_stats['categories']
    if categories:
        console.print()
        console.print("[bold]By Category:[/]")
        # Largest counts first; sorted() is stable for ties.
        for category, count in sorted(categories.items(), key=lambda item: -item[1]):
            console.print(f" {category}: {count}")
3435
+
3436
+
3437
@data_app.command("clear")
def data_clear(
    force: bool = typer.Option(False, "--force", "-f", help="Skip confirmation"),
):
    """
    Clear all stories from database and storage.

    This permanently deletes:
    - All stories from the SQLite database
    - All story files from ~/.repr/stories/
    - Local cache

    Projects registry and config are preserved.

    Example:
        repr data clear # With confirmation
        repr data clear --force # Skip confirmation
    """
    from .db import get_db_path
    from .storage import STORIES_DIR
    from .config import get_cache_size, clear_cache
    import shutil

    db_path = get_db_path()

    # Check what exists before touching anything, so we can both short-circuit
    # and show the user an accurate pre-deletion summary.
    db_exists = db_path.exists()
    stories_dir_exists = STORIES_DIR.exists()
    cache_size = get_cache_size()

    if not db_exists and not stories_dir_exists and cache_size == 0:
        print_info("Nothing to clear - no database, stories, or cache found.")
        return

    # Count what we're about to delete (for the confirmation prompt and the
    # final report printed after deletion).
    story_count = 0
    db_size = 0
    stories_file_count = 0

    if db_exists:
        from .storage import get_db_stats
        try:
            stats = get_db_stats()
            story_count = stats.get('story_count', 0)
            db_size = stats.get('db_size_bytes', 0)
        except Exception:
            # Stats query may fail (e.g. unreadable/corrupt DB); fall back to
            # the raw file size so the summary still shows something useful.
            db_size = db_path.stat().st_size if db_path.exists() else 0

    if stories_dir_exists:
        stories_file_count = len(list(STORIES_DIR.glob("*")))

    # Show what will be deleted
    console.print("[bold red]This will permanently delete:[/]")
    console.print()
    if db_exists:
        console.print(f" • Database: {db_path}")
        console.print(f" {story_count} stories, {format_bytes(db_size)}")
    if stories_dir_exists and stories_file_count > 0:
        console.print(f" • Story files: {STORIES_DIR}")
        console.print(f" {stories_file_count} files")
    if cache_size > 0:
        console.print(f" • Cache: {format_bytes(cache_size)}")
    console.print()
    console.print("[dim]Projects registry and config will be preserved.[/]")
    console.print()

    # Interactive guard; --force skips it for scripted use.
    if not force:
        if not confirm("Are you sure you want to delete all stories and cache?"):
            print_info("Cancelled")
            raise typer.Exit()

    # Delete database
    if db_exists:
        try:
            # Also delete WAL and SHM files if they exist — SQLite keeps its
            # write-ahead log and shared-memory index as sidecar files.
            db_path.unlink()
            wal_path = db_path.with_suffix(".db-wal")
            shm_path = db_path.with_suffix(".db-shm")
            if wal_path.exists():
                wal_path.unlink()
            if shm_path.exists():
                shm_path.unlink()
        except Exception as e:
            print_error(f"Failed to delete database: {e}")
            raise typer.Exit(1)

    # Delete stories directory contents (but keep the directory)
    if stories_dir_exists:
        try:
            shutil.rmtree(STORIES_DIR)
            STORIES_DIR.mkdir(exist_ok=True)
        except Exception as e:
            print_error(f"Failed to clear stories directory: {e}")
            raise typer.Exit(1)

    # Clear cache
    clear_cache()

    # Report what was actually removed, using the counts captured earlier.
    print_success("All stories and cache cleared")
    if db_exists:
        console.print(f" Deleted: {story_count} stories from database")
    if stories_file_count > 0:
        console.print(f" Deleted: {stories_file_count} story files")
    if cache_size > 0:
        console.print(f" Cleared: {format_bytes(cache_size)} from cache")
3542
+
3543
+
3544
+ # =============================================================================
2357
3545
  # PROFILE
2358
3546
  # =============================================================================
2359
3547
 
@@ -2535,6 +3723,254 @@ def profile_link():
2535
3723
  print_info("Run `repr profile set-username <name>`")
2536
3724
 
2537
3725
 
3726
+ # =============================================================================
3727
+ # PUBLISH/UNPUBLISH COMMANDS
3728
+ # =============================================================================
3729
+
3730
+ @app.command("publish")
3731
+ def publish_story(
3732
+ story_id: str = typer.Argument(..., help="Story ID to publish"),
3733
+ visibility: str = typer.Option("public", "--visibility", "-v", help="Visibility: public, friends, private"),
3734
+ ):
3735
+ """
3736
+ Set story visibility.
3737
+
3738
+ Examples:
3739
+ repr publish abc123
3740
+ repr publish abc123 --visibility friends
3741
+ """
3742
+ from .api import set_story_visibility, AuthError
3743
+
3744
+ if visibility not in ("public", "friends", "private"):
3745
+ print_error(f"Invalid visibility: {visibility}")
3746
+ print_info("Valid options: public, friends, private")
3747
+ raise typer.Exit(1)
3748
+
3749
+ if not is_authenticated():
3750
+ print_error("Not authenticated")
3751
+ print_info("Run `repr login` first")
3752
+ raise typer.Exit(1)
3753
+
3754
+ try:
3755
+ with create_spinner(f"Setting visibility to {visibility}..."):
3756
+ result = asyncio.run(set_story_visibility(story_id, visibility))
3757
+ print_success(f"Story visibility set to {visibility}")
3758
+ except AuthError as e:
3759
+ print_error(str(e))
3760
+ raise typer.Exit(1)
3761
+ except APIError as e:
3762
+ print_error(f"API error: {e}")
3763
+ raise typer.Exit(1)
3764
+
3765
+
3766
+ @app.command("unpublish")
3767
+ def unpublish_story(
3768
+ story_id: str = typer.Argument(..., help="Story ID to unpublish"),
3769
+ ):
3770
+ """
3771
+ Set story to private.
3772
+
3773
+ Examples:
3774
+ repr unpublish abc123
3775
+ """
3776
+ from .api import set_story_visibility, AuthError
3777
+
3778
+ if not is_authenticated():
3779
+ print_error("Not authenticated")
3780
+ print_info("Run `repr login` first")
3781
+ raise typer.Exit(1)
3782
+
3783
+ try:
3784
+ with create_spinner("Setting story to private..."):
3785
+ result = asyncio.run(set_story_visibility(story_id, "private"))
3786
+ print_success("Story set to private")
3787
+ except AuthError as e:
3788
+ print_error(str(e))
3789
+ raise typer.Exit(1)
3790
+ except APIError as e:
3791
+ print_error(f"API error: {e}")
3792
+ raise typer.Exit(1)
3793
+
3794
+
3795
+ # =============================================================================
3796
+ # FRIENDS
3797
+ # =============================================================================
3798
+
3799
+ @friends_app.command("add")
3800
+ def friends_add(
3801
+ username: str = typer.Argument(..., help="Username to send friend request to"),
3802
+ ):
3803
+ """Send friend request.
3804
+
3805
+ Example:
3806
+ repr friends add johndoe
3807
+ """
3808
+ if not is_authenticated():
3809
+ print_error("Friend requests require sign-in")
3810
+ print_info("Run `repr login` first")
3811
+ raise typer.Exit(1)
3812
+
3813
+ from .api import send_friend_request, AuthError
3814
+
3815
+ try:
3816
+ result = asyncio.run(send_friend_request(username))
3817
+ print_success(f"Friend request sent to {username}")
3818
+ except AuthError as e:
3819
+ print_error(str(e))
3820
+ print_info("Run `repr login` to re-authenticate")
3821
+ raise typer.Exit(1)
3822
+ except APIError as e:
3823
+ print_error(str(e))
3824
+ raise typer.Exit(1)
3825
+
3826
+
3827
+ @friends_app.command("list")
3828
+ def friends_list():
3829
+ """List friends.
3830
+
3831
+ Example:
3832
+ repr friends list
3833
+ """
3834
+ if not is_authenticated():
3835
+ print_error("Friends list requires sign-in")
3836
+ print_info("Run `repr login` first")
3837
+ raise typer.Exit(1)
3838
+
3839
+ from .api import get_friends, AuthError
3840
+
3841
+ try:
3842
+ friends = asyncio.run(get_friends())
3843
+
3844
+ if not friends:
3845
+ print_info("No friends yet")
3846
+ return
3847
+
3848
+ table = create_table("Friends", ["Username", "Added"])
3849
+ for friend in friends:
3850
+ username = friend.get("username", "N/A")
3851
+ added_at = friend.get("created_at", "")
3852
+ # Format the date
3853
+ if added_at:
3854
+ from datetime import datetime
3855
+ try:
3856
+ dt = datetime.fromisoformat(added_at.replace("Z", "+00:00"))
3857
+ added_at = dt.strftime("%Y-%m-%d")
3858
+ except ValueError:
3859
+ pass
3860
+ table.add_row(username, added_at or "N/A")
3861
+ console.print(table)
3862
+ except AuthError as e:
3863
+ print_error(str(e))
3864
+ print_info("Run `repr login` to re-authenticate")
3865
+ raise typer.Exit(1)
3866
+ except APIError as e:
3867
+ print_error(str(e))
3868
+ raise typer.Exit(1)
3869
+
3870
+
3871
+ @friends_app.command("requests")
3872
+ def friends_requests():
3873
+ """View pending friend requests.
3874
+
3875
+ Example:
3876
+ repr friends requests
3877
+ """
3878
+ if not is_authenticated():
3879
+ print_error("Friend requests require sign-in")
3880
+ print_info("Run `repr login` first")
3881
+ raise typer.Exit(1)
3882
+
3883
+ from .api import get_friend_requests, AuthError
3884
+
3885
+ try:
3886
+ requests = asyncio.run(get_friend_requests())
3887
+
3888
+ if not requests:
3889
+ print_info("No pending friend requests")
3890
+ return
3891
+
3892
+ table = create_table("Pending Friend Requests", ["Request ID", "From", "Sent"])
3893
+ for req in requests:
3894
+ request_id = req.get("id", "N/A")
3895
+ from_user = req.get("from_username", "N/A")
3896
+ sent_at = req.get("created_at", "")
3897
+ # Format the date
3898
+ if sent_at:
3899
+ from datetime import datetime
3900
+ try:
3901
+ dt = datetime.fromisoformat(sent_at.replace("Z", "+00:00"))
3902
+ sent_at = dt.strftime("%Y-%m-%d")
3903
+ except ValueError:
3904
+ pass
3905
+ table.add_row(str(request_id), from_user, sent_at or "N/A")
3906
+ console.print(table)
3907
+
3908
+ print_info("Use `repr friends approve <id>` or `repr friends reject <id>`")
3909
+ except AuthError as e:
3910
+ print_error(str(e))
3911
+ print_info("Run `repr login` to re-authenticate")
3912
+ raise typer.Exit(1)
3913
+ except APIError as e:
3914
+ print_error(str(e))
3915
+ raise typer.Exit(1)
3916
+
3917
+
3918
+ @friends_app.command("approve")
3919
+ def friends_approve(
3920
+ request_id: str = typer.Argument(..., help="ID of the friend request to approve"),
3921
+ ):
3922
+ """Approve friend request.
3923
+
3924
+ Example:
3925
+ repr friends approve abc123
3926
+ """
3927
+ if not is_authenticated():
3928
+ print_error("Approving friend requests requires sign-in")
3929
+ print_info("Run `repr login` first")
3930
+ raise typer.Exit(1)
3931
+
3932
+ from .api import approve_friend_request, AuthError
3933
+
3934
+ try:
3935
+ result = asyncio.run(approve_friend_request(request_id))
3936
+ print_success("Friend request approved")
3937
+ except AuthError as e:
3938
+ print_error(str(e))
3939
+ print_info("Run `repr login` to re-authenticate")
3940
+ raise typer.Exit(1)
3941
+ except APIError as e:
3942
+ print_error(str(e))
3943
+ raise typer.Exit(1)
3944
+
3945
+
3946
+ @friends_app.command("reject")
3947
+ def friends_reject(
3948
+ request_id: str = typer.Argument(..., help="ID of the friend request to reject"),
3949
+ ):
3950
+ """Reject friend request.
3951
+
3952
+ Example:
3953
+ repr friends reject abc123
3954
+ """
3955
+ if not is_authenticated():
3956
+ print_error("Rejecting friend requests requires sign-in")
3957
+ print_info("Run `repr login` first")
3958
+ raise typer.Exit(1)
3959
+
3960
+ from .api import reject_friend_request, AuthError
3961
+
3962
+ try:
3963
+ result = asyncio.run(reject_friend_request(request_id))
3964
+ print_info("Friend request rejected")
3965
+ except AuthError as e:
3966
+ print_error(str(e))
3967
+ print_info("Run `repr login` to re-authenticate")
3968
+ raise typer.Exit(1)
3969
+ except APIError as e:
3970
+ print_error(str(e))
3971
+ raise typer.Exit(1)
3972
+
3973
+
2538
3974
  # =============================================================================
2539
3975
  # STATUS & INFO
2540
3976
  # =============================================================================
@@ -2590,6 +4026,184 @@ def status(
2590
4026
  print_info(f"Run `repr push` to publish {unpushed} stories")
2591
4027
 
2592
4028
 
4029
@app.command()
def changes(
    path: Optional[Path] = typer.Argument(
        None,
        help="Path to repository (default: current directory)",
        exists=True,
        resolve_path=True,
    ),
    explain: bool = typer.Option(False, "--explain", "-e", help="Use LLM to explain changes"),
    compact: bool = typer.Option(False, "--compact", "-c", help="Compact output (no diff previews)"),
    json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
):
    """
    Show file changes across git states with diff details.

    Displays changes in three states:
    - Unstaged: Tracked files modified but not staged (with diff preview)
    - Staged: Changes ready to commit (with diff preview)
    - Unpushed: Commits not yet pushed to remote

    Example:
        repr changes # Show changes with diffs
        repr changes --compact # Just file names
        repr changes --explain # LLM summary
        repr changes --json
    """
    # NOTE: the original also imported ChangeState here but never used it;
    # the unused import has been dropped.
    from .change_synthesis import get_change_report, explain_group

    target_path = path or Path.cwd()
    report = get_change_report(target_path)

    if not report:
        print_error(f"Not a git repository: {target_path}")
        raise typer.Exit(1)

    def _file_dict(f):
        """JSON-serializable summary of one file change."""
        return {
            "path": f.path,
            "change_type": f.change_type,
            "insertions": f.insertions,
            "deletions": f.deletions,
        }

    if json_output:
        data = {
            "repo_path": str(report.repo_path),
            "timestamp": report.timestamp.isoformat(),
            "unstaged": [_file_dict(f) for f in report.unstaged],
            "staged": [_file_dict(f) for f in report.staged],
            "unpushed": [
                {
                    "sha": c.sha,
                    "message": c.message,
                    "author": c.author,
                    "timestamp": c.timestamp.isoformat(),
                    "files": [{"path": f.path, "change_type": f.change_type} for f in c.files],
                }
                for c in report.unpushed
            ],
        }
        if report.summary:
            data["summary"] = {
                "hook": report.summary.hook,
                "what": report.summary.what,
                "value": report.summary.value,
                "problem": report.summary.problem,
                "insight": report.summary.insight,
                "show": report.summary.show,
            }
        print(json.dumps(data, indent=2))
        return

    if not report.has_changes:
        print_info("No changes detected.")
        console.print(f"[{BRAND_MUTED}]Working tree clean, nothing staged, up to date with remote.[/]")
        raise typer.Exit()

    # Get LLM client if explain mode
    client = None
    if explain:
        from .openai_analysis import get_openai_client
        client = get_openai_client()
        if not client:
            print_error("LLM not configured. Run `repr llm setup` first.")
            raise typer.Exit(1)

    # Single icon map shared by all sections.
    type_icons = {"A": "+", "M": "~", "D": "-", "R": "→"}

    def _explain(group_name, **kwargs):
        """Run the LLM explanation for one group and print it dimmed."""
        with create_spinner(f"Explaining {group_name}..."):
            explanation = asyncio.run(explain_group(group_name, client=client, **kwargs))
        console.print()
        console.print(f"[{BRAND_MUTED}]{explanation}[/]")
        console.print()

    def _print_files(files):
        """Render a file list with +/- stats and optional diff preview.

        Shared by the Unstaged and Staged sections, which previously
        duplicated this loop verbatim.
        """
        for f in files:
            type_icon = type_icons.get(f.change_type, "?")
            stats = ""
            if f.insertions or f.deletions:
                stats = f" [{BRAND_SUCCESS}]+{f.insertions}[/][{BRAND_ERROR}]-{f.deletions}[/]"
            full_path = report.repo_path / f.path
            console.print(f" {type_icon} {full_path}{stats}")
            # Show diff preview unless compact mode (first 10 lines only).
            if not compact and f.diff_preview:
                for line in f.diff_preview.split("\n")[:10]:
                    escaped_line = rich_escape(line)
                    if line.startswith("+"):
                        console.print(f" [{BRAND_SUCCESS}]{escaped_line}[/]")
                    elif line.startswith("-"):
                        console.print(f" [{BRAND_ERROR}]{escaped_line}[/]")
        console.print()

    # Header
    console.print(f"[bold]Changes in {report.repo_path.name}[/]")
    console.print()

    # Unstaged changes
    if report.unstaged:
        console.print(f"[bold][{BRAND_WARNING}]Unstaged[/][/] ({len(report.unstaged)} files)")
        if client:
            _explain("unstaged", file_changes=report.unstaged)
        _print_files(report.unstaged)

    # Staged changes
    if report.staged:
        console.print(f"[bold][{BRAND_SUCCESS}]Staged[/][/] ({len(report.staged)} files)")
        if client:
            _explain("staged", file_changes=report.staged)
        _print_files(report.staged)

    # Unpushed commits
    if report.unpushed:
        console.print(f"[bold][{BRAND_PRIMARY}]Unpushed[/][/] ({len(report.unpushed)} commits)")
        if client:
            _explain("unpushed", commit_changes=report.unpushed)
        for commit in report.unpushed:
            console.print(f" [{BRAND_MUTED}]{commit.sha}[/] {commit.message}")
            # Show at most five files per commit to keep the list short.
            for f in commit.files[:5]:
                type_icon = type_icons.get(f.change_type, "?")
                full_path = report.repo_path / f.path
                console.print(f" {type_icon} {full_path}")
            if len(commit.files) > 5:
                console.print(f" [{BRAND_MUTED}]... +{len(commit.files) - 5} more[/]")
            console.print()
4205
+
4206
+
2593
4207
  @app.command()
2594
4208
  def mode(
2595
4209
  json_output: bool = typer.Option(False, "--json", help="Output as JSON"),
@@ -2753,6 +4367,2211 @@ def doctor():
2753
4367
  print_error("Issues found - see recommendations above")
2754
4368
 
2755
4369
 
4370
+ # =============================================================================
4371
+ # MCP SERVER
4372
+ # =============================================================================
4373
+
4374
+ @mcp_app.command("serve")
4375
+ def mcp_serve(
4376
+ sse: bool = typer.Option(False, "--sse", help="Use SSE transport instead of stdio"),
4377
+ port: int = typer.Option(3001, "--port", "-p", help="Port for SSE mode"),
4378
+ host: str = typer.Option("127.0.0.1", "--host", "-h", help="Host for SSE mode"),
4379
+ ):
4380
+ """
4381
+ Start the MCP server for AI agent integration.
4382
+
4383
+ The MCP (Model Context Protocol) server exposes repr functionality
4384
+ to AI agents like Claude Code, Cursor, Windsurf, and Cline.
4385
+
4386
+ Examples:
4387
+ repr mcp serve # stdio mode (default)
4388
+ repr mcp serve --sse # SSE mode for remote clients
4389
+ repr mcp serve --port 3001 # Custom port for SSE
4390
+
4391
+ Configuration for Claude Code:
4392
+ claude mcp add repr -- repr mcp serve
4393
+
4394
+ Configuration for Cursor/Windsurf (mcp.json):
4395
+ {
4396
+ "mcpServers": {
4397
+ "repr": {
4398
+ "command": "repr",
4399
+ "args": ["mcp", "serve"]
4400
+ }
4401
+ }
4402
+ }
4403
+ """
4404
+ from .mcp_server import run_server
4405
+
4406
+ if not sse:
4407
+ # stdio mode - silent start, let MCP handle communication
4408
+ run_server(sse=False)
4409
+ else:
4410
+ console.print(f"Starting MCP server (SSE mode) on {host}:{port}...")
4411
+ run_server(sse=True, host=host, port=port)
4412
+
4413
+
4414
+ @mcp_app.command("info")
4415
+ def mcp_info():
4416
+ """
4417
+ Show MCP server configuration info.
4418
+
4419
+ Example:
4420
+ repr mcp info
4421
+ """
4422
+ console.print("[bold]MCP Server Info[/]")
4423
+ console.print()
4424
+ console.print("The MCP server exposes repr to AI agents via the Model Context Protocol.")
4425
+ console.print()
4426
+ console.print("[bold]Available Tools:[/]")
4427
+ console.print(" • repr_generate — Generate stories from commits")
4428
+ console.print(" • repr_stories_list — List existing stories")
4429
+ console.print(" • repr_week — Weekly work summary")
4430
+ console.print(" • repr_standup — Yesterday/today summary")
4431
+ console.print(" • repr_profile — Get developer profile")
4432
+ console.print()
4433
+ console.print("[bold]Available Resources:[/]")
4434
+ console.print(" • repr://profile — Current profile")
4435
+ console.print(" • repr://stories/recent — Recent stories")
4436
+ console.print()
4437
+ console.print("[bold]Usage:[/]")
4438
+ console.print(" repr mcp serve # Start server (stdio)")
4439
+ console.print(" repr mcp serve --sse # SSE mode")
4440
+ console.print()
4441
+ console.print("[bold]Claude Code setup:[/]")
4442
+ console.print(" claude mcp add repr -- repr mcp serve")
4443
+
4444
+
4445
+ @mcp_app.command("install-skills")
4446
+ def mcp_install_skills(
4447
+ force: bool = typer.Option(False, "--force", "-f", help="Overwrite existing skill files"),
4448
+ ):
4449
+ """
4450
+ Install repr skills to Claude Code.
4451
+
4452
+ Installs a skill that teaches Claude Code how and when to use repr commands
4453
+ (init, generate, timeline, changes).
4454
+
4455
+ Example:
4456
+ repr mcp install-skills
4457
+ repr mcp install-skills --force # Overwrite existing
4458
+ """
4459
+ skill_dir = Path.home() / ".claude" / "skills"
4460
+ skill_file = skill_dir / "repr.md"
4461
+
4462
+ # Check if already installed
4463
+ if skill_file.exists() and not force:
4464
+ print_warning("repr skill already installed")
4465
+ console.print(f" Location: {skill_file}")
4466
+ console.print()
4467
+ console.print("Use --force to overwrite")
4468
+ return
4469
+
4470
+ # Create directory
4471
+ skill_dir.mkdir(parents=True, exist_ok=True)
4472
+
4473
+ # Write skill file
4474
+ skill_content = '''---
4475
+ name: repr
4476
+ description: Use this skill when the user asks to "show my changes", "what did I work on", "generate a story", "initialize repr", "show timeline", "set up repr", or needs context about their recent development work.
4477
+ version: 1.0.0
4478
+ ---
4479
+
4480
+ # repr - Developer Context Layer
4481
+
4482
+ repr helps developers understand what they've actually worked on by analyzing git history and AI sessions.
4483
+
4484
+ ## When to Use repr
4485
+
4486
+ - User asks about their recent work or changes
4487
+ - User wants to generate a story or summary from commits
4488
+ - User needs to set up repr for a project
4489
+ - User wants to see their development timeline
4490
+ - User asks "what did I work on" or similar
4491
+
4492
+ ## Commands
4493
+
4494
+ ### Initialize repr
4495
+
4496
+ ```bash
4497
+ # First-time setup - scan for repositories
4498
+ repr init
4499
+
4500
+ # Scan specific directory
4501
+ repr init ~/projects
4502
+ ```
4503
+
4504
+ Use when: User is setting up repr for the first time or adding new repositories.
4505
+
4506
+ ### Show Changes
4507
+
4508
+ ```bash
4509
+ # Show current changes (unstaged, staged, unpushed)
4510
+ repr changes
4511
+
4512
+ # Compact view (just file names)
4513
+ repr changes --compact
4514
+
4515
+ # With LLM explanation
4516
+ repr changes --explain
4517
+
4518
+ # JSON output
4519
+ repr changes --json
4520
+ ```
4521
+
4522
+ Use when: User asks "what are my changes", "show my work", "what's uncommitted", or needs to understand current git state.
4523
+
4524
+ ### Generate Stories
4525
+
4526
+ ```bash
4527
+ # Generate stories from recent commits
4528
+ repr generate
4529
+
4530
+ # Generate for specific date range
4531
+ repr generate --since monday
4532
+ repr generate --since "2 weeks ago"
4533
+
4534
+ # Use local LLM (Ollama)
4535
+ repr generate --local
4536
+
4537
+ # Dry run (preview without saving)
4538
+ repr generate --dry-run
4539
+ ```
4540
+
4541
+ Use when: User wants to create narratives from their commits, document their work, or generate content for standup/weekly reports.
4542
+
4543
+ ### Timeline
4544
+
4545
+ ```bash
4546
+ # Initialize timeline for current project
4547
+ repr timeline init
4548
+
4549
+ # Initialize with AI session ingestion
4550
+ repr timeline init --with-sessions
4551
+
4552
+ # Show timeline entries
4553
+ repr timeline show
4554
+ repr timeline show --days 14
4555
+
4556
+ # Filter by type
4557
+ repr timeline show --type commit
4558
+ repr timeline show --type session
4559
+
4560
+ # Show timeline status
4561
+ repr timeline status
4562
+
4563
+ # Refresh/update timeline
4564
+ repr timeline refresh
4565
+
4566
+ # Launch web dashboard
4567
+ repr dashboard
4568
+ ```
4569
+
4570
+ Use when: User wants a unified view of commits and AI sessions, or needs to understand the full context of their development work.
4571
+
4572
+ ## Output Interpretation
4573
+
4574
+ ### Changes Output
4575
+ - **Unstaged**: Modified files not yet staged (with diff preview)
4576
+ - **Staged**: Changes ready to commit
4577
+ - **Unpushed**: Commits not yet pushed to remote
4578
+
4579
+ ### Timeline Entry Types
4580
+ - **commit**: Regular git commits
4581
+ - **session**: AI coding sessions (Claude Code, etc.)
4582
+ - **merged**: Commits with associated AI session context
4583
+
4584
+ ### Smart Git Workflow
4585
+
4586
+ ```bash
4587
+ # Stage files
4588
+ repr add .py # Stage *.py files
4589
+ repr add . # Stage all
4590
+ repr add src/ # Stage directory
4591
+
4592
+ # Generate message and commit
4593
+ repr commit # AI generates message
4594
+ repr commit -m "fix: x" # Custom message
4595
+ repr commit -r # Regenerate message
4596
+
4597
+ # Push to remote
4598
+ repr push
4599
+ ```
4600
+
4601
+ Use when: User wants to stage, commit with AI-generated message, or push.
4602
+
4603
+ ## Tips
4604
+
4605
+ 1. Run `repr changes` before committing to see what you're about to commit
4606
+ 2. Use `repr generate --dry-run` to preview stories before saving
4607
+ 3. Initialize timeline with `--with-sessions` to capture AI context
4608
+ 4. Use `repr timeline show --type session` to see AI-assisted work separately
4609
+ 5. Use `repr commit add` for quick commits with AI-generated messages
4610
+ '''
4611
+
4612
+ skill_file.write_text(skill_content)
4613
+
4614
+ print_success("repr skill installed to Claude Code")
4615
+ console.print()
4616
+ console.print(f" Skill: {skill_file}")
4617
+ console.print()
4618
+ console.print("Claude Code will now recognize repr commands.")
4619
+ console.print("Try asking: 'show my changes' or 'what did I work on'")
4620
+
4621
+
4622
+ # =============================================================================
4623
+ # TIMELINE
4624
+ # =============================================================================
4625
+
4626
@timeline_app.command("init")
def timeline_init(
    path: Optional[Path] = typer.Argument(
        None,
        help="Project path (default: current directory)",
        exists=True,
        dir_okay=True,
        resolve_path=True,
    ),
    with_sessions: bool = typer.Option(
        False, "--with-sessions", "-s",
        help="Include AI session context (Claude Code, Clawdbot)",
    ),
    days: int = typer.Option(
        90, "--days", "-d",
        help="Number of days to look back",
    ),
    max_commits: int = typer.Option(
        500, "--max-commits",
        help="Maximum commits to include",
    ),
    model: str = typer.Option(
        "openai/gpt-4.1-mini", "--model", "-m",
        help="Model for session extraction (with --with-sessions)",
    ),
    force: bool = typer.Option(
        False, "--force", "-f",
        help="Overwrite existing timeline",
    ),
    json_output: bool = typer.Option(
        False, "--json",
        help="Output as JSON",
    ),
):
    """
    Initialize a project timeline from git commits and AI sessions.

    Creates .repr/timeline.json with unified context from:
    - Git commits (always included)
    - AI session logs (with --with-sessions flag)

    Examples:
        repr timeline init                    # Commits only
        repr timeline init --with-sessions    # Include AI sessions
        repr timeline init --days 30          # Last 30 days
        repr timeline init ~/myproject        # Specific project
    """
    # Deferred imports keep CLI startup fast; these modules pull in git/LLM deps.
    from .timeline import (
        detect_project_root,
        is_initialized,
        init_timeline_commits_only,
        init_timeline_with_sessions_sync,
        get_timeline_stats,
    )
    from .loaders import detect_session_source

    # Determine project path
    project_path = path or Path.cwd()

    # Check if in git repo; all timeline data is anchored to the repo root.
    repo_root = detect_project_root(project_path)
    if not repo_root:
        print_error(f"Not a git repository: {project_path}")
        print_info("Run this command inside a git repository")
        raise typer.Exit(1)

    project_path = repo_root

    # Refuse to clobber an existing timeline unless --force was given.
    if is_initialized(project_path) and not force:
        print_warning(f"Timeline already exists for {project_path}")
        print_info("Use --force to reinitialize")
        raise typer.Exit(1)

    if not json_output:
        print_header()
        console.print(f"Initializing timeline for [bold]{project_path.name}[/]")
        console.print()

    # Check for session sources if --with-sessions; degrade gracefully to
    # commits-only (with user confirmation) when none are found.
    session_sources = []
    if with_sessions:
        session_sources = detect_session_source(project_path)
        if not session_sources:
            if not json_output:
                print_warning("No AI session sources found for this project")
                print_info("Supported: Claude Code (~/.claude/projects/), Clawdbot (~/.clawdbot/)")
                if not confirm("Continue with commits only?", default=True):
                    raise typer.Exit(0)
            with_sessions = False
        else:
            if not json_output:
                console.print(f"Session sources: {', '.join(session_sources)}")

    # Progress tracking state shared with the init_* helpers via callback.
    progress_state = {"stage": "", "current": 0, "total": 0}

    def progress_callback(stage: str, current: int, total: int) -> None:
        progress_state["stage"] = stage
        progress_state["current"] = current
        progress_state["total"] = total

    try:
        if with_sessions:
            # Get API key for extraction
            # Priority: BYOK OpenAI > env OPENAI_API_KEY > LiteLLM (cloud)
            api_key = None
            byok_config = get_byok_config("openai")
            if byok_config:
                api_key = byok_config.get("api_key")

            if not api_key:
                # Try environment variable (direct OpenAI access)
                api_key = os.environ.get("OPENAI_API_KEY")

            if not api_key:
                # Try LiteLLM config (cloud mode - needs LiteLLM proxy URL)
                _, litellm_key = get_litellm_config()
                api_key = litellm_key

            if not api_key:
                if not json_output:
                    print_warning("No API key configured for session extraction")
                    print_info("Configure with: repr llm add openai")
                    print_info("Or set OPENAI_API_KEY environment variable")
                    if not confirm("Continue with commits only?", default=True):
                        raise typer.Exit(0)
                with_sessions = False

        if with_sessions:
            if not json_output:
                with create_spinner() as progress:
                    progress.add_task("Initializing...", total=None)
                    timeline = init_timeline_with_sessions_sync(
                        project_path,
                        days=days,
                        max_commits=max_commits,
                        session_sources=session_sources,
                        api_key=api_key,
                        model=model,
                        progress_callback=progress_callback,
                    )
            else:
                timeline = init_timeline_with_sessions_sync(
                    project_path,
                    days=days,
                    max_commits=max_commits,
                    session_sources=session_sources,
                    api_key=api_key,
                    model=model,
                )
        else:
            if not json_output:
                with create_spinner() as progress:
                    progress.add_task("Scanning commits...", total=None)
                    timeline = init_timeline_commits_only(
                        project_path,
                        days=days,
                        max_commits=max_commits,
                        progress_callback=progress_callback,
                    )
            else:
                timeline = init_timeline_commits_only(
                    project_path,
                    days=days,
                    max_commits=max_commits,
                )

        # Get stats for the summary / JSON payload.
        stats = get_timeline_stats(timeline)

        if json_output:
            print(json.dumps({
                "success": True,
                "project": str(project_path),
                "timeline_path": str(project_path / ".repr" / "timeline.json"),
                "stats": stats,
            }, indent=2))
        else:
            console.print()
            print_success("Timeline initialized!")
            console.print()
            console.print(f" Location: .repr/timeline.json")
            console.print(f" Entries: {stats['total_entries']}")
            console.print(f" Commits: {stats['commit_count']}")
            if stats['merged_count'] > 0:
                console.print(f" Merged (commit + session): {stats['merged_count']}")
            if stats['session_count'] > 0:
                console.print(f" Sessions only: {stats['session_count']}")
            if stats['date_range']['first']:
                console.print(f" Date range: {stats['date_range']['first'][:10]} to {stats['date_range']['last'][:10]}")

            console.print()
            print_next_steps([
                "repr timeline status View timeline status",
                "repr timeline show Browse timeline entries",
            ])

    except typer.Exit:
        # BUGFIX: typer.Exit subclasses Exception (via click's Exit/RuntimeError),
        # so the generic handler below used to swallow the deliberate
        # `raise typer.Exit(0)` above and report it as an init failure.
        raise
    except Exception as e:
        if json_output:
            print(json.dumps({"success": False, "error": str(e)}, indent=2))
        else:
            print_error(f"Failed to initialize timeline: {e}")
        raise typer.Exit(1)
4830
+
4831
+
4832
@timeline_app.command("status")
def timeline_status(
    path: Optional[Path] = typer.Argument(
        None,
        help="Project path (default: current directory)",
        exists=True,
        dir_okay=True,
        resolve_path=True,
    ),
    json_output: bool = typer.Option(
        False, "--json",
        help="Output as JSON",
    ),
):
    """
    Show timeline status for a project.

    Example:
        repr timeline status
    """
    # Deferred import: timeline helpers pull in git tooling.
    from .timeline import (
        detect_project_root,
        is_initialized,
        load_timeline,
        get_timeline_stats,
    )

    # Determine project path; status is always anchored to the git repo root.
    project_path = path or Path.cwd()
    repo_root = detect_project_root(project_path)

    if not repo_root:
        # NOTE(review): this error is human-readable even with --json — confirm
        # whether machine consumers rely on the non-zero exit code alone.
        print_error(f"Not a git repository: {project_path}")
        raise typer.Exit(1)

    project_path = repo_root

    # Uninitialized is reported (JSON or human form) and treated as failure.
    if not is_initialized(project_path):
        if json_output:
            print(json.dumps({"initialized": False, "project": str(project_path)}, indent=2))
        else:
            print_warning(f"Timeline not initialized for {project_path.name}")
            print_info("Run: repr timeline init")
        raise typer.Exit(1)

    timeline = load_timeline(project_path)
    if not timeline:
        print_error("Failed to load timeline")
        raise typer.Exit(1)

    # Aggregate counts / date range / sources for display.
    stats = get_timeline_stats(timeline)

    if json_output:
        print(json.dumps({
            "initialized": True,
            "project": str(project_path),
            "stats": stats,
            "last_updated": timeline.last_updated.isoformat() if timeline.last_updated else None,
        }, indent=2))
    else:
        print_header()
        console.print(f"Timeline: [bold]{project_path.name}[/]")
        console.print()
        console.print(f" Entries: {stats['total_entries']}")
        console.print(f" Commits: {stats['commit_count']}")
        console.print(f" Merged: {stats['merged_count']}")
        console.print(f" Sessions: {stats['session_count']}")
        if stats['date_range']['first']:
            console.print(f" Date range: {stats['date_range']['first'][:10]} to {stats['date_range']['last'][:10]}")
        if stats['session_sources']:
            console.print(f" Session sources: {', '.join(stats['session_sources'])}")
        if timeline.last_updated:
            # last_updated may be a datetime or a pre-formatted string
            # (hasattr guard), so normalize to ISO text before formatting.
            last_updated_str = timeline.last_updated.isoformat() if hasattr(timeline.last_updated, 'isoformat') else str(timeline.last_updated)
            console.print(f" Last updated: {format_relative_time(last_updated_str)}")
4906
+
4907
+
4908
@timeline_app.command("show")
def timeline_show(
    path: Optional[str] = typer.Argument(
        None,
        help="Project path (use '.' for current repo, omit for all repos)",
    ),
    days: int = typer.Option(
        7, "--days", "-d",
        help="Show entries from last N days",
    ),
    entry_type: Optional[str] = typer.Option(
        None, "--type", "-t",
        help="Filter by type: commit, session, merged",
    ),
    limit: int = typer.Option(
        20, "--limit", "-n",
        help="Maximum entries to show",
    ),
    json_output: bool = typer.Option(
        False, "--json",
        help="Output as JSON",
    ),
    public: bool = typer.Option(
        True, "--public/--no-public",
        help="Build-in-public feed format (default)",
    ),
    internal: bool = typer.Option(
        False, "--internal",
        help="Show technical details in feed",
    ),
    raw: bool = typer.Option(
        False, "--raw",
        help="Show raw timeline entries (commits/sessions)",
    ),
    all_repos: bool = typer.Option(
        False, "--all", "-a",
        help="Show stories from all tracked repos (default when no path given)",
    ),
    group: bool = typer.Option(
        False, "--group", "-g",
        help="Group stories by project (presentation view)",
    ),
):
    """
    Show timeline entries.

    Examples:
        repr timeline show                    # All tracked repos from database
        repr timeline show .                  # Current repo only
        repr timeline show /path/to/repo      # Specific repo
        repr timeline show --group            # All repos, grouped by project
        repr timeline show . --days 30        # Current repo, last 30 days
        repr timeline show --internal         # Feed format with tech details
    """
    from datetime import timezone
    from .timeline import (
        detect_project_root,
        is_initialized,
        load_timeline,
        query_timeline,
    )
    from .models import TimelineEntryType
    from .db import get_db

    db = get_db()
    since = datetime.now(timezone.utc) - timedelta(days=days)

    # Mode selection: stories can come from the DB across all repos, but raw
    # entries always come from one repo's .repr/timeline.json.
    # BUGFIX: previously `--raw --all` left show_all_repos True while
    # project_path/entries were never populated, crashing on
    # `project_path.name` in the raw branch. --raw now always forces
    # specific-repo mode (defaulting to the current directory).
    show_all_repos = (all_repos or path is None) and not raw

    project_path = None
    timeline = None
    entries = []

    if not show_all_repos:
        # Specific repo mode - resolve path (relative paths against cwd).
        resolved_path = Path(path) if path else Path.cwd()
        if not resolved_path.is_absolute():
            resolved_path = Path.cwd() / resolved_path
        resolved_path = resolved_path.resolve()

        repo_root = detect_project_root(resolved_path)

        if not repo_root:
            print_error(f"Not a git repository: {resolved_path}")
            raise typer.Exit(1)

        project_path = repo_root

        if not is_initialized(project_path):
            print_warning(f"Timeline not initialized for {project_path.name}")
            print_info("Run: repr timeline init")
            raise typer.Exit(1)

        timeline = load_timeline(project_path)
        if not timeline:
            print_error("Failed to load timeline")
            raise typer.Exit(1)

        # Parse entry type filter (commit/session/merged).
        entry_types = None
        if entry_type:
            try:
                entry_types = [TimelineEntryType(entry_type)]
            except ValueError:
                print_error(f"Invalid type: {entry_type}")
                print_info("Valid types: commit, session, merged")
                raise typer.Exit(1)

        # Query entries within the window, newest last; keep the tail.
        entries = query_timeline(timeline, since=since, entry_types=entry_types)
        entries = entries[-limit:]  # Take most recent

    if json_output:
        if show_all_repos:
            # JSON output for all repos (story summaries only).
            stories = db.list_stories(since=since, limit=limit)
            print(json.dumps({
                "mode": "all_repos",
                "stories": [{"id": s.id, "title": s.title, "project_id": s.project_id} for s in stories],
            }, indent=2))
        else:
            from .timeline import _serialize_entry
            print(json.dumps({
                "project": str(project_path),
                "entries": [_serialize_entry(e) for e in entries],
            }, indent=2))
        return

    if not show_all_repos and not entries:
        print_info(f"No entries in the last {days} days")
        return

    # Feed format (default, unless --raw)
    if not raw:
        # Build project id -> name lookup for story headers.
        projects = {p["id"]: p["name"] for p in db.list_projects()}

        if show_all_repos:
            # Show stories from all repos
            stories = db.list_stories(since=since, limit=limit)
            header_name = "all repos"
        else:
            # Show stories from current repo only
            project = db.get_project_by_path(project_path)

            if not project:
                print_info(f"No stories found. Run 'repr generate' first.")
                return

            stories = db.list_stories(project_id=project["id"], since=since, limit=limit)
            header_name = project_path.name

        if not stories:
            print_info(f"No stories in the last {days} days. Run 'repr generate' first.")
            return

        def format_rel_time(story_time):
            """Format timestamp as relative time (minutes/hours/days or date)."""
            local_time = story_time.astimezone()
            now = datetime.now(local_time.tzinfo)
            delta = now - local_time

            if delta.days == 0:
                if delta.seconds < 3600:
                    return f"{delta.seconds // 60}m ago"
                else:
                    return f"{delta.seconds // 3600}h ago"
            elif delta.days == 1:
                return "Yesterday"
            elif delta.days < 7:
                return f"{delta.days} days ago"
            else:
                return local_time.strftime("%b %d")

        def render_story(story, show_repo=False):
            """Render a single story entry using Tripartite Codex structure."""
            story_time = story.started_at or story.created_at
            rel_time = format_rel_time(story_time)
            repo_name = projects.get(story.project_id, "unknown")

            # Header with time and optional repo
            if show_repo:
                console.print(f"[{BRAND_PRIMARY}]{repo_name}[/] · [{BRAND_MUTED}]{rel_time}[/]")
            else:
                console.print(f"[{BRAND_MUTED}]{rel_time}[/]")

            # Use structured fields if available, fall back to legacy
            if story.hook:
                # New Tripartite Codex format
                console.print(f"[bold]{story.hook}[/]")
                console.print()
                what_text = story.what.rstrip(".").rstrip()
                value_text = story.value.lstrip(".").lstrip()
                console.print(f"{what_text}. {value_text}")

                # Show block if present
                if story.show:
                    console.print()
                    console.print(f"```\n{story.show}\n```")

                # Diagram if present
                if story.diagram:
                    console.print()
                    console.print(f"[{BRAND_MUTED}]Diagram:[/]")
                    console.print(f"```\n{story.diagram}\n```")

                # Internal mode: show problem and how
                if internal:
                    if story.problem:
                        console.print()
                        console.print(f"[{BRAND_MUTED}]Problem:[/] {story.problem}")

                    if story.implementation_details:
                        console.print()
                        console.print(f"[{BRAND_MUTED}]How:[/]")
                        for detail in story.implementation_details[:5]:
                            console.print(f" [{BRAND_MUTED}]›[/] {detail}")

                # Insight
                if story.insight:
                    console.print()
                    console.print(f"[{BRAND_MUTED}]Insight:[/] {story.insight}")

            else:
                # Legacy format fallback
                post_text = story.public_post or story.title
                if internal:
                    post_text = story.internal_post or post_text
                console.print(post_text)

                show_block = story.show or story.public_show
                if internal:
                    show_block = show_block or story.internal_show
                if show_block:
                    console.print()
                    console.print(f"```\n{show_block}\n```")

                if internal and story.internal_details:
                    console.print()
                    console.print(f"[{BRAND_MUTED}]Implementation:[/]")
                    for detail in story.internal_details[:5]:
                        console.print(f" [{BRAND_MUTED}]›[/] {detail}")

            # Recall data (internal mode only) - file changes and snippets
            if internal:
                # File changes summary
                if story.file_changes:
                    total_changes = f"+{story.total_insertions}/-{story.total_deletions}" if (story.total_insertions or story.total_deletions) else ""
                    console.print()
                    console.print(f"[{BRAND_MUTED}]Files changed ({len(story.file_changes)})[/] [{BRAND_MUTED}]{total_changes}[/]")
                    for fc in story.file_changes[:8]:  # Show up to 8 files
                        change_indicator = {"added": "+", "deleted": "-", "modified": "~"}.get(fc.change_type, "~")
                        stats = f"[green]+{fc.insertions}[/][{BRAND_MUTED}]/[/][red]-{fc.deletions}[/]" if (fc.insertions or fc.deletions) else ""
                        console.print(f" [{BRAND_MUTED}]{change_indicator}[/] {fc.file_path} {stats}")
                    if len(story.file_changes) > 8:
                        console.print(f" [{BRAND_MUTED}]... and {len(story.file_changes) - 8} more files[/]")

                # Key code snippets
                if story.key_snippets:
                    console.print()
                    console.print(f"[{BRAND_MUTED}]Snippets:[/]")
                    for snippet in story.key_snippets[:2]:  # Show up to 2 snippets
                        lang = snippet.language or ""
                        console.print(f" [{BRAND_MUTED}]{snippet.file_path}[/]")
                        console.print(f"```{lang}\n{snippet.content}\n```")

            console.print()
            console.print(f"[{BRAND_MUTED}]{'─' * 60}[/]")
            console.print()

        print_header()

        # BUGFIX: grouping used to require the explicit --all flag, so
        # `repr timeline show --group` (all-repos by default) silently
        # ignored --group; gate on the effective all-repos mode instead.
        if group and show_all_repos:
            # Grouped view: organize by project
            console.print(f"[bold]@all repos[/] · build log [dim](grouped)[/]")
            console.print()

            # Group stories by project
            from collections import defaultdict
            stories_by_project = defaultdict(list)
            for story in stories:
                stories_by_project[story.project_id].append(story)

            # Render each project group
            for project_id, project_stories in stories_by_project.items():
                project_name = projects.get(project_id, "unknown")
                console.print(f"[bold]── {project_name} ──[/]")
                console.print()

                for story in project_stories:
                    render_story(story, show_repo=False)

        else:
            # Timeline view (default)
            console.print(f"[bold]@{header_name}[/] · build log")
            console.print()

            for story in stories:
                render_story(story, show_repo=all_repos)

        return

    # Raw format: project_path is always set here (raw forces repo mode above).
    print_header()
    console.print(f"Timeline: [bold]{project_path.name}[/] (last {days} days)")
    console.print()

    for entry in reversed(entries):  # Show newest first
        # Format timestamp (convert to local timezone)
        local_ts = entry.timestamp.astimezone()
        ts = local_ts.strftime("%Y-%m-%d %H:%M")

        # Entry type indicator
        if entry.type == TimelineEntryType.COMMIT:
            type_icon = "📝"
        elif entry.type == TimelineEntryType.SESSION:
            type_icon = "💬"
        else:  # MERGED
            type_icon = "🔗"

        # Build description
        if entry.commit:
            # Show first line of commit message
            msg = entry.commit.message.split("\n")[0][:60]
            if len(entry.commit.message.split("\n")[0]) > 60:
                msg += "..."
            desc = f"{msg}"
            sha = entry.commit.sha[:8]
            console.print(f"{type_icon} [{BRAND_MUTED}]{ts}[/] [{BRAND_PRIMARY}]{sha}[/] {desc}")
        elif entry.session_context:
            # Show problem from session
            problem = entry.session_context.problem[:60]
            if len(entry.session_context.problem) > 60:
                problem += "..."
            console.print(f"{type_icon} [{BRAND_MUTED}]{ts}[/] {problem}")

        # Show session context if merged
        if entry.type == TimelineEntryType.MERGED and entry.session_context:
            console.print(f" [{BRAND_MUTED}]→ {entry.session_context.problem[:70]}[/]")

        console.print()
5254
+
5255
+
5256
@timeline_app.command("refresh")
def timeline_refresh(
    path: Optional[str] = typer.Argument(
        None,
        help="Project path (use '.' for current repo, omit for all repos)",
    ),
    limit: int = typer.Option(
        50, "--limit", "-n",
        help="Maximum stories to refresh",
    ),
    json_output: bool = typer.Option(
        False, "--json",
        help="Output as JSON",
    ),
):
    """
    Regenerate posts for existing stories.

    This updates the public_post and internal_post fields without
    re-analyzing commits. Useful after prompt improvements.

    Examples:
        repr timeline refresh             # Refresh all repos
        repr timeline refresh .           # Refresh current repo only
        repr timeline refresh --limit 10  # Refresh last 10 stories
    """
    from .db import get_db
    from .story_synthesis import (
        transform_story_for_feed_sync,
        _build_fallback_post,
        extract_file_changes_from_commits,
        extract_key_snippets_from_commits,
    )

    db = get_db()

    # Determine scope: all tracked repos, or one resolved from `path`.
    if path is None:
        # All repos
        stories = db.list_stories(limit=limit)
        scope_name = "all repos"
    else:
        # Specific repo
        from .timeline import detect_project_root
        resolved_path = Path(path) if path else Path.cwd()
        if not resolved_path.is_absolute():
            resolved_path = Path.cwd() / resolved_path
        resolved_path = resolved_path.resolve()

        repo_root = detect_project_root(resolved_path)
        if not repo_root:
            print_error(f"Not a git repository: {resolved_path}")
            raise typer.Exit(1)

        project = db.get_project_by_path(repo_root)
        if not project:
            print_error(f"No stories found for {repo_root.name}")
            raise typer.Exit(1)

        stories = db.list_stories(project_id=project["id"], limit=limit)
        scope_name = repo_root.name

    if not stories:
        # BUGFIX: emit valid JSON in --json mode instead of a human message.
        if json_output:
            print(json.dumps({"refreshed": 0, "failed": 0}))
        else:
            print_info("No stories to refresh")
        return

    # BUGFIX: the banner used to print unconditionally, corrupting the
    # machine-readable stream when --json was requested.
    if not json_output:
        print_header()
        console.print(f"Refreshing {len(stories)} stories from {scope_name}...")
        console.print()

    refreshed = 0
    failed = 0

    # Get project id -> filesystem path map for git-based extraction.
    project_paths = {}
    for p in db.list_projects():
        project_paths[p["id"]] = p["path"]

    for story in stories:
        try:
            # Extract file changes and snippets from git
            project_path = project_paths.get(story.project_id)
            if story.commit_shas and project_path:
                file_changes, total_ins, total_del = extract_file_changes_from_commits(
                    story.commit_shas, project_path
                )
                story.file_changes = file_changes
                story.total_insertions = total_ins
                story.total_deletions = total_del
                story.key_snippets = extract_key_snippets_from_commits(
                    story.commit_shas, project_path, max_snippets=3
                )

            # Regenerate Tripartite Codex content
            result = transform_story_for_feed_sync(story, mode="internal")

            # Store structured fields
            story.hook = result.hook
            story.what = result.what
            story.value = result.value
            story.insight = result.insight
            story.show = result.show
            story.post_body = result.post_body

            # Internal-specific fields (optional on the result object)
            if hasattr(result, 'problem') and result.problem:
                story.problem = result.problem
            if hasattr(result, 'how') and result.how:
                story.implementation_details = result.how

            # Legacy fields for backward compatibility
            what_clean = result.what.rstrip(".").rstrip()
            value_clean = result.value.lstrip(".").lstrip()
            story.public_post = f"{result.hook}\n\n{what_clean}. {value_clean}\n\nInsight: {result.insight}"
            story.internal_post = story.public_post
            story.public_show = result.show
            story.internal_show = result.show

            # Update in database
            db.save_story(story, story.project_id)
            refreshed += 1

            if not json_output:
                console.print(f" [green]✓[/] {story.title[:60]}")

        except Exception as e:
            # Best-effort: fall back to a deterministic post so the story
            # is still usable, and keep refreshing the remaining stories.
            fallback_post = _build_fallback_post(story)
            story.public_post = fallback_post
            story.internal_post = fallback_post
            db.save_story(story, story.project_id)
            failed += 1

            if not json_output:
                console.print(f" [yellow]![/] {story.title[:60]} (fallback)")

    # BUGFIX: trailing blank line is human-only output.
    if not json_output:
        console.print()

    if json_output:
        print(json.dumps({"refreshed": refreshed, "failed": failed}))
    else:
        print_success(f"Refreshed {refreshed} stories" + (f" ({failed} used fallback)" if failed else ""))
5398
+
5399
+
5400
@timeline_app.command("ingest-session")
def timeline_ingest_session(
    file: Path = typer.Option(
        ..., "--file", "-f",
        help="Path to session file (JSONL)",
        exists=True,
        file_okay=True,
        resolve_path=True,
    ),
    project: Optional[Path] = typer.Option(
        None, "--project", "-p",
        help="Project path (default: detected from session cwd)",
        exists=True,
        dir_okay=True,
        resolve_path=True,
    ),
    source: Optional[str] = typer.Option(
        None, "--source", "-s",
        help="Session source: claude_code, clawdbot (default: auto-detect)",
    ),
    model: str = typer.Option(
        "openai/gpt-4.1-mini", "--model", "-m",
        help="Model for context extraction",
    ),
    json_output: bool = typer.Option(
        False, "--json",
        help="Output as JSON",
    ),
):
    """
    Ingest a completed AI session into the timeline.

    Called by SessionEnd hooks to capture context from AI coding sessions.

    Examples:
        repr timeline ingest-session --file ~/.claude/projects/.../session.jsonl
        repr timeline ingest-session --file /path/to/session.jsonl --project ~/myproject
    """
    # NOTE(review): `timezone` does not appear to be used in this function —
    # confirm before removing.
    from datetime import timezone
    from .timeline import (
        detect_project_root,
        is_initialized,
        load_timeline,
        save_timeline,
        extract_commits_from_git,
    )
    from .models import (
        TimelineEntry,
        TimelineEntryType,
        match_commits_to_sessions,
    )
    from .loaders import ClaudeCodeLoader, ClawdbotLoader
    from .session_extractor import SessionExtractor

    # Determine source: infer from well-known path fragments, defaulting to
    # claude_code when neither marker is present.
    if source is None:
        if ".claude" in str(file):
            source = "claude_code"
        elif ".clawdbot" in str(file):
            source = "clawdbot"
        else:
            # Try both loaders
            source = "claude_code"

    # Load session with the loader matching the detected/declared source.
    if source == "claude_code":
        loader = ClaudeCodeLoader()
    elif source == "clawdbot":
        loader = ClawdbotLoader()
    else:
        print_error(f"Unknown source: {source}")
        raise typer.Exit(1)

    session = loader.load_session(file)
    if not session:
        if json_output:
            print(json.dumps({"success": False, "error": "Failed to load session"}))
        else:
            print_error(f"Failed to load session from {file}")
        raise typer.Exit(1)

    # Determine project path: explicit --project wins, else the session's cwd.
    if project is None:
        if session.cwd:
            project = detect_project_root(Path(session.cwd))
        if project is None:
            if json_output:
                print(json.dumps({"success": False, "error": "Could not detect project path"}))
            else:
                print_error("Could not detect project path from session")
                print_info("Specify with --project /path/to/repo")
            raise typer.Exit(1)

    project_path = project

    # Check if timeline exists; ingestion requires a prior `timeline init`.
    if not is_initialized(project_path):
        if json_output:
            print(json.dumps({"success": False, "error": f"Timeline not initialized for {project_path}"}))
        else:
            print_warning(f"Timeline not initialized for {project_path.name}")
            print_info("Run: repr timeline init")
        raise typer.Exit(1)

    # Load existing timeline
    timeline = load_timeline(project_path)
    if not timeline:
        print_error("Failed to load timeline")
        raise typer.Exit(1)

    # Idempotency: skip if this session id was already ingested.
    for entry in timeline.entries:
        if entry.session_context and entry.session_context.session_id == session.id:
            if json_output:
                print(json.dumps({"success": True, "skipped": True, "reason": "Session already ingested"}))
            else:
                print_info(f"Session {session.id[:8]} already ingested")
            return

    # Get API key for extraction.
    # NOTE(review): priority here is BYOK > LiteLLM > env OPENAI_API_KEY,
    # whereas `timeline init` checks env before LiteLLM — confirm which
    # ordering is intended and align the two.
    api_key = None
    byok_config = get_byok_config("openai")
    if byok_config:
        api_key = byok_config.get("api_key")

    if not api_key:
        _, litellm_key = get_litellm_config()
        api_key = litellm_key

    if not api_key:
        api_key = os.environ.get("OPENAI_API_KEY")

    if not api_key:
        if json_output:
            print(json.dumps({"success": False, "error": "No API key for extraction"}))
        else:
            print_error("No API key configured for session extraction")
            print_info("Configure with: repr llm add openai")
        raise typer.Exit(1)

    # Extract context from session via the LLM-backed extractor (async API,
    # driven synchronously here with asyncio.run).
    async def _extract():
        extractor = SessionExtractor(api_key=api_key, model=model)
        return await extractor.extract_context(session)

    if not json_output:
        with create_spinner() as progress:
            task = progress.add_task("Extracting context...", total=None)
            context = asyncio.run(_extract())
    else:
        context = asyncio.run(_extract())

    # Get recent commits to potentially link the session to.
    recent_commits = extract_commits_from_git(
        project_path,
        days=1,  # Just last day for linking
        max_commits=50,
    )

    # Match session to commits and record the linked SHAs on the context.
    if recent_commits:
        matches = match_commits_to_sessions(recent_commits, [session])
        linked_commits = [m.commit_sha for m in matches if m.session_id == session.id]
        context.linked_commits = linked_commits

    # Create timeline entry. If the session links to an existing commit
    # entry, upgrade that entry to MERGED instead of adding a new one.
    entry_type = TimelineEntryType.SESSION
    if context.linked_commits:
        # Find and upgrade matching commit entries to MERGED
        # NOTE(review): the double break means only the FIRST linked commit
        # entry is upgraded; any further linked commits keep plain COMMIT
        # entries — confirm this is intended.
        for commit_sha in context.linked_commits:
            for entry in timeline.entries:
                if entry.commit and entry.commit.sha == commit_sha:
                    entry.session_context = context
                    entry.type = TimelineEntryType.MERGED
                    entry_type = None  # Don't create standalone entry
                    break
            if entry_type is None:
                break

    # Add standalone session entry if not merged
    if entry_type is not None:
        entry = TimelineEntry(
            timestamp=context.timestamp,
            type=entry_type,
            commit=None,
            session_context=context,
            story=None,
        )
        timeline.add_entry(entry)

    # Save timeline (persists to .repr/timeline.json)
    save_timeline(timeline, project_path)

    if json_output:
        print(json.dumps({
            "success": True,
            "session_id": session.id,
            "project": str(project_path),
            "problem": context.problem[:100],
            "linked_commits": context.linked_commits,
            "entry_type": entry_type.value if entry_type else "merged",
        }, indent=2))
    else:
        print_success(f"Session ingested!")
        console.print()
        console.print(f" Session: {session.id[:8]}")
        console.print(f" Problem: {context.problem[:60]}...")
        if context.linked_commits:
            console.print(f" Linked to commits: {', '.join(c[:8] for c in context.linked_commits)}")
        console.print(f" Entry type: {entry_type.value if entry_type else 'merged'}")
5610
+
5611
+
5612
@timeline_app.command("serve", hidden=True)
def timeline_serve(
    port: int = typer.Option(
        8787, "--port", "-p",
        help="Port to serve on",
    ),
    host: str = typer.Option(
        "127.0.0.1", "--host",
        help="Host to bind to",
    ),
    open_browser: bool = typer.Option(
        True, "--open/--no-open",
        help="Auto-open browser (default: enabled)",
    ),
):
    """
    [Deprecated] Use 'repr dashboard' instead.

    Alias for backward compatibility - calls 'repr dashboard'.
    """
    # Deprecation shim: warn once, then delegate to the canonical command.
    print_info("Note: 'repr timeline serve' is deprecated. Use 'repr dashboard' instead.")
    dashboard(port=port, host=host, open_browser=open_browser)
5634
+
5635
+
5636
# =============================================================================
# GIT WORKFLOW COMMANDS (add, commit, push)
# =============================================================================

# File-based cache for commit message (persists between commands)
_COMMIT_MSG_CACHE_FILE = Path.home() / ".repr" / ".commit_message_cache"


# System prompt shared by `repr branch` and `repr commit`: asks the LLM for a
# JSON object with "branch" and "message" keys.
COMMIT_MESSAGE_SYSTEM = """You generate git commit messages and branch names. Given staged changes, output JSON with:

{
  "branch": "<type>/<short-description>",
  "message": "<type>: <description>"
}

Types: feat, fix, refactor, docs, style, test, chore

Rules:
- Branch: lowercase, hyphens, no spaces (e.g., feat/add-user-auth)
- Message: under 72 chars, imperative mood, no period at end

Output only valid JSON."""

# User prompt template; {changes} is filled with a formatted summary of the
# staged files and/or unpushed commits.
COMMIT_MESSAGE_USER = """Generate branch name and commit message for these staged changes:

{changes}"""
5662
+
5663
+
5664
def _get_cached_commit_info() -> Optional[dict]:
    """Return the cached branch/message dict, or None if absent or corrupt."""
    # EAFP: a missing file raises FileNotFoundError, bad JSON raises
    # ValueError — both collapse to the same "no cache" answer.
    try:
        return json.loads(_COMMIT_MSG_CACHE_FILE.read_text())
    except Exception:
        return None
5672
+
5673
+
5674
def _set_cached_commit_info(branch: str, message: str):
    """Persist the generated branch name and commit message for later reuse."""
    payload = json.dumps({"branch": branch, "message": message})
    _COMMIT_MSG_CACHE_FILE.parent.mkdir(parents=True, exist_ok=True)
    _COMMIT_MSG_CACHE_FILE.write_text(payload)
5678
+
5679
+
5680
def _clear_cached_commit_info():
    """Remove the cached branch/message file if it exists."""
    # unlink(missing_ok=True) replaces the exists()-then-unlink() dance.
    _COMMIT_MSG_CACHE_FILE.unlink(missing_ok=True)
5684
+
5685
+
5686
@app.command("clear")
def clear_cache():
    """
    Clear cached branch name and commit message.

    Examples:
        repr clear
    """
    cached = _get_cached_commit_info()
    if not cached:
        # Nothing persisted — nothing to do.
        print_info("No cached branch/message")
        return

    console.print(f"[{BRAND_MUTED}]Clearing cached:[/]")
    if cached.get("branch"):
        console.print(f" Branch: {cached['branch']}")
    if cached.get("message"):
        console.print(f" Message: {cached['message']}")
    _clear_cached_commit_info()
    print_success("Cache cleared")
5705
+
5706
+
5707
@app.command("add")
def add_files(
    pattern: str = typer.Argument(..., help="File pattern to stage (glob pattern)"),
    force: bool = typer.Option(False, "--force", "-f", help="Force add ignored files"),
):
    """
    Stage files matching pattern.

    Run `repr commit` to generate a message and commit.

    Examples:
        repr add cli     # Stage files containing "cli"
        repr add .py     # Stage files containing ".py"
        repr add .       # Stage all
        repr add cli -f  # Force add ignored files
    """
    import re
    import subprocess
    from .change_synthesis import get_repo, get_staged_changes

    repo = get_repo(Path.cwd())
    if not repo:
        print_error("Not a git repository")
        raise typer.Exit(1)

    def _run_git_add(cmd: list) -> None:
        # Keep the try narrow: only the subprocess call can raise.  Checking
        # the return code OUTSIDE the try fixes a bug where the
        # `raise typer.Exit(1)` was itself caught by `except Exception`
        # (typer.Exit is a RuntimeError subclass via click) and re-reported
        # as "Failed to stage files: 1".
        try:
            result = subprocess.run(cmd, cwd=repo.working_dir, capture_output=True, text=True)
        except Exception as e:
            print_error(f"Failed to stage files: {e}")
            raise typer.Exit(1)
        if result.returncode != 0:
            print_error(f"Failed to stage files: {result.stderr}")
            raise typer.Exit(1)

    if pattern == ".":
        # Special case: "." stages everything.
        cmd = ["git", "add", "."]
        if force:
            cmd.insert(2, "-f")
        _run_git_add(cmd)
    else:
        # Grep-style match: stage modified/untracked files whose path matches.
        status_result = subprocess.run(
            ["git", "status", "--porcelain"],
            cwd=repo.working_dir,
            capture_output=True,
            text=True,
        )
        if status_result.returncode != 0:
            print_error(f"Failed to get status: {status_result.stderr}")
            raise typer.Exit(1)

        try:
            regex = re.compile(pattern, re.IGNORECASE)
        except re.error:
            # Fall back to literal match if pattern is not a valid regex.
            regex = re.compile(re.escape(pattern), re.IGNORECASE)

        matching_files = []
        for line in status_result.stdout.strip().split("\n"):
            if not line:
                continue
            # Porcelain format: "XY path" or "XY orig -> renamed".
            file_path = line[2:].lstrip().split(" -> ")[-1].strip()
            if regex.search(file_path):
                matching_files.append(file_path)

        if not matching_files:
            print_warning(f"No changed files matching '{pattern}'")
            raise typer.Exit(0)

        cmd = ["git", "add"]
        if force:
            cmd.append("-f")
        cmd.extend(matching_files)
        _run_git_add(cmd)

    staged = get_staged_changes(repo)
    if not staged:
        print_warning("No files matched pattern or nothing to stage")
        raise typer.Exit(0)

    # Clear cached branch/message so next commit generates fresh
    _clear_cached_commit_info()

    # Show staged files
    console.print(f"[bold]Staged {len(staged)} files[/]")
    for f in staged:
        type_icon = {"A": "+", "M": "~", "D": "-", "R": "→"}.get(f.change_type, "?")
        stats = ""
        if f.insertions or f.deletions:
            stats = f" [{BRAND_SUCCESS}]+{f.insertions}[/][{BRAND_ERROR}]-{f.deletions}[/]"
        console.print(f" {type_icon} {f.path}{stats}")
    console.print()

    # Suggest the next step: branch first on main/master, otherwise commit.
    try:
        branch_name = repo.active_branch.name
        if branch_name in ("main", "master"):
            print_info("Run `repr branch` to create a new feature branch")
        else:
            print_info("Run `repr commit` to generate message and commit, or `repr branch` to create a new feature branch")
    except Exception:
        # e.g. detached HEAD — still suggest committing.
        print_info("Run `repr commit` to generate message and commit")
5819
+
5820
+
5821
@app.command("unstage")
def unstage_files(
    pattern: str = typer.Argument(..., help="File pattern to unstage (grep-style match)"),
):
    """
    Unstage files matching pattern.

    Examples:
        repr unstage cli   # Unstage files containing "cli"
        repr unstage .py   # Unstage files containing ".py"
        repr unstage .     # Unstage all staged files
    """
    import re
    import subprocess
    from .change_synthesis import get_repo, get_staged_changes

    repo = get_repo(Path.cwd())
    if not repo:
        print_error("Not a git repository")
        raise typer.Exit(1)

    # Get currently staged files
    staged = get_staged_changes(repo)
    if not staged:
        print_warning("No staged files")
        raise typer.Exit(0)

    if pattern == ".":
        # Special case: "." unstages everything currently staged.
        matching_files = [f.path for f in staged]
    else:
        # Grep-style regex match, literal fallback for invalid patterns.
        try:
            regex = re.compile(pattern, re.IGNORECASE)
        except re.error:
            regex = re.compile(re.escape(pattern), re.IGNORECASE)
        matching_files = [f.path for f in staged if regex.search(f.path)]

    if not matching_files:
        print_warning(f"No staged files matching '{pattern}'")
        raise typer.Exit(0)

    # Narrow try around the subprocess call only.  Checking the return code
    # outside the try fixes a bug where `raise typer.Exit(1)` was caught by
    # `except Exception` and re-reported as "Failed to unstage files: 1".
    try:
        result = subprocess.run(
            ["git", "restore", "--staged"] + matching_files,
            cwd=repo.working_dir,
            capture_output=True,
            text=True,
        )
    except Exception as e:
        print_error(f"Failed to unstage files: {e}")
        raise typer.Exit(1)
    if result.returncode != 0:
        print_error(f"Failed to unstage files: {result.stderr}")
        raise typer.Exit(1)

    # Show what was unstaged
    console.print(f"[bold]Unstaged {len(matching_files)} files[/]")
    for f in matching_files:
        console.print(f" - {f}")
5878
+
5879
+
5880
@app.command("branch")
def create_branch(
    name: Optional[str] = typer.Argument(None, help="Branch name (optional, AI generates if omitted)"),
    regenerate: bool = typer.Option(False, "--regenerate", "-r", help="Regenerate branch name"),
):
    """
    Create and switch to a new branch.

    If no name given, generates one from staged or unpushed changes.

    Examples:
        repr branch                  # AI generates name
        repr branch feat/my-feature  # Use explicit name
        repr branch -r               # Regenerate name
    """
    import subprocess
    from .change_synthesis import (
        get_repo,
        get_staged_changes,
        get_unpushed_commits,
        format_file_changes,
        format_commit_changes,
    )

    repo = get_repo(Path.cwd())
    if not repo:
        print_error("Not a git repository")
        raise typer.Exit(1)

    branch_name = name

    if not branch_name:
        # Reuse a previously generated name unless regeneration was requested.
        cached = _get_cached_commit_info()
        if cached and cached.get("branch") and not regenerate:
            branch_name = cached["branch"]
            console.print(f"[{BRAND_MUTED}](cached)[/]")
        else:
            # Generate from staged or unpushed changes
            staged = get_staged_changes(repo)
            unpushed = get_unpushed_commits(repo)

            if not staged and not unpushed:
                print_error("No staged or unpushed changes to generate branch name from")
                print_info("Run `repr add <pattern>` first, or provide a name")
                raise typer.Exit(1)

            from .openai_analysis import get_openai_client

            client = get_openai_client()
            if not client:
                print_error("LLM not configured. Run `repr llm setup` first, or provide a name")
                raise typer.Exit(1)

            # Build context from staged and/or unpushed
            changes_str = ""
            if staged:
                changes_str += "Staged changes:\n" + format_file_changes(staged) + "\n\n"
            if unpushed:
                changes_str += "Unpushed commits:\n" + format_commit_changes(unpushed)

            prompt = COMMIT_MESSAGE_USER.format(changes=changes_str)

            with create_spinner("Generating branch name..."):
                response = asyncio.run(client.chat.completions.create(
                    model="gpt-4o-mini",
                    messages=[
                        {"role": "system", "content": COMMIT_MESSAGE_SYSTEM},
                        {"role": "user", "content": prompt},
                    ],
                    response_format={"type": "json_object"},
                    temperature=0.3,
                ))
            data = json.loads(response.choices[0].message.content)
            branch_name = data.get("branch", "")
            commit_msg = data.get("message", "")

            # Cache both so `repr commit` can reuse the message.
            _set_cached_commit_info(branch_name, commit_msg)

    console.print(f"[bold]Branch:[/] {branch_name}")

    # Keep the try narrow around the git calls and raise typer.Exit OUTSIDE
    # the except scope.  Previously the Exit was raised inside the try and
    # caught by `except Exception`, which re-reported it as "Failed: 1".
    try:
        result = subprocess.run(
            ["git", "checkout", "-b", branch_name],
            cwd=repo.working_dir,
            capture_output=True,
            text=True,
        )
        if result.returncode != 0 and "already exists" in result.stderr:
            # Branch already exists — switch to it instead.
            result = subprocess.run(
                ["git", "checkout", branch_name],
                cwd=repo.working_dir,
                capture_output=True,
                text=True,
            )
            if result.returncode == 0:
                print_success(f"Switched to {branch_name}")
                return
    except Exception as e:
        print_error(f"Failed: {e}")
        raise typer.Exit(1)

    if result.returncode != 0:
        print_error(f"Failed: {result.stderr}")
        raise typer.Exit(1)

    print_success(f"Created and switched to {branch_name}")

    # If there are staged changes, suggest the next step.
    # (get_staged_changes is already imported above — the original
    # re-imported it here redundantly.)
    if get_staged_changes(repo):
        print_info("Run `repr commit` to commit your staged changes")
5989
+
5990
+
5991
@app.command("commit")
def commit_staged(
    message: Optional[str] = typer.Option(None, "--message", "-m", help="Custom message (skip AI)"),
    regenerate: bool = typer.Option(False, "--regenerate", "-r", help="Regenerate message"),
    yes: bool = typer.Option(False, "--yes", "-y", help="Skip confirmation on main/master"),
):
    """
    Generate commit message and commit staged changes.

    Examples:
        repr commit                 # Generate and commit
        repr commit -m "fix: typo"  # Custom message
        repr commit -r              # Regenerate message
    """
    import subprocess
    from .change_synthesis import get_repo, get_staged_changes, format_file_changes
    from .config import get_config_value, set_config_value

    repo = get_repo(Path.cwd())
    if not repo:
        print_error("Not a git repository")
        raise typer.Exit(1)

    # Guard against accidental commits straight to main/master.
    current_branch = subprocess.run(
        ["git", "branch", "--show-current"],
        cwd=repo.working_dir,
        capture_output=True,
        text=True,
    ).stdout.strip()

    if current_branch in ("main", "master") and not yes:
        allow_main = get_config_value("allow_commit_to_main")
        if allow_main is None:
            # No stored preference yet — ask, optionally persisting the answer.
            print_warning(f"You're about to commit directly to {current_branch}")
            console.print()
            response = typer.prompt(
                "Allow commits to main/master? [y]es / [n]o / [a]lways / [never]",
                default="n",
            ).lower()

            if response in ("a", "always"):
                set_config_value("allow_commit_to_main", True)
                console.print(f"[{BRAND_MUTED}]Preference saved. Use `repr config set allow_commit_to_main false` to reset.[/]")
            elif response in ("never",):
                set_config_value("allow_commit_to_main", False)
                console.print(f"[{BRAND_MUTED}]Preference saved. Use `repr config set allow_commit_to_main true` to reset.[/]")
                print_info("Create a branch first: repr branch")
                raise typer.Exit(0)
            elif response not in ("y", "yes"):
                print_info("Create a branch first: repr branch")
                raise typer.Exit(0)
        elif allow_main is False:
            print_warning(f"Commits to {current_branch} are disabled")
            print_info("Create a branch first: repr branch")
            print_info("Or use: repr config set allow_commit_to_main true")
            raise typer.Exit(0)

    staged = get_staged_changes(repo)
    if not staged:
        print_warning("Nothing staged to commit")
        print_info("Run `repr add <pattern>` first")
        raise typer.Exit(0)

    # Show staged files
    console.print(f"[bold]Staged {len(staged)} files[/]")
    for f in staged:
        type_icon = {"A": "+", "M": "~", "D": "-", "R": "→"}.get(f.change_type, "?")
        stats = ""
        if f.insertions or f.deletions:
            stats = f" [{BRAND_SUCCESS}]+{f.insertions}[/][{BRAND_ERROR}]-{f.deletions}[/]"
        console.print(f" {type_icon} {f.path}{stats}")
    console.print()

    # Resolve the commit message: explicit -m > cached > freshly generated.
    commit_msg = None

    if message:
        commit_msg = message
    else:
        cached = _get_cached_commit_info()
        if cached and cached.get("message") and not regenerate:
            commit_msg = cached["message"]
            console.print(f"[{BRAND_MUTED}](cached)[/]")
        else:
            # Generate with LLM
            from .openai_analysis import get_openai_client

            client = get_openai_client()
            if not client:
                print_error("LLM not configured. Run `repr llm setup` first, or use -m")
                raise typer.Exit(1)

            changes_str = format_file_changes(staged)
            prompt = COMMIT_MESSAGE_USER.format(changes=changes_str)

            with create_spinner("Generating commit message..."):
                response = asyncio.run(client.chat.completions.create(
                    model="gpt-4o-mini",
                    messages=[
                        {"role": "system", "content": COMMIT_MESSAGE_SYSTEM},
                        {"role": "user", "content": prompt},
                    ],
                    response_format={"type": "json_object"},
                    temperature=0.3,
                ))
            data = json.loads(response.choices[0].message.content)
            branch_name = data.get("branch", "")
            commit_msg = data.get("message", "")

            _set_cached_commit_info(branch_name, commit_msg)

    console.print(f"[bold]Message:[/] {commit_msg}")
    console.print()

    # Narrow try around the subprocess call.  Checking the return code
    # outside the try fixes a bug where `raise typer.Exit(1)` was caught by
    # `except Exception` and re-reported as "Commit failed: 1".
    try:
        result = subprocess.run(
            ["git", "commit", "-m", commit_msg],
            cwd=repo.working_dir,
            capture_output=True,
            text=True,
        )
    except Exception as e:
        print_error(f"Commit failed: {e}")
        raise typer.Exit(1)
    if result.returncode != 0:
        print_error(f"Commit failed: {result.stderr}")
        raise typer.Exit(1)

    _clear_cached_commit_info()
    print_success("Committed")

    # Echo the short SHA of the new commit (best-effort).
    sha_result = subprocess.run(
        ["git", "rev-parse", "--short", "HEAD"],
        cwd=repo.working_dir,
        capture_output=True,
        text=True,
    )
    if sha_result.returncode == 0:
        console.print(f" [{BRAND_MUTED}]{sha_result.stdout.strip()}[/]")
6134
+
6135
+
6136
@app.command("push")
def push_commits():
    """
    Push commits to remote.

    Examples:
        repr push
    """
    import subprocess
    from .change_synthesis import get_repo

    repo = get_repo(Path.cwd())
    if not repo:
        print_error("Not a git repository")
        raise typer.Exit(1)

    # Preview unpushed commits (best-effort; errors such as no remote are
    # ignored and we just attempt the push).
    nothing_to_push = False
    try:
        result = subprocess.run(
            ["git", "log", "@{u}..", "--oneline"],
            cwd=repo.working_dir,
            capture_output=True,
            text=True,
        )
        if result.returncode != 0:
            # No upstream configured — just push below.
            pass
        elif not result.stdout.strip():
            nothing_to_push = True
        else:
            commits = result.stdout.strip().split("\n")
            console.print(f"[bold]Pushing {len(commits)} commits[/]")
            for c in commits[:5]:
                console.print(f" [{BRAND_MUTED}]{c}[/]")
            if len(commits) > 5:
                console.print(f" [{BRAND_MUTED}]... +{len(commits) - 5} more[/]")
            console.print()
    except Exception:
        pass

    # BUG FIX: previously `raise typer.Exit(0)` happened INSIDE the try
    # above and was swallowed by `except Exception: pass`, so repr printed
    # "Nothing to push" and then pushed anyway.  Exit outside the try.
    if nothing_to_push:
        print_info("Nothing to push")
        raise typer.Exit(0)

    def _git_push(cmd: list):
        # Run a push command; the caller checks returncode so that
        # typer.Exit is never raised inside an `except Exception` scope
        # (which previously re-reported it as "Push failed: 1").
        try:
            return subprocess.run(cmd, cwd=repo.working_dir, capture_output=True, text=True)
        except Exception as e:
            print_error(f"Push failed: {e}")
            raise typer.Exit(1)

    with create_spinner("Pushing..."):
        result = _git_push(["git", "push"])
    if result.returncode != 0:
        if "no upstream branch" in result.stderr:
            # First push of this branch: set the upstream explicitly.
            branch = subprocess.run(
                ["git", "branch", "--show-current"],
                cwd=repo.working_dir,
                capture_output=True,
                text=True,
            ).stdout.strip()
            with create_spinner(f"Setting upstream and pushing {branch}..."):
                result = _git_push(["git", "push", "-u", "origin", branch])
            if result.returncode != 0:
                print_error(f"Push failed: {result.stderr}")
                raise typer.Exit(1)
        else:
            print_error(f"Push failed: {result.stderr}")
            raise typer.Exit(1)

    print_success("Pushed")
6213
+
6214
+
6215
# ==================== SKILL COMMANDS ====================

# Repr skill content for AI agents
# Markdown (with YAML front matter) written verbatim to
# ~/.claude/skills/repr/SKILL.md and ~/.gemini/skills/repr/SKILL.md by
# `repr skill install`; also printed by `repr skill`.
REPR_SKILL = '''---
name: repr
description: Use repr to extract developer context from git history for interviews, reviews, and AI agents
---

# repr - Developer Context Layer

Use `repr` to capture and surface developer context from git history. Generate stories, prepare for interviews, create performance review material, and provide context to AI agents.

## Quick Start

```bash
# Initialize (scan repos)
repr init ~/code

# Generate stories from recent commits
repr generate --local

# View generated stories
repr stories
repr story view <id>
```

## Common Commands

| Command | Description |
|---------|-------------|
| `repr init <path>` | Scan and track repositories |
| `repr generate --local` | Generate stories using local LLM |
| `repr generate --days 30` | Generate from last 30 days |
| `repr stories` | List all stories |
| `repr story view <id>` | View a specific story |
| `repr commits --days 7` | Show recent commits |
| `repr dashboard` | Open web dashboard |
| `repr mcp serve` | Start MCP server for AI agents |

## Story Generation

```bash
# Generate with local LLM (Ollama)
repr generate --local

# Generate from specific timeframe
repr generate --days 30 --local
repr generate --since "2 weeks ago" --local

# Generate interview stories (STAR format)
repr generate --template interview --local
```

## LLM Configuration

```bash
# Configure local LLM
repr llm configure

# Add API keys (stored in OS keychain)
repr llm add openai
repr llm add anthropic

# Test LLM connection
repr llm test
```

## Use Cases

- **Interview Prep**: Generate STAR-format stories from commits
- **Performance Reviews**: Summarize months of work with impact
- **Sprint Demos**: Quick changelogs for stakeholders
- **AI Context**: MCP server provides work history to Claude/Cursor
- **Weekly Reflection**: See what you accomplished

## Privacy

- Local-first: data stays in ~/.repr/
- Air-gapped ready: works fully offline
- BYOK: use your own API keys
- Privacy audit: `repr privacy audit`
'''
6297
+
6298
+
6299
+ def _get_skill_path(provider: str) -> Path:
6300
+ """Get the skill installation path for a provider."""
6301
+ home = Path.home()
6302
+ if provider == "claude":
6303
+ return home / ".claude" / "skills" / "repr"
6304
+ elif provider == "gemini":
6305
+ return home / ".gemini" / "skills" / "repr"
6306
+ else:
6307
+ raise ValueError(f"Unknown provider: {provider}")
6308
+
6309
+
6310
def _is_skill_installed(provider: str) -> bool:
    """Return True when the provider's skills directory contains SKILL.md."""
    return (_get_skill_path(provider) / "SKILL.md").exists()
6314
+
6315
+
6316
def _install_skill_to(provider: str) -> Path:
    """Write SKILL.md into the provider's skills directory and return the dir."""
    target_dir = _get_skill_path(provider)
    target_dir.mkdir(parents=True, exist_ok=True)
    (target_dir / "SKILL.md").write_text(REPR_SKILL)
    return target_dir
6323
+
6324
+
6325
@skill_app.callback(invoke_without_command=True)
def skill_default(ctx: typer.Context):
    """View the repr skill for AI agents."""
    # Guard clause: a subcommand was invoked, nothing to do here.
    if ctx.invoked_subcommand is not None:
        return

    console.print()
    console.print(f"[bold {BRAND_PRIMARY}]/repr[/] - [{BRAND_MUTED}]LLM instructions for using repr[/]")
    console.print()

    # Collect installed locations, then render them in one pass.
    installed_paths = []
    if _is_skill_installed("claude"):
        installed_paths.append("~/.claude/skills/repr/SKILL.md")
    if _is_skill_installed("gemini"):
        installed_paths.append("~/.gemini/skills/repr/SKILL.md")

    if installed_paths:
        console.print(f"[{BRAND_SUCCESS}]Installed:[/]")
        for skill_path in installed_paths:
            console.print(f" [{BRAND_MUTED}]{skill_path}[/]")
        console.print()

    divider = f"[{BRAND_MUTED}]{'─' * 60}[/]"
    console.print(divider)
    console.print(REPR_SKILL)
    console.print(divider)

    if not installed_paths:
        console.print(f"\n[{BRAND_MUTED}]Install with:[/] repr skill install")
    console.print()
6354
+
6355
@skill_app.command("install")
def skill_install(
    target: Optional[str] = typer.Argument(
        None,
        help="Target provider: claude, gemini, or all (default: all)",
    ),
):
    """Install the repr skill to AI agent providers."""
    console.print()
    console.print(f"[bold {BRAND_PRIMARY}]Install repr skill[/]")
    console.print()

    if not target or target == "all":
        # Install to every known provider, tallying successes.
        installed = 0
        for provider, label in (("claude", "Claude"), ("gemini", "Gemini")):
            try:
                dest = _install_skill_to(provider)
            except Exception as e:
                print_warning(f"Could not install to {label}: {e}")
            else:
                print_success(f"Installed to {dest}")
                installed += 1

        if installed == 0:
            print_warning("No providers found.")
        else:
            console.print(f"\n[{BRAND_MUTED}]LLMs can now use /repr to learn how to run repr commands.[/]\n")
    elif target in ("claude", "gemini"):
        try:
            dest = _install_skill_to(target)
        except Exception as e:
            print_error(f"Installation failed: {e}")
            raise typer.Exit(1)
        print_success(f"Installed to {dest}")
        console.print(f"\n[{BRAND_MUTED}]LLMs can now use /repr to learn how to run repr commands.[/]\n")
    else:
        print_error(f"Unknown target: {target}")
        console.print(f"[{BRAND_MUTED}]Usage: repr skill install [claude|gemini|all][/]")
        raise typer.Exit(1)
6403
+
6404
+
6405
# PR generation prompts
# System prompt for `repr pr`: asks the LLM for a JSON object with
# "title" and "body" keys.
PR_SYSTEM = """You generate GitHub PR titles and descriptions. Given the commits, output JSON:

{
  "title": "<type>: <short description>",
  "body": "## Summary\\n<bullet points>\\n\\n## Changes\\n<bullet points>"
}

Rules:
- Title: under 72 chars, conventional commit style
- Body: markdown, concise bullet points
- Focus on what and why, not how

Output only valid JSON."""

# User prompt template; {commits} is filled with `git log --oneline` output.
PR_USER = """Generate PR title and description for these commits:

{commits}"""
6423
+
6424
+
6425
@app.command("pr")
def create_pr(
    title: Optional[str] = typer.Option(None, "--title", "-t", help="Custom PR title"),
    draft: bool = typer.Option(False, "--draft", "-d", help="Create as draft PR"),
    regenerate: bool = typer.Option(False, "--regenerate", "-r", help="Regenerate title/body"),
):
    """
    Create a pull request with AI-generated title and description.

    Examples:
        repr pr                   # AI generates title/body
        repr pr -t "feat: add X"  # Custom title
        repr pr --draft           # Create draft PR
    """
    import shutil
    import subprocess
    from .change_synthesis import get_repo, get_unpushed_commits, format_commit_changes

    repo = get_repo(Path.cwd())
    if not repo:
        print_error("Not a git repository")
        raise typer.Exit(1)

    # Check gh is installed.  shutil.which is portable, unlike shelling out
    # to `which`, which does not exist on Windows.
    if shutil.which("gh") is None:
        print_error("GitHub CLI (gh) not installed")
        print_info("Install: brew install gh")
        raise typer.Exit(1)

    # Get current branch
    current_branch = subprocess.run(
        ["git", "branch", "--show-current"],
        cwd=repo.working_dir,
        capture_output=True,
        text=True,
    ).stdout.strip()

    if current_branch in ("main", "master"):
        print_error(f"Cannot create PR from {current_branch}")
        print_info("Create a branch first: repr branch")
        raise typer.Exit(1)

    # Check for unpushed commits
    unpushed = get_unpushed_commits(repo)

    # Determine the base branch: prefer main, fall back to master.
    base_branch = "main"
    check_main = subprocess.run(
        ["git", "rev-parse", "--verify", "main"],
        cwd=repo.working_dir,
        capture_output=True,
    )
    if check_main.returncode != 0:
        base_branch = "master"

    log_result = subprocess.run(
        ["git", "log", f"{base_branch}..HEAD", "--oneline"],
        cwd=repo.working_dir,
        capture_output=True,
        text=True,
    )
    commits_text = log_result.stdout.strip()

    if not commits_text:
        print_warning("No commits to create PR from")
        raise typer.Exit(0)

    commits_list = commits_text.split("\n")
    console.print(f"[bold]PR for {len(commits_list)} commits[/]")
    for c in commits_list[:5]:
        console.print(f" [{BRAND_MUTED}]{c}[/]")
    if len(commits_list) > 5:
        console.print(f" [{BRAND_MUTED}]... +{len(commits_list) - 5} more[/]")
    console.print()

    # Push first if needed so the PR includes every commit.
    if unpushed:
        console.print(f"[{BRAND_MUTED}]Pushing {len(unpushed)} unpushed commits...[/]")
        push_result = subprocess.run(
            ["git", "push", "-u", "origin", current_branch],
            cwd=repo.working_dir,
            capture_output=True,
            text=True,
        )
        if push_result.returncode != 0:
            print_error(f"Push failed: {push_result.stderr}")
            raise typer.Exit(1)

    # Title/body: explicit -t wins; otherwise generate with the LLM.
    pr_title = title
    pr_body = None

    if not pr_title:
        from .openai_analysis import get_openai_client

        client = get_openai_client()
        if not client:
            print_error("LLM not configured. Run `repr llm setup` first, or use -t")
            raise typer.Exit(1)

        prompt = PR_USER.format(commits=commits_text)

        with create_spinner("Generating PR..."):
            response = asyncio.run(client.chat.completions.create(
                model="gpt-4o-mini",
                messages=[
                    {"role": "system", "content": PR_SYSTEM},
                    {"role": "user", "content": prompt},
                ],
                response_format={"type": "json_object"},
                temperature=0.3,
            ))
        data = json.loads(response.choices[0].message.content)
        pr_title = data.get("title", current_branch)
        pr_body = data.get("body", "")

    console.print(f"[bold]Title:[/] {pr_title}")
    if pr_body:
        console.print(f"[bold]Body:[/]")
        for line in pr_body.split("\n")[:5]:
            console.print(f" [{BRAND_MUTED}]{line}[/]")
    console.print()

    # Create PR
    cmd = ["gh", "pr", "create", "--title", pr_title, "--base", base_branch]
    if pr_body:
        cmd.extend(["--body", pr_body])
    if draft:
        cmd.append("--draft")

    # Narrow try around the subprocess call.  Checking the return code
    # outside the try fixes a bug where `raise typer.Exit(1)` was caught by
    # `except Exception` and re-reported as "PR creation failed: 1".
    try:
        result = subprocess.run(
            cmd,
            cwd=repo.working_dir,
            capture_output=True,
            text=True,
        )
    except Exception as e:
        print_error(f"PR creation failed: {e}")
        raise typer.Exit(1)
    if result.returncode != 0:
        print_error(f"PR creation failed: {result.stderr}")
        raise typer.Exit(1)

    pr_url = result.stdout.strip()
    print_success("PR created")
    console.print(f" {pr_url}")
6573
+
6574
+
2756
6575
# Entry point: invoke the Typer application when run as a script.
if __name__ == "__main__":
    app()