repr-cli 0.2.16__py3-none-any.whl → 0.2.18__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- repr/__init__.py +1 -1
- repr/api.py +363 -62
- repr/auth.py +47 -38
- repr/change_synthesis.py +478 -0
- repr/cli.py +4306 -364
- repr/config.py +119 -11
- repr/configure.py +889 -0
- repr/cron.py +419 -0
- repr/dashboard/__init__.py +9 -0
- repr/dashboard/build.py +126 -0
- repr/dashboard/dist/assets/index-B-aCjaCw.js +384 -0
- repr/dashboard/dist/assets/index-BYFVbEev.css +1 -0
- repr/dashboard/dist/assets/index-BrrhyJFO.css +1 -0
- repr/dashboard/dist/assets/index-C7Gzxc4f.js +384 -0
- repr/dashboard/dist/assets/index-CQdMXo6g.js +391 -0
- repr/dashboard/dist/assets/index-CcEg74ts.js +270 -0
- repr/dashboard/dist/assets/index-Cerc-iA_.js +377 -0
- repr/dashboard/dist/assets/index-CjVcBW2L.css +1 -0
- repr/dashboard/dist/assets/index-Cs8ofFGd.js +384 -0
- repr/dashboard/dist/assets/index-Dfl3mR5E.js +377 -0
- repr/dashboard/dist/assets/index-DwN0SeMc.css +1 -0
- repr/dashboard/dist/assets/index-YFch_e0S.js +384 -0
- repr/dashboard/dist/favicon.svg +4 -0
- repr/dashboard/dist/index.html +14 -0
- repr/dashboard/manager.py +234 -0
- repr/dashboard/server.py +1489 -0
- repr/db.py +980 -0
- repr/hooks.py +3 -2
- repr/loaders/__init__.py +22 -0
- repr/loaders/base.py +156 -0
- repr/loaders/claude_code.py +287 -0
- repr/loaders/clawdbot.py +313 -0
- repr/loaders/gemini_antigravity.py +381 -0
- repr/mcp_server.py +1196 -0
- repr/models.py +503 -0
- repr/openai_analysis.py +25 -0
- repr/session_extractor.py +481 -0
- repr/storage.py +328 -0
- repr/story_synthesis.py +1296 -0
- repr/templates.py +68 -4
- repr/timeline.py +710 -0
- repr/tools.py +17 -8
- {repr_cli-0.2.16.dist-info → repr_cli-0.2.18.dist-info}/METADATA +48 -10
- repr_cli-0.2.18.dist-info/RECORD +58 -0
- {repr_cli-0.2.16.dist-info → repr_cli-0.2.18.dist-info}/WHEEL +1 -1
- {repr_cli-0.2.16.dist-info → repr_cli-0.2.18.dist-info}/entry_points.txt +1 -0
- repr_cli-0.2.16.dist-info/RECORD +0 -26
- {repr_cli-0.2.16.dist-info → repr_cli-0.2.18.dist-info}/licenses/LICENSE +0 -0
- {repr_cli-0.2.16.dist-info → repr_cli-0.2.18.dist-info}/top_level.txt +0 -0
repr/storage.py
CHANGED
@@ -557,3 +557,331 @@ def get_storage_stats() -> dict[str, Any]:
         },
         "total_size_bytes": stories_size + profiles_size + cache_size + config_size,
     }
+
+
+# ============================================================================
+# ReprStore - Project-level storage for .repr/ directory
+# ============================================================================
+
+REPR_DIR_NAME = ".repr"
+STORE_FILE = "store.json"
+
+
+def get_repr_store_path(project_path: Path) -> Path:
+    """Get the .repr/store.json path for a project."""
+    return project_path / REPR_DIR_NAME / STORE_FILE
+
+
+def save_repr_store(store: "ReprStore", project_path: Path) -> Path:
+    """
+    Save ReprStore to SQLite.
+
+    Note: This function now writes to SQLite only. The .repr/store.json
+    format is deprecated. Use `repr data migrate-db` to import old stores.
+
+    Args:
+        store: ReprStore to save
+        project_path: Project root path
+
+    Returns:
+        Path to the .repr directory
+    """
+    repr_dir = project_path / REPR_DIR_NAME
+    repr_dir.mkdir(parents=True, exist_ok=True)
+
+    # Write to SQLite
+    if store.stories:
+        save_stories_to_db(store.stories, project_path)
+
+    return repr_dir
+
+
+def load_repr_store(project_path: Path) -> "ReprStore | None":
+    """
+    Load ReprStore from .repr/store.json.
+
+    Args:
+        project_path: Project root path
+
+    Returns:
+        ReprStore or None if not found
+    """
+    from .models import ReprStore as ReprStoreModel
+
+    store_path = get_repr_store_path(project_path)
+
+    if not store_path.exists():
+        return None
+
+    try:
+        store_json = store_path.read_text()
+        return ReprStoreModel.model_validate_json(store_json)
+    except Exception:
+        return None
+
+
+def create_repr_store(project_path: Path) -> "ReprStore":
+    """
+    Create a new empty ReprStore for a project.
+
+    Args:
+        project_path: Project root path
+
+    Returns:
+        New ReprStore instance
+    """
+    from datetime import datetime, timezone
+    from .models import ReprStore as ReprStoreModel, ContentIndex
+
+    return ReprStoreModel(
+        project_path=str(project_path),
+        initialized_at=datetime.now(timezone.utc),
+        last_updated=datetime.now(timezone.utc),
+        commits=[],
+        sessions=[],
+        stories=[],
+        index=ContentIndex(),
+    )
+
+
+def update_repr_store_index(store: "ReprStore") -> None:
+    """
+    Rebuild the ContentIndex from stories in the store.
+
+    Args:
+        store: ReprStore to update (mutates in place)
+    """
+    from datetime import datetime, timezone
+    from .models import ContentIndex, StoryDigest
+
+    index = ContentIndex(
+        last_updated=datetime.now(timezone.utc),
+        story_count=len(store.stories),
+    )
+
+    for story in store.stories:
+        # File → story mapping
+        for f in story.files:
+            if f not in index.files_to_stories:
+                index.files_to_stories[f] = []
+            index.files_to_stories[f].append(story.id)
+
+        # Keyword extraction and mapping
+        import re
+        text = f"{story.title} {story.problem}".lower()
+        words = re.findall(r'\b[a-z]+\b', text)
+        stopwords = {'the', 'a', 'an', 'is', 'are', 'was', 'were', 'for', 'to', 'in', 'on', 'of', 'and', 'or', 'with', 'from'}
+        keywords = [w for w in words if len(w) > 2 and w not in stopwords]
+
+        for kw in set(keywords):
+            if kw not in index.keywords_to_stories:
+                index.keywords_to_stories[kw] = []
+            index.keywords_to_stories[kw].append(story.id)
+
+        # Weekly index
+        if story.started_at:
+            week = story.started_at.strftime("%Y-W%W")
+            if week not in index.by_week:
+                index.by_week[week] = []
+            index.by_week[week].append(story.id)
+
+        # Story digest
+        index.story_digests.append(StoryDigest(
+            story_id=story.id,
+            title=story.title,
+            problem_keywords=list(set(keywords))[:10],
+            files=story.files[:5],
+            tech_stack=_detect_tech_stack(story.files),
+            category=story.category,
+            timestamp=story.started_at or story.created_at,
+        ))
+
+    store.index = index
+
+
+def _detect_tech_stack(files: list[str]) -> list[str]:
+    """Detect technologies from file extensions."""
+    tech = set()
+
+    ext_map = {
+        '.py': 'Python', '.ts': 'TypeScript', '.tsx': 'React',
+        '.js': 'JavaScript', '.jsx': 'React', '.go': 'Go',
+        '.rs': 'Rust', '.vue': 'Vue', '.sql': 'SQL',
+    }
+
+    for f in files:
+        for ext, name in ext_map.items():
+            if f.endswith(ext):
+                tech.add(name)
+
+    return sorted(tech)
+
+
+# ============================================================================
+# SQLite Storage Functions
+# ============================================================================
+
+def save_stories_to_db(stories: list, project_path: Path) -> int:
+    """
+    Save stories to central SQLite database.
+
+    Args:
+        stories: List of Story objects
+        project_path: Path to the project
+
+    Returns:
+        Number of stories saved
+    """
+    from .db import get_db
+
+    db = get_db()
+    project = db.get_project_by_path(project_path)
+
+    if not project:
+        project_id = db.register_project(project_path, project_path.name)
+    else:
+        project_id = project["id"]
+
+    for story in stories:
+        db.save_story(story, project_id)
+
+    return len(stories)
+
+
+def load_stories_from_db(
+    project_path: Path | None = None,
+    category: str | None = None,
+    since: "datetime | None" = None,
+    limit: int = 100,
+) -> list:
+    """
+    Load stories from central SQLite database.
+
+    Args:
+        project_path: Optional project path filter
+        category: Optional category filter
+        since: Optional date filter
+        limit: Maximum stories to return
+
+    Returns:
+        List of Story objects
+    """
+    from .db import get_db
+
+    db = get_db()
+
+    project_id = None
+    if project_path:
+        project = db.get_project_by_path(project_path)
+        if project:
+            project_id = project["id"]
+
+    return db.list_stories(
+        project_id=project_id,
+        category=category,
+        since=since,
+        limit=limit,
+    )
+
+
+def search_stories_in_db(
+    query: str,
+    files: list[str] | None = None,
+    limit: int = 20,
+) -> list:
+    """
+    Search stories using FTS5.
+
+    Args:
+        query: Search query
+        files: Optional file paths to filter by
+        limit: Maximum results
+
+    Returns:
+        List of Story objects
+    """
+    from .db import get_db
+
+    db = get_db()
+    return db.search_stories(query, files=files, limit=limit)
+
+
+def get_story_from_db(story_id: str):
+    """
+    Get a story by ID from SQLite.
+
+    Args:
+        story_id: Story UUID
+
+    Returns:
+        Story object or None
+    """
+    from .db import get_db
+
+    db = get_db()
+    return db.get_story(story_id)
+
+
+def migrate_stores_to_db(
+    project_paths: list[Path] | None = None,
+    dry_run: bool = False,
+) -> dict:
+    """
+    Migrate existing store.json files to SQLite.
+
+    Args:
+        project_paths: List of project paths to migrate (None = all tracked)
+        dry_run: If True, don't actually write to DB
+
+    Returns:
+        Migration statistics
+    """
+    from .db import get_db
+    from .config import get_tracked_repos
+
+    db = get_db()
+
+    if project_paths is None:
+        # Get all tracked repos
+        tracked = get_tracked_repos()
+        project_paths = [Path(r["path"]) for r in tracked if Path(r["path"]).exists()]
+
+    stats = {
+        "projects_scanned": 0,
+        "projects_migrated": 0,
+        "stories_imported": 0,
+        "errors": [],
+    }
+
+    for project_path in project_paths:
+        stats["projects_scanned"] += 1
+
+        store = load_repr_store(project_path)
+        if not store:
+            continue
+
+        if not store.stories:
+            continue
+
+        if dry_run:
+            stats["projects_migrated"] += 1
+            stats["stories_imported"] += len(store.stories)
+            continue
+
+        try:
+            imported = db.import_from_store(store, project_path)
+            stats["projects_migrated"] += 1
+            stats["stories_imported"] += imported
+        except Exception as e:
+            stats["errors"].append(f"{project_path.name}: {str(e)}")
+
+    return stats
+
+
+def get_db_stats() -> dict:
+    """Get SQLite database statistics."""
+    from .db import get_db
+
+    db = get_db()
+    return db.get_stats()
+
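The hunk above layers a central SQLite store on top of the older .repr/store.json format. The sketch below shows one way the new helpers could be called from a script; it is illustrative only. The project path, search query, and printed summary are hypothetical, and it assumes the wheel installs these functions under the top-level repr.storage module shown in the file list.

from pathlib import Path

from repr.storage import (
    load_repr_store,
    save_repr_store,
    load_stories_from_db,
    search_stories_in_db,
    migrate_stores_to_db,
)

project = Path("~/code/my-project").expanduser()  # hypothetical project root

# Read a legacy .repr/store.json if one exists; per the diff, save_repr_store
# now persists its stories to the central SQLite database instead of
# rewriting the JSON file.
store = load_repr_store(project)
if store is not None:
    save_repr_store(store, project)

# Query the central database (optionally filtered by project) or run an
# FTS5 full-text search across stored stories.
recent = load_stories_from_db(project_path=project, limit=20)
matches = search_stories_in_db("storage refactor", limit=10)

# Bulk-import legacy store.json files for all tracked repos; dry_run=True
# reports what would be migrated without writing to the database.
stats = migrate_stores_to_db(dry_run=True)
print(f"{stats['stories_imported']} stories across {stats['projects_migrated']} projects")

The save_repr_store docstring points to `repr data migrate-db` as the supported CLI path for the same migration, so calling migrate_stores_to_db directly, as here, is mainly useful for scripting.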