repr-cli 0.1.0 (py3-none-any.whl)

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their public registries.
repr/config.py ADDED
@@ -0,0 +1,393 @@
+ """
+ Configuration management for ~/.repr/ directory.
+ """
+
+ import json
+ import os
+ from copy import deepcopy  # defaults must be deep-copied to avoid cross-call mutation
+ from datetime import datetime
+ from pathlib import Path
+ from typing import Any
+
+ # ============================================================================
+ # API Configuration
+ # ============================================================================
+
+ # Environment detection
+ _DEV_MODE = os.getenv("REPR_DEV", "").lower() in ("1", "true", "yes")
+
+ # Production URLs
+ PROD_API_BASE = "https://api.repr.dev/api/cli"
+
+ # Local development URLs
+ LOCAL_API_BASE = "http://localhost:8003/api/cli"
+
+
+ def get_api_base() -> str:
+     """Get the API base URL based on environment."""
+     if env_url := os.getenv("REPR_API_BASE"):
+         return env_url
+     return LOCAL_API_BASE if _DEV_MODE else PROD_API_BASE
+
+
+ def is_dev_mode() -> bool:
+     """Check if running in dev mode."""
+     return _DEV_MODE
+
+
+ def set_dev_mode(enabled: bool) -> None:
+     """Set dev mode programmatically (for CLI --dev flag)."""
+     global _DEV_MODE
+     _DEV_MODE = enabled
+
+
+ # ============================================================================
+ # File Configuration
+ # ============================================================================
+
+ CONFIG_DIR = Path.home() / ".repr"
+ CONFIG_FILE = CONFIG_DIR / "config.json"
+ PROFILES_DIR = CONFIG_DIR / "profiles"
+ CACHE_DIR = CONFIG_DIR / "cache"
+ REPO_HASHES_FILE = CACHE_DIR / "repo-hashes.json"
+
+ DEFAULT_CONFIG = {
+     "version": 1,
+     "auth": None,
+     "settings": {
+         "default_paths": ["~/code"],
+         "skip_patterns": ["node_modules", "venv", ".venv", "vendor", "__pycache__", ".git"],
+     },
+     "sync": {
+         "last_pushed": None,
+         "last_profile": None,
+     },
+     "llm": {
+         "extraction_model": None,  # Model for extracting accomplishments (e.g., "gpt-4o-mini", "llama3.2")
+         "synthesis_model": None,  # Model for synthesizing profile (e.g., "gpt-4o", "llama3.2")
+         "local_api_url": None,  # Local LLM API base URL (e.g., "http://localhost:11434/v1")
+         "local_api_key": None,  # Local LLM API key (often "ollama" for Ollama)
+     },
+ }
+
+
+ def ensure_directories() -> None:
+     """Ensure all required directories exist."""
+     CONFIG_DIR.mkdir(exist_ok=True)
+     PROFILES_DIR.mkdir(exist_ok=True)
+     CACHE_DIR.mkdir(exist_ok=True)
+
+
+ def load_config() -> dict[str, Any]:
+     """Load configuration from disk, creating default if missing."""
+     ensure_directories()
+
+     if not CONFIG_FILE.exists():
+         save_config(DEFAULT_CONFIG)
+         return deepcopy(DEFAULT_CONFIG)
+
+     try:
+         with open(CONFIG_FILE, "r") as f:
+             config = json.load(f)
+         # Merge defaults for missing top-level keys (shallow merge; deepcopy avoids sharing nested dicts)
+         return {**deepcopy(DEFAULT_CONFIG), **config}
+     except (json.JSONDecodeError, IOError):
+         return deepcopy(DEFAULT_CONFIG)
+
+
+ def save_config(config: dict[str, Any]) -> None:
+     """Save configuration to disk."""
+     ensure_directories()
+
+     with open(CONFIG_FILE, "w") as f:
+         json.dump(config, f, indent=2, default=str)
+
+
+ def get_auth() -> dict[str, Any] | None:
+     """Get authentication info if available."""
+     config = load_config()
+     return config.get("auth")
+
+
+ def set_auth(
+     access_token: str,
+     user_id: str,
+     email: str,
+     litellm_api_key: str | None = None,
+ ) -> None:
+     """Store authentication info."""
+     config = load_config()
+     config["auth"] = {
+         "access_token": access_token,
+         "user_id": user_id,
+         "email": email,
+         "authenticated_at": datetime.now().isoformat(),
+     }
+     if litellm_api_key:
+         config["auth"]["litellm_api_key"] = litellm_api_key
+     save_config(config)
+
+
+ def clear_auth() -> None:
+     """Clear authentication info."""
+     config = load_config()
+     config["auth"] = None
+     save_config(config)
+
+
+ def is_authenticated() -> bool:
+     """Check if user is authenticated."""
+     auth = get_auth()
+     return auth is not None and auth.get("access_token") is not None
+
+
+ def get_access_token() -> str | None:
+     """Get access token if authenticated."""
+     auth = get_auth()
+     return auth.get("access_token") if auth else None
+
+
+ def get_litellm_config() -> tuple[str | None, str | None]:
+     """Get LiteLLM configuration if available.
+
+     Returns:
+         Tuple of (litellm_url, litellm_api_key)
+     """
+     auth = get_auth()
+     if not auth:
+         return None, None
+     return auth.get("litellm_url"), auth.get("litellm_api_key")
+
+
+ # ============================================================================
+ # LLM Configuration
+ # ============================================================================
+
+ def get_llm_config() -> dict[str, Any]:
+     """Get LLM configuration.
+
+     Returns:
+         Dict with extraction_model, synthesis_model, local_api_url, local_api_key
+     """
+     config = load_config()
+     llm = config.get("llm", {})
+     return {
+         "extraction_model": llm.get("extraction_model"),
+         "synthesis_model": llm.get("synthesis_model"),
+         "local_api_url": llm.get("local_api_url"),
+         "local_api_key": llm.get("local_api_key"),
+     }
+
+
+ def set_llm_config(
+     extraction_model: str | None = None,
+     synthesis_model: str | None = None,
+     local_api_url: str | None = None,
+     local_api_key: str | None = None,
+ ) -> None:
+     """Set LLM configuration.
+
+     Only updates provided values; leaves others unchanged.
+     """
+     config = load_config()
+     if "llm" not in config:
+         config["llm"] = DEFAULT_CONFIG["llm"].copy()
+
+     # An empty string clears a value (normalized to None)
+     if extraction_model is not None:
+         config["llm"]["extraction_model"] = extraction_model or None
+     if synthesis_model is not None:
+         config["llm"]["synthesis_model"] = synthesis_model or None
+     if local_api_url is not None:
+         config["llm"]["local_api_url"] = local_api_url or None
+     if local_api_key is not None:
+         config["llm"]["local_api_key"] = local_api_key or None
+     save_config(config)
+
+
+ def clear_llm_config() -> None:
+     """Clear all LLM configuration."""
+     config = load_config()
+     config["llm"] = DEFAULT_CONFIG["llm"].copy()
+     save_config(config)
+
+
+ def get_skip_patterns() -> list[str]:
+     """Get list of patterns to skip during discovery."""
+     config = load_config()
+     return config.get("settings", {}).get("skip_patterns", DEFAULT_CONFIG["settings"]["skip_patterns"])
+
+
+ def update_sync_info(profile_name: str) -> None:
+     """Update last sync information."""
+     config = load_config()
+     config["sync"] = {
+         "last_pushed": datetime.now().isoformat(),
+         "last_profile": profile_name,
+     }
+     save_config(config)
+
+
+ def get_sync_info() -> dict[str, Any]:
+     """Get sync information."""
+     config = load_config()
+     return config.get("sync", {})
+
+
+ # Profile management
+
+ def list_profiles() -> list[dict[str, Any]]:
+     """List all saved profiles with metadata, sorted by modification time (newest first)."""
+     ensure_directories()
+
+     sync_info = get_sync_info()  # read once; this loads the config file from disk
+
+     profiles = []
+     for profile_path in sorted(PROFILES_DIR.glob("*.md"), key=lambda p: p.stat().st_mtime, reverse=True):
+         content = profile_path.read_text()
+
+         # Extract basic stats from content (subtract header sections, floor at 0)
+         project_count = max(content.count("## ") - 1, 0)
+
+         # Check if synced
+         is_synced = sync_info.get("last_profile") == profile_path.name
+
+         # Load metadata if exists
+         metadata = get_profile_metadata(profile_path.stem)
+
+         profiles.append({
+             "name": profile_path.stem,
+             "filename": profile_path.name,
+             "path": profile_path,
+             "size": profile_path.stat().st_size,
+             "modified": datetime.fromtimestamp(profile_path.stat().st_mtime),
+             "project_count": project_count,
+             "synced": is_synced,
+             "repos": metadata.get("repos", []) if metadata else [],
+         })
+
+     return profiles
+
+
+ def get_latest_profile() -> Path | None:
+     """Get path to the latest profile."""
+     profiles = list_profiles()
+     return profiles[0]["path"] if profiles else None
+
+
+ def get_profile(name: str) -> Path | None:
+     """Get path to a specific profile by name."""
+     profile_path = PROFILES_DIR / f"{name}.md"
+     return profile_path if profile_path.exists() else None
+
+
+ def get_profile_metadata(name: str) -> dict[str, Any] | None:
+     """Get metadata for a specific profile by name."""
+     metadata_path = PROFILES_DIR / f"{name}.meta.json"
+     if not metadata_path.exists():
+         return None
+
+     try:
+         with open(metadata_path, 'r') as f:
+             return json.load(f)
+     except (json.JSONDecodeError, IOError):
+         return None
+
+
+ def save_profile(content: str, name: str | None = None, repos: list[dict[str, Any]] | None = None) -> Path:
+     """Save a profile to disk with optional metadata (repos as rich objects)."""
+     ensure_directories()
+
+     if name is None:
+         name = datetime.now().strftime("%Y-%m-%d")
+
+     # Handle duplicate names by adding a numeric suffix
+     profile_path = PROFILES_DIR / f"{name}.md"
+     counter = 1
+     while profile_path.exists():
+         profile_path = PROFILES_DIR / f"{name}-{counter}.md"
+         counter += 1
+
+     profile_path.write_text(content)
+
+     # Save metadata if repos provided
+     if repos is not None:
+         metadata_path = profile_path.with_suffix('.meta.json')
+         metadata = {
+             "repos": repos,
+             "created_at": datetime.now().isoformat(),
+         }
+         with open(metadata_path, 'w') as f:
+             json.dump(metadata, f, indent=2)
+
+     return profile_path
+
+
+ def save_repo_profile(content: str, repo_name: str, repo_metadata: dict[str, Any]) -> Path:
+     """Save a per-repo profile to disk as {repo_name}_{date}.md."""
+     ensure_directories()
+
+     date_str = datetime.now().strftime("%Y-%m-%d")
+     name = f"{repo_name}_{date_str}"
+
+     profile_path = PROFILES_DIR / f"{name}.md"
+     counter = 1
+     while profile_path.exists():
+         profile_path = PROFILES_DIR / f"{name}-{counter}.md"
+         counter += 1
+
+     profile_path.write_text(content)
+
+     metadata_path = profile_path.with_suffix('.meta.json')
+     metadata = {
+         "repo": repo_metadata,
+         "created_at": datetime.now().isoformat(),
+     }
+     with open(metadata_path, 'w') as f:
+         json.dump(metadata, f, indent=2)
+
+     return profile_path
+
+
+ # Cache management
+
+ def load_repo_hashes() -> dict[str, str]:
+     """Load cached repository hashes."""
+     ensure_directories()
+
+     if not REPO_HASHES_FILE.exists():
+         return {}
+
+     try:
+         with open(REPO_HASHES_FILE, "r") as f:
+             return json.load(f)
+     except (json.JSONDecodeError, IOError):
+         return {}
+
+
+ def save_repo_hashes(hashes: dict[str, str]) -> None:
+     """Save repository hashes to cache."""
+     ensure_directories()
+
+     with open(REPO_HASHES_FILE, "w") as f:
+         json.dump(hashes, f, indent=2)
+
+
+ def get_repo_hash(repo_path: str) -> str | None:
+     """Get cached hash for a repository."""
+     hashes = load_repo_hashes()
+     return hashes.get(repo_path)
+
+
+ def set_repo_hash(repo_path: str, hash_value: str) -> None:
+     """Set cached hash for a repository."""
+     hashes = load_repo_hashes()
+     hashes[repo_path] = hash_value
+     save_repo_hashes(hashes)
+
+
+ def clear_cache() -> None:
+     """Clear all cached data."""
+     if REPO_HASHES_FILE.exists():
+         REPO_HASHES_FILE.unlink()
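
A minimal usage sketch for orientation, not part of the package: it assumes the wheel installs as the repr package (so this module imports as repr.config), and the model name, repo path, and hash value below are illustrative stand-ins.

# sketch.py - illustrative only; names and values below are assumptions, not package data
from repr import config

# API base resolution: REPR_API_BASE wins, then dev mode picks localhost over production.
config.set_dev_mode(True)
print(config.get_api_base())  # -> http://localhost:8003/api/cli (with REPR_API_BASE unset)

# Point extraction at a local Ollama endpoint; omitted fields stay unchanged.
config.set_llm_config(
    extraction_model="llama3.2",
    local_api_url="http://localhost:11434/v1",
    local_api_key="ollama",
)

# Save a profile: with no name, today's date is used, and an existing file
# gets a -1, -2, ... suffix rather than being overwritten.
path = config.save_profile("# Profile\n\n## project-a\n...", repos=[{"name": "project-a"}])

# Cache a repo hash so unchanged repositories can be skipped on a later scan.
config.set_repo_hash("/home/me/code/project-a", "3f6c0d7e")
assert config.get_repo_hash("/home/me/code/project-a") == "3f6c0d7e"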