repr-cli 0.1.0__py3-none-any.whl → 0.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
repr/config.py CHANGED
@@ -4,6 +4,7 @@ Configuration management for ~/.repr/ directory.
4
4
 
5
5
  import json
6
6
  import os
7
+ import tempfile
7
8
  from datetime import datetime
8
9
  from pathlib import Path
9
10
  from typing import Any
@@ -14,6 +15,8 @@ from typing import Any
14
15
 
15
16
  # Environment detection
16
17
  _DEV_MODE = os.getenv("REPR_DEV", "").lower() in ("1", "true", "yes")
18
+ _CI_MODE = os.getenv("REPR_CI", "").lower() in ("1", "true", "yes")
19
+ _FORCED_MODE = os.getenv("REPR_MODE", "").lower() # "local" or "cloud"
17
20
 
18
21
  # Production URLs
19
22
  PROD_API_BASE = "https://api.repr.dev/api/cli"
@@ -34,6 +37,16 @@ def is_dev_mode() -> bool:
34
37
  return _DEV_MODE
35
38
 
36
39
 
40
def is_ci_mode() -> bool:
    """Report whether the CLI is running under CI (safe defaults, no prompts).

    Driven by the REPR_CI environment variable, sampled once at import time.
    """
    return _CI_MODE
43
+
44
+
45
def get_forced_mode() -> str | None:
    """Return the mode forced via REPR_MODE, or None.

    Only the values "local" and "cloud" are honored; anything else
    (including an unset variable) yields None.
    """
    if _FORCED_MODE in ("local", "cloud"):
        return _FORCED_MODE
    return None
48
+
49
+
37
50
  def set_dev_mode(enabled: bool) -> None:
38
51
  """Set dev mode programmatically (for CLI --dev flag)."""
39
52
  global _DEV_MODE
@@ -44,15 +57,31 @@ def set_dev_mode(enabled: bool) -> None:
44
57
  # File Configuration
45
58
  # ============================================================================
46
59
 
47
- CONFIG_DIR = Path.home() / ".repr"
60
+ # Support REPR_HOME override for multi-user/CI environments
61
+ REPR_HOME = Path(os.getenv("REPR_HOME", Path.home() / ".repr"))
62
+ CONFIG_DIR = REPR_HOME
48
63
  CONFIG_FILE = CONFIG_DIR / "config.json"
49
64
  PROFILES_DIR = CONFIG_DIR / "profiles"
50
65
  CACHE_DIR = CONFIG_DIR / "cache"
66
+ AUDIT_DIR = CONFIG_DIR / "audit"
51
67
  REPO_HASHES_FILE = CACHE_DIR / "repo-hashes.json"
52
68
 
69
+ # Version for config schema migrations
70
+ CONFIG_VERSION = 3
71
+
53
72
  DEFAULT_CONFIG = {
54
- "version": 1,
55
- "auth": None,
73
+ "version": CONFIG_VERSION,
74
+ "auth": None, # Now stores reference to keychain, not token itself
75
+ "profile": {
76
+ "username": None, # Local username (generated if not set)
77
+ "claimed": False, # Whether username is verified with repr.dev
78
+ "bio": None,
79
+ "location": None,
80
+ "website": None,
81
+ "twitter": None,
82
+ "linkedin": None,
83
+ "available": False, # Available for work
84
+ },
56
85
  "settings": {
57
86
  "default_paths": ["~/code"],
58
87
  "skip_patterns": ["node_modules", "venv", ".venv", "vendor", "__pycache__", ".git"],
@@ -62,11 +91,39 @@ DEFAULT_CONFIG = {
62
91
  "last_profile": None,
63
92
  },
64
93
  "llm": {
65
- "extraction_model": None, # Model for extracting accomplishments (e.g., "gpt-4o-mini", "llama3.2")
66
- "synthesis_model": None, # Model for synthesizing profile (e.g., "gpt-4o", "llama3.2")
94
+ "default": "local", # "local", "cloud", or "byok:<provider>"
95
+ "local_provider": None, # "ollama", "lmstudio", "custom"
67
96
  "local_api_url": None, # Local LLM API base URL (e.g., "http://localhost:11434/v1")
68
97
  "local_api_key": None, # Local LLM API key (often "ollama" for Ollama)
98
+ "local_model": None, # Model name for local LLM (e.g., "llama3.2")
99
+ "extraction_model": None, # Model for extracting accomplishments
100
+ "synthesis_model": None, # Model for synthesizing profile
101
+ "cloud_model": "gpt-4o-mini", # Default cloud model
102
+ "cloud_send_diffs": False, # Whether to send diffs to cloud (privacy setting)
103
+ "cloud_redact_paths": True, # Whether to redact absolute paths
104
+ "cloud_redact_emails": False, # Whether to redact author emails
105
+ "cloud_redact_patterns": [], # Regex patterns to redact from commit messages
106
+ "cloud_allowlist_repos": [], # Only these repos can use cloud (empty = all)
107
+ "byok": {}, # BYOK provider configs: {"openai": {"model": "gpt-4o-mini"}, ...}
108
+ },
109
+ "generation": {
110
+ "batch_size": 5, # Commits per story
111
+ "auto_generate_on_hook": False, # Auto-generate when hook runs
112
+ "default_template": "resume", # Default story template
113
+ "token_limit": 100000, # Max tokens per cloud request
114
+ "max_commits_per_batch": 50, # Max commits per request
69
115
  },
116
+ "publish": {
117
+ "on_generate": "never", # "never", "prompt", "always"
118
+ "on_sync": "prompt", # "never", "prompt", "always"
119
+ },
120
+ "privacy": {
121
+ "lock_local_only": False, # Disable cloud features entirely
122
+ "lock_permanent": False, # Make local-only lock irreversible
123
+ "profile_visibility": "public", # "public", "unlisted", "private"
124
+ "telemetry_enabled": False, # Opt-in telemetry
125
+ },
126
+ "tracked_repos": [], # List of {"path": str, "last_sync": str|None, "hook_installed": bool, "paused": bool}
70
127
  }
71
128
 
72
129
 
@@ -75,6 +132,32 @@ def ensure_directories() -> None:
75
132
  CONFIG_DIR.mkdir(exist_ok=True)
76
133
  PROFILES_DIR.mkdir(exist_ok=True)
77
134
  CACHE_DIR.mkdir(exist_ok=True)
135
+ AUDIT_DIR.mkdir(exist_ok=True)
136
+
137
+
138
+ def _atomic_json_write(path: Path, data: dict) -> None:
139
+ """Write JSON to file atomically using temp file + rename."""
140
+ path.parent.mkdir(parents=True, exist_ok=True)
141
+ fd, tmp_path = tempfile.mkstemp(dir=path.parent, suffix=".tmp")
142
+ try:
143
+ with os.fdopen(fd, "w") as f:
144
+ json.dump(data, f, indent=2)
145
+ os.replace(tmp_path, path)
146
+ except Exception:
147
+ if os.path.exists(tmp_path):
148
+ os.unlink(tmp_path)
149
+ raise
150
+
151
+
152
+ def _deep_merge(base: dict, overlay: dict) -> dict:
153
+ """Deep merge two dicts, with overlay taking precedence."""
154
+ result = base.copy()
155
+ for key, value in overlay.items():
156
+ if key in result and isinstance(result[key], dict) and isinstance(value, dict):
157
+ result[key] = _deep_merge(result[key], value)
158
+ else:
159
+ result[key] = value
160
+ return result
78
161
 
79
162
 
80
163
  def load_config() -> dict[str, Any]:
@@ -88,24 +171,107 @@ def load_config() -> dict[str, Any]:
88
171
  try:
89
172
  with open(CONFIG_FILE, "r") as f:
90
173
  config = json.load(f)
91
- # Merge with defaults for any missing keys
92
- return {**DEFAULT_CONFIG, **config}
174
+ # Deep merge with defaults for any missing keys
175
+ merged = _deep_merge(DEFAULT_CONFIG, config)
176
+
177
+ # Check for config migration
178
+ if config.get("version", 1) < CONFIG_VERSION:
179
+ merged = _migrate_config(merged, config.get("version", 1))
180
+ save_config(merged)
181
+
182
+ return merged
93
183
  except (json.JSONDecodeError, IOError):
94
184
  return DEFAULT_CONFIG.copy()
95
185
 
96
186
 
187
def _migrate_config(config: dict, from_version: int) -> dict:
    """Migrate config from older version.

    Only the pre-v3 -> v3 migration exists today: it backfills the
    profile/publish sections and the newer llm.* keys, then stamps the
    current CONFIG_VERSION.
    """
    if from_version < 3:
        # Backfill sections introduced in v3.
        config.setdefault("profile", DEFAULT_CONFIG["profile"].copy())
        config.setdefault("publish", DEFAULT_CONFIG["publish"].copy())
        if "llm" in config:
            llm = config["llm"]
            for key, default in (
                ("byok", {}),
                ("cloud_redact_emails", False),
                ("cloud_redact_patterns", []),
                ("cloud_allowlist_repos", []),
            ):
                llm.setdefault(key, default)

    config["version"] = CONFIG_VERSION
    return config
204
+
205
+
97
206
def save_config(config: dict[str, Any]) -> None:
    """Persist *config* to CONFIG_FILE atomically.

    The payload is written to a temp file in CONFIG_DIR and then renamed
    over config.json, so an interrupted write can never leave a truncated
    config behind.
    """
    ensure_directories()

    fd, tmp_path = tempfile.mkstemp(dir=CONFIG_DIR, suffix=".tmp")
    try:
        with os.fdopen(fd, "w") as handle:
            # default=str lets datetime/Path values serialize.
            json.dump(config, handle, indent=2, default=str)
        os.replace(tmp_path, CONFIG_FILE)  # atomic on POSIX
    except Exception:
        # Don't leave stray temp files behind on failure.
        if os.path.exists(tmp_path):
            os.unlink(tmp_path)
        raise
225
+
226
+
227
def get_config_value(key: str) -> Any:
    """Get a config value by dot-notation key (e.g., 'llm.default').

    Returns None when any path segment is missing or a non-dict is
    reached before the final segment.
    """
    node: Any = load_config()
    for segment in key.split("."):
        if not isinstance(node, dict) or segment not in node:
            return None
        node = node[segment]
    return node
238
+
239
+
240
def set_config_value(key: str, value: Any) -> None:
    """Set a config value by dot-notation key (e.g., 'llm.default').

    Intermediate dicts are created as needed; the full config is saved
    back to disk afterwards.
    """
    config = load_config()
    *parents, leaf = key.split(".")

    node = config
    for segment in parents:
        node = node.setdefault(segment, {})

    node[leaf] = value
    save_config(config)
103
255
 
104
256
 
105
257
def get_auth() -> dict[str, Any] | None:
    """Get authentication info if available.

    v3+ configs store only a keychain reference; the token is resolved
    from the OS keychain and injected as "access_token". Pre-v3 configs
    with a plaintext token are returned as-is.
    """
    auth = load_config().get("auth")
    if not auth:
        return None

    keychain_ref = auth.get("token_keychain_ref")
    if keychain_ref:
        from .keychain import get_secret

        token = get_secret(keychain_ref)
        if not token:
            # Reference exists but the secret is gone — treat as logged out.
            return None
        return {**auth, "access_token": token}

    # Legacy plaintext token.
    return auth
109
275
 
110
276
 
111
277
  def set_auth(
@@ -114,22 +280,44 @@ def set_auth(
114
280
  email: str,
115
281
  litellm_api_key: str | None = None,
116
282
  ) -> None:
117
- """Store authentication info."""
283
+ """Store authentication info securely."""
284
+ from .keychain import store_secret
285
+
118
286
  config = load_config()
287
+
288
+ # Store token in keychain
289
+ keychain_ref = f"auth_token_{user_id[:8]}"
290
+ store_secret(keychain_ref, access_token)
291
+
119
292
  config["auth"] = {
120
- "access_token": access_token,
293
+ "token_keychain_ref": keychain_ref,
121
294
  "user_id": user_id,
122
295
  "email": email,
123
296
  "authenticated_at": datetime.now().isoformat(),
124
297
  }
298
+
125
299
  if litellm_api_key:
126
- config["auth"]["litellm_api_key"] = litellm_api_key
300
+ litellm_ref = f"litellm_key_{user_id[:8]}"
301
+ store_secret(litellm_ref, litellm_api_key)
302
+ config["auth"]["litellm_keychain_ref"] = litellm_ref
303
+
127
304
  save_config(config)
128
305
 
129
306
 
130
307
def clear_auth() -> None:
    """Clear authentication info (config entry plus any keychain secrets)."""
    from .keychain import delete_secret

    config = load_config()
    auth = config.get("auth") or {}

    # Drop every secret we stashed in the OS keychain.
    for ref_key in ("token_keychain_ref", "litellm_keychain_ref"):
        ref = auth.get(ref_key)
        if ref:
            delete_secret(ref)

    config["auth"] = None
    save_config(config)
135
323
 
@@ -152,10 +340,79 @@ def get_litellm_config() -> tuple[str | None, str | None]:
152
340
  Returns:
153
341
  Tuple of (litellm_url, litellm_api_key)
154
342
  """
343
+ from .keychain import get_secret
344
+
155
345
  auth = get_auth()
156
346
  if not auth:
157
347
  return None, None
158
- return auth.get("litellm_url"), auth.get("litellm_api_key")
348
+
349
+ litellm_url = auth.get("litellm_url")
350
+ litellm_key = None
351
+
352
+ if auth.get("litellm_keychain_ref"):
353
+ litellm_key = get_secret(auth["litellm_keychain_ref"])
354
+ elif auth.get("litellm_api_key"): # Legacy
355
+ litellm_key = auth["litellm_api_key"]
356
+
357
+ return litellm_url, litellm_key
358
+
359
+
360
+ # ============================================================================
361
+ # Privacy Configuration
362
+ # ============================================================================
363
+
364
def is_cloud_allowed() -> bool:
    """Check if cloud operations are allowed.

    Cloud is blocked when REPR_CI is set, when REPR_MODE=local forces
    local mode, or when privacy.lock_local_only is enabled in config.
    """
    # Environment overrides win without touching the config file.
    if _CI_MODE or _FORCED_MODE == "local":
        return False

    privacy = load_config().get("privacy", {})
    return not privacy.get("lock_local_only", False)
380
+
381
+
382
def lock_local_only(permanent: bool = False) -> None:
    """Lock to local-only mode.

    Args:
        permanent: If True, lock cannot be reversed
    """
    config = load_config()
    privacy = config["privacy"]
    privacy["lock_local_only"] = True
    if permanent:
        privacy["lock_permanent"] = True
    save_config(config)
393
+
394
+
395
def unlock_local_only() -> bool:
    """Unlock from local-only mode.

    Returns:
        True if unlocked, False if permanently locked
    """
    config = load_config()
    # setdefault: older configs may lack a "privacy" section entirely.
    # The previous code read it defensively via config.get("privacy", {})
    # but then wrote through config["privacy"][...], which would raise
    # KeyError in exactly that case.
    privacy = config.setdefault("privacy", {})

    if privacy.get("lock_permanent"):
        return False

    privacy["lock_local_only"] = False
    save_config(config)
    return True
410
+
411
+
412
def get_privacy_settings() -> dict[str, Any]:
    """Get current privacy settings.

    Returns a copy of the defaults when the config has no "privacy"
    section — the previous code returned DEFAULT_CONFIG["privacy"]
    itself, so a caller mutating the result would corrupt the
    module-level defaults.
    """
    config = load_config()
    if "privacy" in config:
        return config["privacy"]
    return dict(DEFAULT_CONFIG["privacy"])
159
416
 
160
417
 
161
418
  # ============================================================================
@@ -166,16 +423,10 @@ def get_llm_config() -> dict[str, Any]:
166
423
  """Get LLM configuration.
167
424
 
168
425
  Returns:
169
- Dict with extraction_model, synthesis_model, local_api_url, local_api_key
426
+ Dict with full LLM config
170
427
  """
171
428
  config = load_config()
172
- llm = config.get("llm", {})
173
- return {
174
- "extraction_model": llm.get("extraction_model"),
175
- "synthesis_model": llm.get("synthesis_model"),
176
- "local_api_url": llm.get("local_api_url"),
177
- "local_api_key": llm.get("local_api_key"),
178
- }
429
+ return config.get("llm", DEFAULT_CONFIG["llm"])
179
430
 
180
431
 
181
432
  def set_llm_config(
@@ -183,6 +434,8 @@ def set_llm_config(
183
434
  synthesis_model: str | None = None,
184
435
  local_api_url: str | None = None,
185
436
  local_api_key: str | None = None,
437
+ local_model: str | None = None,
438
+ default: str | None = None,
186
439
  ) -> None:
187
440
  """Set LLM configuration.
188
441
 
@@ -200,6 +453,10 @@ def set_llm_config(
200
453
  config["llm"]["local_api_url"] = local_api_url if local_api_url else None
201
454
  if local_api_key is not None:
202
455
  config["llm"]["local_api_key"] = local_api_key if local_api_key else None
456
+ if local_model is not None:
457
+ config["llm"]["local_model"] = local_model if local_model else None
458
+ if default is not None:
459
+ config["llm"]["default"] = default
203
460
 
204
461
  save_config(config)
205
462
 
@@ -211,6 +468,163 @@ def clear_llm_config() -> None:
211
468
  save_config(config)
212
469
 
213
470
 
471
def get_default_llm_mode() -> str:
    """Get the default LLM mode ("local", "cloud", or "byok:<provider>")."""
    llm_cfg = load_config().get("llm", {})
    return llm_cfg.get("default", "local")
475
+
476
+
477
# ============================================================================
# BYOK Configuration
# ============================================================================

# Registry of supported bring-your-own-key providers. Keys are the provider
# ids accepted by add_byok_provider(); each entry carries a human-readable
# display name, the model used when the caller doesn't specify one, and the
# OpenAI-compatible API base URL used by get_byok_config().
BYOK_PROVIDERS = {
    "openai": {
        "name": "OpenAI",
        "default_model": "gpt-4o-mini",
        "base_url": "https://api.openai.com/v1",
    },
    "anthropic": {
        "name": "Anthropic",
        "default_model": "claude-3-sonnet-20240229",
        "base_url": "https://api.anthropic.com/v1",
    },
    "groq": {
        "name": "Groq",
        "default_model": "llama-3.1-70b-versatile",
        "base_url": "https://api.groq.com/openai/v1",
    },
    "together": {
        "name": "Together AI",
        "default_model": "meta-llama/Llama-3-70b-chat-hf",
        "base_url": "https://api.together.xyz/v1",
    },
}
503
+
504
+
505
def add_byok_provider(provider: str, api_key: str, model: str | None = None) -> bool:
    """Add a BYOK provider.

    The API key goes into the OS keychain; only a reference to it is
    written to the config file.

    Args:
        provider: Provider name (openai, anthropic, etc.)
        api_key: API key for the provider
        model: Optional model override

    Returns:
        True if added successfully, False for an unknown provider
    """
    from .keychain import store_secret

    if provider not in BYOK_PROVIDERS:
        return False

    # Store API key in keychain, keep only the reference in config.
    keychain_ref = f"byok_{provider}"
    store_secret(keychain_ref, api_key)

    config = load_config()
    # Chained setdefault guards against configs missing the "llm" section —
    # the previous `config["llm"]["byok"] = {}` raised KeyError there.
    byok = config.setdefault("llm", {}).setdefault("byok", {})

    byok[provider] = {
        "keychain_ref": keychain_ref,
        "model": model or BYOK_PROVIDERS[provider]["default_model"],
        "send_diffs": False,  # privacy-safe default: never send diffs
        "redact_paths": True,  # privacy-safe default: strip absolute paths
    }

    save_config(config)
    return True
539
+
540
+
541
def remove_byok_provider(provider: str) -> bool:
    """Remove a BYOK provider.

    Args:
        provider: Provider name

    Returns:
        True if removed, False if not found
    """
    from .keychain import delete_secret

    config = load_config()
    byok = config.get("llm", {}).get("byok", {})

    if provider not in byok:
        return False

    # Clean the secret out of the OS keychain first.
    ref = byok[provider].get("keychain_ref")
    if ref:
        delete_secret(ref)

    del config["llm"]["byok"][provider]
    save_config(config)
    return True
565
+
566
+
567
def get_byok_config(provider: str) -> dict[str, Any] | None:
    """Get BYOK configuration for a provider.

    Returns:
        Config dict with api_key included, or None if not configured
        (or if the keychain no longer holds the referenced secret)
    """
    from .keychain import get_secret

    byok = load_config().get("llm", {}).get("byok", {})
    if provider not in byok:
        return None

    result = byok[provider].copy()

    # Resolve the secret from the OS keychain.
    ref = result.get("keychain_ref")
    if ref:
        secret = get_secret(ref)
        if not secret:
            return None  # key vanished from keychain
        result["api_key"] = secret

    # Enrich with static provider info.
    if provider in BYOK_PROVIDERS:
        known = BYOK_PROVIDERS[provider]
        result["base_url"] = known["base_url"]
        result["provider_name"] = known["name"]

    return result
597
+
598
+
599
def list_byok_providers() -> list[str]:
    """List configured BYOK providers."""
    byok = load_config().get("llm", {}).get("byok", {})
    return list(byok)
603
+
604
+
605
+ # ============================================================================
606
+ # Profile Configuration
607
+ # ============================================================================
608
+
609
def get_profile_config() -> dict[str, Any]:
    """Get profile configuration.

    Returns a copy of the defaults when the config has no "profile"
    section — the previous code returned DEFAULT_CONFIG["profile"]
    itself, so a caller mutating the result would corrupt the
    module-level defaults.
    """
    config = load_config()
    if "profile" in config:
        return config["profile"]
    return dict(DEFAULT_CONFIG["profile"])
613
+
614
+
615
def set_profile_config(**kwargs) -> None:
    """Set profile configuration fields.

    Field names not present in DEFAULT_CONFIG["profile"] are silently
    ignored.
    """
    config = load_config()
    profile = config.setdefault("profile", DEFAULT_CONFIG["profile"].copy())

    allowed = DEFAULT_CONFIG["profile"]
    for field, new_value in kwargs.items():
        if field in allowed:
            profile[field] = new_value

    save_config(config)
626
+
627
+
214
628
  def get_skip_patterns() -> list[str]:
215
629
  """Get list of patterns to skip during discovery."""
216
630
  config = load_config()
@@ -233,7 +647,9 @@ def get_sync_info() -> dict[str, Any]:
233
647
  return config.get("sync", {})
234
648
 
235
649
 
236
- # Profile management
650
+ # ============================================================================
651
+ # Profile File Management
652
+ # ============================================================================
237
653
 
238
654
  def list_profiles() -> list[dict[str, Any]]:
239
655
  """List all saved profiles with metadata, sorted by modification time (newest first)."""
@@ -317,8 +733,7 @@ def save_profile(content: str, name: str | None = None, repos: list[dict[str, An
317
733
  "repos": repos,
318
734
  "created_at": datetime.now().isoformat(),
319
735
  }
320
- with open(metadata_path, 'w') as f:
321
- json.dump(metadata, f, indent=2)
736
+ _atomic_json_write(metadata_path, metadata)
322
737
 
323
738
  return profile_path
324
739
 
@@ -343,13 +758,14 @@ def save_repo_profile(content: str, repo_name: str, repo_metadata: dict[str, Any
343
758
  "repo": repo_metadata,
344
759
  "created_at": datetime.now().isoformat(),
345
760
  }
346
- with open(metadata_path, 'w') as f:
347
- json.dump(metadata, f, indent=2)
761
+ _atomic_json_write(metadata_path, metadata)
348
762
 
349
763
  return profile_path
350
764
 
351
765
 
352
- # Cache management
766
+ # ============================================================================
767
+ # Cache Management
768
+ # ============================================================================
353
769
 
354
770
  def load_repo_hashes() -> dict[str, str]:
355
771
  """Load cached repository hashes."""
@@ -366,11 +782,19 @@ def load_repo_hashes() -> dict[str, str]:
366
782
 
367
783
 
368
784
def save_repo_hashes(hashes: dict[str, str]) -> None:
    """Save repository hashes to cache atomically.

    Delegates to _atomic_json_write (temp file + rename in the target
    directory) instead of duplicating the identical inline pattern —
    same `json.dump(..., indent=2)` serialization, same ".tmp" suffix,
    same atomic `os.replace`.
    """
    ensure_directories()
    _atomic_json_write(REPO_HASHES_FILE, hashes)
374
798
 
375
799
 
376
800
  def get_repo_hash(repo_path: str) -> str | None:
@@ -390,3 +814,205 @@ def clear_cache() -> None:
390
814
  """Clear all cached data."""
391
815
  if REPO_HASHES_FILE.exists():
392
816
  REPO_HASHES_FILE.unlink()
817
+
818
+
819
def get_cache_size() -> int:
    """Get total size of cache directory in bytes."""
    if not CACHE_DIR.exists():
        return 0
    return sum(entry.stat().st_size for entry in CACHE_DIR.rglob("*") if entry.is_file())
827
+
828
+
829
+ # ============================================================================
830
+ # Tracked Repositories Management
831
+ # ============================================================================
832
+
833
def get_tracked_repos() -> list[dict[str, Any]]:
    """Get list of tracked repositories.

    Returns:
        List of dicts with 'path', 'last_sync', 'hook_installed', 'paused'
    """
    return load_config().get("tracked_repos", [])
841
+
842
+
843
def add_tracked_repo(path: str) -> None:
    """Add a repository to tracked list.

    Adding a path that is already tracked is a no-op.

    Args:
        path: Absolute path to repository
    """
    config = load_config()
    normalized = str(Path(path).expanduser().resolve())

    tracked = config.get("tracked_repos", [])
    if any(entry["path"] == normalized for entry in tracked):
        return  # already tracked

    tracked.append(
        {
            "path": normalized,
            "last_sync": None,
            "hook_installed": False,
            "paused": False,
        }
    )
    config["tracked_repos"] = tracked
    save_config(config)
870
+
871
+
872
def remove_tracked_repo(path: str) -> bool:
    """Remove a repository from tracked list.

    Args:
        path: Path to repository

    Returns:
        True if removed, False if not found
    """
    config = load_config()
    normalized = str(Path(path).expanduser().resolve())

    tracked = config.get("tracked_repos", [])
    kept = [entry for entry in tracked if entry["path"] != normalized]

    if len(kept) == len(tracked):
        return False  # path was not tracked

    config["tracked_repos"] = kept
    save_config(config)
    return True
+
897
+
898
def update_repo_sync(path: str, timestamp: str | None = None) -> None:
    """Update last sync timestamp for a repository.

    Args:
        path: Path to repository
        timestamp: ISO format timestamp (default: now)
    """
    config = load_config()
    normalized = str(Path(path).expanduser().resolve())
    stamp = timestamp if timestamp is not None else datetime.now().isoformat()

    tracked = config.get("tracked_repos", [])
    entry = next((r for r in tracked if r["path"] == normalized), None)
    if entry is not None:
        entry["last_sync"] = stamp

    config["tracked_repos"] = tracked
    save_config(config)
919
+
920
+
921
def set_repo_hook_status(path: str, installed: bool) -> None:
    """Set hook installation status for a repository.

    Args:
        path: Path to repository
        installed: Whether hook is installed
    """
    config = load_config()
    normalized = str(Path(path).expanduser().resolve())

    tracked = config.get("tracked_repos", [])
    entry = next((r for r in tracked if r["path"] == normalized), None)
    if entry is not None:
        entry["hook_installed"] = installed

    config["tracked_repos"] = tracked
    save_config(config)
939
+
940
+
941
def set_repo_paused(path: str, paused: bool) -> None:
    """Set paused status for a repository.

    Args:
        path: Path to repository
        paused: Whether auto-tracking is paused
    """
    config = load_config()
    normalized = str(Path(path).expanduser().resolve())

    tracked = config.get("tracked_repos", [])
    entry = next((r for r in tracked if r["path"] == normalized), None)
    if entry is not None:
        entry["paused"] = paused

    config["tracked_repos"] = tracked
    save_config(config)
959
+
960
+
961
def get_repo_info(path: str) -> dict[str, Any] | None:
    """Get tracking info for a specific repository.

    Args:
        path: Path to repository

    Returns:
        Repo info dict or None if not tracked
    """
    normalized = str(Path(path).expanduser().resolve())
    return next(
        (repo for repo in get_tracked_repos() if repo["path"] == normalized),
        None,
    )
978
+
979
+
980
+ # ============================================================================
981
+ # Data Storage Info
982
+ # ============================================================================
983
+
984
def get_data_info() -> dict[str, Any]:
    """Get information about local data storage (sizes, counts, paths)."""
    from .storage import STORIES_DIR, get_story_count

    ensure_directories()

    def dir_size(path: Path) -> int:
        # Total bytes of all regular files under path; 0 if path is missing.
        if not path.exists():
            return 0
        return sum(f.stat().st_size for f in path.rglob("*") if f.is_file())

    stories_size = dir_size(STORIES_DIR)
    profiles_size = dir_size(PROFILES_DIR)
    cache_size = dir_size(CACHE_DIR)
    config_size = CONFIG_FILE.stat().st_size if CONFIG_FILE.exists() else 0

    return {
        "stories_count": get_story_count(),
        "stories_size": stories_size,
        "profiles_size": profiles_size,
        "cache_size": cache_size,
        "config_size": config_size,
        "total_size": stories_size + profiles_size + cache_size + config_size,
        "paths": {
            "home": str(REPR_HOME),
            "stories": str(STORIES_DIR),
            "profiles": str(PROFILES_DIR),
            "cache": str(CACHE_DIR),
            "config": str(CONFIG_FILE),
        },
    }