repr-cli 0.1.0__py3-none-any.whl → 0.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
repr/storage.py ADDED
@@ -0,0 +1,527 @@
"""
Story storage management for ~/.repr/stories/

Stories are stored as pairs:
- <ULID>.md: Story content in Markdown
- <ULID>.json: Metadata (commits, timestamps, repo, etc.)
"""

import json
import os
import random
import tempfile
import time
from datetime import datetime
from pathlib import Path
from typing import Any

# Base32 alphabet for ULID (Crockford base32: I, L, O, U are excluded)
ULID_ALPHABET = "0123456789ABCDEFGHJKMNPQRSTVWXYZ"


def generate_ulid() -> str:
    """Generate a ULID (Universally Unique Lexicographically Sortable Identifier).

    Simple implementation: uses the `random` module, which is not
    cryptographically secure; adequate for local story IDs.
    """
    # Timestamp component (48-bit millisecond timestamp -> 10 base32 chars)
    timestamp_ms = int(time.time() * 1000)
    timestamp_chars = []
    for _ in range(10):
        timestamp_chars.append(ULID_ALPHABET[timestamp_ms & 31])
        timestamp_ms >>= 5
    timestamp_part = "".join(reversed(timestamp_chars))

    # Random component (80 bits -> 16 base32 chars)
    random_chars = [random.choice(ULID_ALPHABET) for _ in range(16)]
    random_part = "".join(random_chars)

    return timestamp_part + random_part


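# Example (illustrative, not part of the package): ULIDs are 26 characters
# and sort lexicographically by creation time, so filename order doubles as
# chronological order.
# >>> a = generate_ulid(); time.sleep(0.002); b = generate_ulid()
# >>> len(a) == 26 and a < b
# True
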
# Storage paths (REPR_HOME can be overridden via the environment)
REPR_HOME = Path(os.getenv("REPR_HOME", Path.home() / ".repr"))
STORIES_DIR = REPR_HOME / "stories"


def ensure_directories() -> None:
    """Ensure storage directories exist."""
    REPR_HOME.mkdir(parents=True, exist_ok=True)
    STORIES_DIR.mkdir(parents=True, exist_ok=True)


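# Note (illustrative): REPR_HOME is resolved once at import time, so the
# override must be in the environment before this module is imported, e.g.
# >>> # REPR_HOME=/tmp/repr-test python -c "import repr.storage"
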
def _atomic_write(path: Path, content: str) -> None:
    """Write content atomically using temp file + rename.

    The temp file is created in the destination directory so that
    os.replace() stays on one filesystem and the rename is atomic.
    """
    path.parent.mkdir(parents=True, exist_ok=True)
    fd, tmp_path = tempfile.mkstemp(dir=path.parent, suffix=".tmp")
    try:
        with os.fdopen(fd, "w") as f:
            f.write(content)
        os.replace(tmp_path, path)
    except Exception:
        if os.path.exists(tmp_path):
            os.unlink(tmp_path)
        raise


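# Example (illustrative): a write either fully lands or leaves the previous
# file intact; readers never observe a half-written story.
# >>> _atomic_write(STORIES_DIR / "demo.md", "# Demo")
# >>> (STORIES_DIR / "demo.md").read_text()
# '# Demo'
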
def save_story(
    content: str,
    metadata: dict[str, Any],
    story_id: str | None = None,
) -> str:
    """
    Save a story to ~/.repr/stories/

    Args:
        content: Markdown content of the story
        metadata: Story metadata (commits, repo, timestamps, etc.)
            Note: mutated in place (id and timestamps are added).
        story_id: Optional ULID (a new one is generated if not provided)

    Returns:
        Story ID (ULID)
    """
    ensure_directories()

    if story_id is None:
        story_id = generate_ulid()

    # Add timestamps to metadata (local time, naive ISO 8601)
    now = datetime.now().isoformat()
    if "created_at" not in metadata:
        metadata["created_at"] = now
    metadata["updated_at"] = now
    metadata["id"] = story_id

    # Write markdown file
    md_path = STORIES_DIR / f"{story_id}.md"
    _atomic_write(md_path, content)

    # Write metadata file
    meta_path = STORIES_DIR / f"{story_id}.json"
    _atomic_write(meta_path, json.dumps(metadata, indent=2, default=str))

    return story_id


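# Example (illustrative): round-trip a story; the values are hypothetical.
# >>> sid = save_story("# Fixed the flaky test", {"repo_name": "demo"})
# >>> content, meta = load_story(sid)
# >>> content
# '# Fixed the flaky test'
# >>> meta["repo_name"]
# 'demo'
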
def load_story(story_id: str) -> tuple[str, dict[str, Any]] | None:
    """
    Load a story by ID.

    Args:
        story_id: Story ULID

    Returns:
        Tuple of (content, metadata) or None if not found
    """
    md_path = STORIES_DIR / f"{story_id}.md"
    meta_path = STORIES_DIR / f"{story_id}.json"

    if not md_path.exists():
        return None

    content = md_path.read_text()

    # Corrupt or missing metadata degrades to an empty dict rather than failing
    metadata: dict[str, Any] = {}
    if meta_path.exists():
        try:
            metadata = json.loads(meta_path.read_text())
        except json.JSONDecodeError:
            pass

    return content, metadata


def delete_story(story_id: str) -> bool:
    """
    Delete a story by ID.

    Args:
        story_id: Story ULID

    Returns:
        True if deleted, False if not found
    """
    md_path = STORIES_DIR / f"{story_id}.md"
    meta_path = STORIES_DIR / f"{story_id}.json"

    deleted = False

    if md_path.exists():
        md_path.unlink()
        deleted = True

    if meta_path.exists():
        meta_path.unlink()
        deleted = True

    return deleted


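# Example (illustrative), continuing from the save_story sketch above:
# >>> delete_story(sid)
# True
# >>> load_story(sid) is None
# True
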
def list_stories(
    repo_name: str | None = None,
    since: datetime | None = None,
    needs_review: bool = False,
    limit: int | None = None,
) -> list[dict[str, Any]]:
    """
    List all stories with metadata.

    Args:
        repo_name: Filter by repository name
        since: Filter by creation date (pass a naive datetime to match the
            naive timestamps written by save_story)
        needs_review: Only show stories needing review
        limit: Maximum stories to return

    Returns:
        List of story metadata dicts (sorted by creation date, newest first)
    """
    ensure_directories()

    stories = []

    for meta_path in STORIES_DIR.glob("*.json"):
        try:
            metadata = json.loads(meta_path.read_text())

            # Apply filters
            if repo_name and metadata.get("repo_name") != repo_name:
                continue

            if since:
                created_at = metadata.get("created_at")
                if created_at:
                    created = datetime.fromisoformat(created_at.replace("Z", "+00:00"))
                    if created < since:
                        continue

            if needs_review and not metadata.get("needs_review", False):
                continue

            # Check if content file exists
            story_id = meta_path.stem
            md_path = STORIES_DIR / f"{story_id}.md"
            if md_path.exists():
                metadata["_has_content"] = True
                metadata["_content_size"] = md_path.stat().st_size

            stories.append(metadata)

        except (json.JSONDecodeError, IOError):
            continue

    # Sort by creation date (newest first)
    stories.sort(
        key=lambda s: s.get("created_at", ""),
        reverse=True,
    )

    # Explicit None check so limit=0 means "no stories", not "no limit"
    if limit is not None:
        stories = stories[:limit]

    return stories


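# Example (illustrative): the five newest stories for one repo.
# >>> for meta in list_stories(repo_name="demo", limit=5):
# ...     print(meta["id"], meta.get("_content_size", 0))
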
def update_story_metadata(story_id: str, updates: dict[str, Any]) -> bool:
    """
    Update metadata for a story.

    Args:
        story_id: Story ULID
        updates: Dict of fields to update (shallow merge: nested dicts
            are replaced, not merged)

    Returns:
        True if updated, False if not found
    """
    meta_path = STORIES_DIR / f"{story_id}.json"

    if not meta_path.exists():
        return False

    try:
        metadata = json.loads(meta_path.read_text())
        metadata.update(updates)
        metadata["updated_at"] = datetime.now().isoformat()
        _atomic_write(meta_path, json.dumps(metadata, indent=2, default=str))
        return True
    except (json.JSONDecodeError, IOError):
        return False


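# Example (illustrative):
# >>> update_story_metadata(sid, {"needs_review": True})
# True
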
def get_unpushed_stories() -> list[dict[str, Any]]:
    """Get stories that haven't been pushed to cloud."""
    stories = list_stories()
    return [s for s in stories if not s.get("pushed_at")]


def mark_story_pushed(story_id: str) -> bool:
    """Mark a story as pushed to cloud."""
    return update_story_metadata(story_id, {"pushed_at": datetime.now().isoformat()})


def get_stories_needing_review() -> list[dict[str, Any]]:
    """Get stories that need review."""
    return list_stories(needs_review=True)


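# Example (illustrative): a minimal sync loop. `push_to_cloud` is a
# hypothetical uploader, not defined in this module.
# >>> for story in get_unpushed_stories():
# ...     if push_to_cloud(story):  # hypothetical
# ...         mark_story_pushed(story["id"])
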
def get_story_count() -> int:
    """Get total number of stories."""
    ensure_directories()
    return len(list(STORIES_DIR.glob("*.md")))


# ============================================================================
# Backup & Restore
# ============================================================================

def backup_all_data() -> dict[str, Any]:
    """
    Create a full backup of all repr data.

    Returns:
        Dict containing all data (can be serialized to JSON)
    """
    from .config import load_config, PROFILES_DIR
    from .privacy import load_audit_log

    ensure_directories()

    # Backup stories
    stories_backup = []
    for md_path in STORIES_DIR.glob("*.md"):
        story_id = md_path.stem
        meta_path = STORIES_DIR / f"{story_id}.json"

        content = md_path.read_text()
        metadata = {}
        if meta_path.exists():
            try:
                metadata = json.loads(meta_path.read_text())
            except json.JSONDecodeError:
                pass

        stories_backup.append({
            "id": story_id,
            "content": content,
            "metadata": metadata,
        })

    # Backup profiles
    profiles_backup = []
    for profile_path in PROFILES_DIR.glob("*.md"):
        content = profile_path.read_text()
        meta_path = profile_path.with_suffix(".meta.json")
        metadata = {}
        if meta_path.exists():
            try:
                metadata = json.loads(meta_path.read_text())
            except json.JSONDecodeError:
                pass

        profiles_backup.append({
            "name": profile_path.stem,
            "content": content,
            "metadata": metadata,
        })

    # Get config (auth credentials are deliberately excluded)
    config = load_config()
    config_backup = {k: v for k, v in config.items() if k != "auth"}

    # Get audit log
    audit_log = load_audit_log()

    return {
        "version": "1.0",
        "exported_at": datetime.now().isoformat(),
        "stories": stories_backup,
        "profiles": profiles_backup,
        "config": config_backup,
        "audit_log": audit_log,
    }


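# Example (illustrative): write a full backup to disk; the file name is
# arbitrary.
# >>> backup = backup_all_data()
# >>> _ = Path("repr-backup.json").write_text(json.dumps(backup, indent=2, default=str))
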
def restore_from_backup(backup_data: dict[str, Any], merge: bool = True) -> dict[str, int]:
    """
    Restore data from a backup.

    Args:
        backup_data: Backup dict (from backup_all_data or a JSON file)
        merge: If True, keep existing items and only add missing ones.
            If False, overwrite existing items with the backup copies.

    Returns:
        Dict with counts of restored items
    """
    from .config import save_config, load_config, PROFILES_DIR

    ensure_directories()

    restored = {
        "stories": 0,
        "profiles": 0,
        "config_keys": 0,
    }

    # Restore stories
    for story in backup_data.get("stories", []):
        story_id = story.get("id")
        if not story_id:
            continue

        # Skip existing stories in merge mode
        md_path = STORIES_DIR / f"{story_id}.md"
        if md_path.exists() and merge:
            continue

        # Write story
        _atomic_write(md_path, story.get("content", ""))
        meta_path = STORIES_DIR / f"{story_id}.json"
        _atomic_write(meta_path, json.dumps(story.get("metadata", {}), indent=2))
        restored["stories"] += 1

    # Restore profiles
    for profile in backup_data.get("profiles", []):
        name = profile.get("name")
        if not name:
            continue

        profile_path = PROFILES_DIR / f"{name}.md"
        if profile_path.exists() and merge:
            continue

        _atomic_write(profile_path, profile.get("content", ""))
        if profile.get("metadata"):
            meta_path = profile_path.with_suffix(".meta.json")
            _atomic_write(meta_path, json.dumps(profile.get("metadata"), indent=2))
        restored["profiles"] += 1

    # Restore config (merge non-sensitive keys)
    if backup_data.get("config"):
        current_config = load_config()
        backup_config = backup_data["config"]

        # Only restore safe keys
        safe_keys = ["settings", "llm", "generation", "publish", "privacy", "tracked_repos", "profile"]
        for key in safe_keys:
            if key in backup_config:
                if merge and key in current_config:
                    # Shallow merge for dicts: existing values win over backup values
                    if isinstance(current_config.get(key), dict) and isinstance(backup_config[key], dict):
                        current_config[key] = {**backup_config[key], **current_config[key]}
                    else:
                        current_config[key] = backup_config[key]
                else:
                    current_config[key] = backup_config[key]
                restored["config_keys"] += 1

        save_config(current_config)

    return restored


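# Example (illustrative): restore from a backup file written earlier;
# the counts in the result will vary.
# >>> data = json.loads(Path("repr-backup.json").read_text())
# >>> restore_from_backup(data, merge=True)
# {'stories': 3, 'profiles': 1, 'config_keys': 4}
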
def export_stories_json() -> str:
    """
    Export all stories as JSON.

    Returns:
        JSON string of all stories
    """
    ensure_directories()

    stories = []
    for md_path in STORIES_DIR.glob("*.md"):
        story_id = md_path.stem
        result = load_story(story_id)
        if result:
            content, metadata = result
            stories.append({
                "id": story_id,
                "content": content,
                "metadata": metadata,
            })

    return json.dumps(stories, indent=2, default=str)


def import_stories_json(json_data: str, merge: bool = True) -> int:
    """
    Import stories from JSON.

    Args:
        json_data: JSON string of stories
        merge: If True, skip stories whose ID already exists.
            If False, re-import them under fresh IDs.

    Returns:
        Number of stories imported
    """
    try:
        stories = json.loads(json_data)
    except json.JSONDecodeError:
        return 0

    imported = 0
    for story in stories:
        story_id = story.get("id")

        # Generate a new ID when none was provided, or when the ID already
        # exists and we are not merging (avoids clobbering the local copy)
        md_path = STORIES_DIR / f"{story_id}.md" if story_id else None
        if not story_id or (md_path and md_path.exists() and not merge):
            story_id = generate_ulid()
        elif md_path and md_path.exists() and merge:
            continue  # Skip existing

        # Save story
        content = story.get("content", "")
        metadata = story.get("metadata", {})
        save_story(content, metadata, story_id)
        imported += 1

    return imported


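# Example (illustrative): export/import is a simple way to move stories
# between machines. Importing your own export on the same machine is a
# no-op because every ID already exists.
# >>> payload = export_stories_json()
# >>> import_stories_json(payload)
# 0
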
def get_storage_stats() -> dict[str, Any]:
    """
    Get storage statistics.

    Returns:
        Dict with storage stats
    """
    from .config import PROFILES_DIR, CACHE_DIR, CONFIG_FILE

    ensure_directories()

    def dir_size(path: Path) -> int:
        total = 0
        if path.exists():
            for f in path.rglob("*"):
                if f.is_file():
                    total += f.stat().st_size
        return total

    def file_count(path: Path, pattern: str = "*") -> int:
        if path.exists():
            return len(list(path.glob(pattern)))
        return 0

    stories_size = dir_size(STORIES_DIR)
    profiles_size = dir_size(PROFILES_DIR)
    cache_size = dir_size(CACHE_DIR)
    config_size = CONFIG_FILE.stat().st_size if CONFIG_FILE.exists() else 0

    return {
        "stories": {
            "count": file_count(STORIES_DIR, "*.md"),
            "size_bytes": stories_size,
            "path": str(STORIES_DIR),
        },
        "profiles": {
            "count": file_count(PROFILES_DIR, "*.md"),
            "size_bytes": profiles_size,
            "path": str(PROFILES_DIR),
        },
        "cache": {
            "size_bytes": cache_size,
            "path": str(CACHE_DIR),
        },
        "config": {
            "size_bytes": config_size,
            "path": str(CONFIG_FILE),
        },
        "total_size_bytes": stories_size + profiles_size + cache_size + config_size,
    }
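

# Example (illustrative): a one-line storage summary.
# >>> stats = get_storage_stats()
# >>> print(f"{stats['stories']['count']} stories, "
# ...       f"{stats['total_size_bytes'] / 1024:.1f} KiB total")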