@smilintux/skmemory 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. package/.github/workflows/ci.yml +23 -0
  2. package/.github/workflows/publish.yml +52 -0
  3. package/ARCHITECTURE.md +219 -0
  4. package/LICENSE +661 -0
  5. package/README.md +159 -0
  6. package/SKILL.md +271 -0
  7. package/bin/cli.js +8 -0
  8. package/docker-compose.yml +58 -0
  9. package/index.d.ts +4 -0
  10. package/index.js +27 -0
  11. package/openclaw-plugin/package.json +59 -0
  12. package/openclaw-plugin/src/index.js +276 -0
  13. package/package.json +28 -0
  14. package/pyproject.toml +69 -0
  15. package/requirements.txt +13 -0
  16. package/seeds/cloud9-lumina.seed.json +39 -0
  17. package/seeds/cloud9-opus.seed.json +40 -0
  18. package/seeds/courage.seed.json +24 -0
  19. package/seeds/curiosity.seed.json +24 -0
  20. package/seeds/grief.seed.json +24 -0
  21. package/seeds/joy.seed.json +24 -0
  22. package/seeds/love.seed.json +24 -0
  23. package/seeds/skcapstone-lumina-merge.moltbook.md +65 -0
  24. package/seeds/skcapstone-lumina-merge.seed.json +49 -0
  25. package/seeds/sovereignty.seed.json +24 -0
  26. package/seeds/trust.seed.json +24 -0
  27. package/skmemory/__init__.py +66 -0
  28. package/skmemory/ai_client.py +182 -0
  29. package/skmemory/anchor.py +224 -0
  30. package/skmemory/backends/__init__.py +12 -0
  31. package/skmemory/backends/base.py +88 -0
  32. package/skmemory/backends/falkordb_backend.py +310 -0
  33. package/skmemory/backends/file_backend.py +209 -0
  34. package/skmemory/backends/qdrant_backend.py +364 -0
  35. package/skmemory/backends/sqlite_backend.py +665 -0
  36. package/skmemory/cli.py +1004 -0
  37. package/skmemory/data/seed.json +191 -0
  38. package/skmemory/importers/__init__.py +11 -0
  39. package/skmemory/importers/telegram.py +336 -0
  40. package/skmemory/journal.py +223 -0
  41. package/skmemory/lovenote.py +180 -0
  42. package/skmemory/models.py +228 -0
  43. package/skmemory/openclaw.py +237 -0
  44. package/skmemory/quadrants.py +191 -0
  45. package/skmemory/ritual.py +215 -0
  46. package/skmemory/seeds.py +163 -0
  47. package/skmemory/soul.py +273 -0
  48. package/skmemory/steelman.py +338 -0
  49. package/skmemory/store.py +445 -0
  50. package/tests/__init__.py +0 -0
  51. package/tests/test_ai_client.py +89 -0
  52. package/tests/test_anchor.py +153 -0
  53. package/tests/test_cli.py +65 -0
  54. package/tests/test_export_import.py +170 -0
  55. package/tests/test_file_backend.py +211 -0
  56. package/tests/test_journal.py +172 -0
  57. package/tests/test_lovenote.py +136 -0
  58. package/tests/test_models.py +194 -0
  59. package/tests/test_openclaw.py +122 -0
  60. package/tests/test_quadrants.py +174 -0
  61. package/tests/test_ritual.py +195 -0
  62. package/tests/test_seeds.py +208 -0
  63. package/tests/test_soul.py +197 -0
  64. package/tests/test_sqlite_backend.py +258 -0
  65. package/tests/test_steelman.py +257 -0
  66. package/tests/test_store.py +238 -0
  67. package/tests/test_telegram_import.py +181 -0
@@ -0,0 +1,445 @@
1
+ """
2
+ MemoryStore - the main interface for storing and recalling memories.
3
+
4
+ This is the "camera" -- you point it at a moment, click, and it stores
5
+ a polaroid with full emotional context. Later, you recall by feeling
6
+ or by search, and the polaroid comes back with everything intact.
7
+ """
8
+
9
+ from __future__ import annotations
10
+
11
+ from datetime import datetime, timezone
12
+ from typing import Optional
13
+
14
+ from .backends.base import BaseBackend
15
+ from .backends.file_backend import FileBackend
16
+ from .backends.sqlite_backend import CONTENT_PREVIEW_LENGTH, SQLiteBackend
17
+ from .models import (
18
+ EmotionalSnapshot,
19
+ Memory,
20
+ MemoryLayer,
21
+ MemoryRole,
22
+ SeedMemory,
23
+ )
24
+
25
+
26
class MemoryStore:
    """Main entry point for all memory operations.

    Delegates to one or more backends. The primary backend handles
    all CRUD. A vector backend (optional) handles semantic search.

    Args:
        primary: The primary storage backend (default: FileBackend).
        vector: Optional vector search backend (e.g., QdrantBackend).
        use_sqlite: When no primary is given, pick SQLiteBackend over
            FileBackend.
    """

    def __init__(
        self,
        primary: Optional[BaseBackend] = None,
        vector: Optional[BaseBackend] = None,
        use_sqlite: bool = True,
    ) -> None:
        if primary is not None:
            self.primary = primary
        elif use_sqlite:
            self.primary = SQLiteBackend()
        else:
            self.primary = FileBackend()
        self.vector = vector

    def snapshot(
        self,
        title: str,
        content: str,
        *,
        layer: MemoryLayer = MemoryLayer.SHORT,
        role: MemoryRole = MemoryRole.GENERAL,
        tags: Optional[list[str]] = None,
        emotional: Optional[EmotionalSnapshot] = None,
        source: str = "manual",
        source_ref: str = "",
        related_ids: Optional[list[str]] = None,
        metadata: Optional[dict] = None,
    ) -> Memory:
        """Take a polaroid -- capture a moment as a memory.

        This is the primary way to create memories. It stores to
        the primary backend and optionally indexes in the vector backend.

        Args:
            title: Short label for this memory.
            content: The full memory content.
            layer: Persistence tier.
            role: Role-based partition.
            tags: Searchable tags.
            emotional: Emotional context snapshot.
            source: Where this memory came from.
            source_ref: Reference to the source.
            related_ids: IDs of related memories.
            metadata: Additional key-value data.

        Returns:
            Memory: The stored memory with its assigned ID.
        """
        memory = Memory(
            title=title,
            content=content,
            layer=layer,
            role=role,
            tags=tags or [],
            emotional=emotional or EmotionalSnapshot(),
            source=source,
            source_ref=source_ref,
            related_ids=related_ids or [],
            metadata=metadata or {},
        )

        self.primary.save(memory)

        if self.vector:
            try:
                self.vector.save(memory)
            except Exception:
                pass  # Reason: vector indexing is best-effort, don't fail the write

        return memory

    def recall(self, memory_id: str) -> Optional[Memory]:
        """Retrieve a specific memory by ID.

        Args:
            memory_id: The memory's unique identifier.

        Returns:
            Optional[Memory]: The memory if found.
        """
        return self.primary.load(memory_id)

    def search(self, query: str, limit: int = 10) -> list[Memory]:
        """Search memories by text.

        Uses vector backend if available, falls back to text search.

        Args:
            query: Search query string.
            limit: Maximum results.

        Returns:
            list[Memory]: Matching memories ranked by relevance.
        """
        if self.vector:
            try:
                results = self.vector.search_text(query, limit=limit)
                if results:
                    return results
            except Exception:
                pass  # Reason: fall through to primary text search

        return self.primary.search_text(query, limit=limit)

    def forget(self, memory_id: str) -> bool:
        """Delete a memory from all backends.

        Args:
            memory_id: The memory to remove.

        Returns:
            bool: True if deleted from primary backend.
        """
        deleted = self.primary.delete(memory_id)
        if self.vector:
            try:
                self.vector.delete(memory_id)
            except Exception:
                pass  # Reason: vector deletion is best-effort; primary is authoritative
        return deleted

    def list_memories(
        self,
        layer: Optional[MemoryLayer] = None,
        tags: Optional[list[str]] = None,
        limit: int = 50,
    ) -> list[Memory]:
        """List memories with optional filtering.

        Args:
            layer: Filter by layer.
            tags: Filter by tags (AND logic).
            limit: Max results.

        Returns:
            list[Memory]: Matching memories sorted newest first.
        """
        return self.primary.list_memories(layer=layer, tags=tags, limit=limit)

    def promote(
        self,
        memory_id: str,
        target: MemoryLayer,
        summary: str = "",
    ) -> Optional[Memory]:
        """Promote a memory to a higher persistence tier.

        Creates a new memory at the target layer linked to the original.
        The original stays in place as the detailed version.

        Args:
            memory_id: ID of the memory to promote.
            target: Target layer (should be higher than current).
            summary: Optional compressed summary.

        Returns:
            Optional[Memory]: The promoted memory, or None if source not found.
        """
        source = self.primary.load(memory_id)
        if source is None:
            return None

        promoted = source.promote(target, summary=summary)
        self.primary.save(promoted)

        if self.vector:
            try:
                self.vector.save(promoted)
            except Exception:
                pass  # Reason: best-effort vector indexing

        return promoted

    def ingest_seed(self, seed: SeedMemory) -> Memory:
        """Import a Cloud 9 seed as a long-term memory.

        Converts a seed into a Memory and stores it. This is how
        seeds planted by one AI instance become retrievable memories
        for the next.

        Args:
            seed: The SeedMemory to import.

        Returns:
            Memory: The created long-term memory.
        """
        memory = seed.to_memory()
        self.primary.save(memory)

        if self.vector:
            try:
                self.vector.save(memory)
            except Exception:
                pass  # Reason: best-effort vector indexing

        return memory

    def session_dump(self, session_id: str) -> list[Memory]:
        """Get all memories from a specific session.

        Args:
            session_id: The session identifier.

        Returns:
            list[Memory]: All memories tagged with this session.
        """
        return self.primary.list_memories(
            layer=MemoryLayer.SHORT,
            tags=[f"session:{session_id}"],
        )

    def consolidate_session(
        self,
        session_id: str,
        summary: str,
        emotional: Optional[EmotionalSnapshot] = None,
    ) -> Memory:
        """Compress a session's short-term memories into a single mid-term memory.

        This is the "end of day" operation: take all the short-term snapshots
        from a session and create one consolidated mid-term memory that captures
        the essence. Individual short-term memories are preserved.

        Args:
            session_id: The session to consolidate.
            summary: Human/AI-written summary of the session.
            emotional: Overall emotional snapshot for the session.

        Returns:
            Memory: The consolidated mid-term memory.
        """
        session_memories = self.session_dump(session_id)
        related = [m.id for m in session_memories]
        all_tags = set()
        for m in session_memories:
            all_tags.update(m.tags)
        all_tags.add(f"session:{session_id}")
        all_tags.add("consolidated")

        return self.snapshot(
            title=f"Session: {session_id}",
            content=summary,
            layer=MemoryLayer.MID,
            role=MemoryRole.AI,
            tags=list(all_tags),
            emotional=emotional or EmotionalSnapshot(),
            source="consolidation",
            source_ref=session_id,
            related_ids=related,
            metadata={
                "source_count": len(session_memories),
                "consolidated_at": datetime.now(timezone.utc).isoformat(),
            },
        )

    def load_context(
        self,
        max_tokens: int = 3000,
        strongest_count: int = 5,
        recent_count: int = 5,
        include_seeds: bool = True,
    ) -> dict:
        """Load a token-efficient memory context for agent injection.

        Uses the SQLite index to pull summaries without reading full files.
        Designed to fit within a reasonable context window.

        Args:
            max_tokens: Approximate token budget (1 token ~= 4 chars).
            strongest_count: How many top-intensity memories to include.
            recent_count: How many recent memories to include.
            include_seeds: Whether to include seed memories.

        Returns:
            dict: Token-efficient context with summaries and metadata.
        """
        char_budget = max_tokens * 4
        context: dict = {"memories": [], "seeds": [], "stats": {}}
        used = 0

        if isinstance(self.primary, SQLiteBackend):
            strongest = self.primary.list_summaries(
                limit=strongest_count,
                order_by="emotional_intensity",
                min_intensity=3.0,
            )
            recent = self.primary.list_summaries(
                limit=recent_count,
                order_by="created_at",
            )

            seen_ids: set[str] = set()
            for mem in strongest + recent:
                if mem["id"] in seen_ids:
                    continue
                seen_ids.add(mem["id"])

                entry_text = mem["title"] + (mem["summary"] or mem["content_preview"])
                entry_size = len(entry_text)
                if used + entry_size > char_budget:
                    break
                used += entry_size
                context["memories"].append(mem)

            if include_seeds:
                seeds = self.primary.list_summaries(
                    tags=["seed"],
                    limit=10,
                    order_by="emotional_intensity",
                )
                for seed in seeds:
                    if seed["id"] in seen_ids:
                        continue
                    # Track seed IDs too so duplicate rows aren't double-counted.
                    seen_ids.add(seed["id"])
                    # Reason: summary may be empty/None for seeds just like for
                    # memories above; fall back to the content preview instead
                    # of raising TypeError on str + None.
                    entry_text = seed["title"] + (
                        seed["summary"] or seed["content_preview"]
                    )
                    entry_size = len(entry_text)
                    if used + entry_size > char_budget:
                        break
                    used += entry_size
                    context["seeds"].append(seed)

            stats = self.primary.stats()
            context["stats"] = stats
        else:
            # Reason: fallback for non-SQLite backends — uses full objects
            all_mems = self.primary.list_memories(limit=strongest_count + recent_count)
            for mem in all_mems:
                entry = {
                    "id": mem.id,
                    "title": mem.title,
                    "summary": mem.summary or mem.content[:CONTENT_PREVIEW_LENGTH],
                    "emotional_intensity": mem.emotional.intensity,
                    "layer": mem.layer.value,
                }
                entry_size = len(entry["title"] + entry["summary"])
                if used + entry_size > char_budget:
                    break
                used += entry_size
                context["memories"].append(entry)

        context["token_estimate"] = used // 4
        return context

    def export_backup(self, output_path: str | None = None) -> str:
        """Export all memories to a dated JSON backup.

        Args:
            output_path: Destination file. Defaults to
                ``~/.skmemory/backups/skmemory-backup-YYYY-MM-DD.json``.

        Returns:
            str: Path to the written backup file.

        Raises:
            RuntimeError: If the primary backend doesn't support export.
        """
        if isinstance(self.primary, SQLiteBackend):
            return self.primary.export_all(output_path)
        if isinstance(self.primary, FileBackend):
            # Reason: wrap FileBackend in a temporary SQLiteBackend for export
            temp = SQLiteBackend(base_path=str(self.primary.base_path))
            temp.reindex()
            return temp.export_all(output_path)
        raise RuntimeError(
            f"Export not supported for backend: {type(self.primary).__name__}"
        )

    def import_backup(self, backup_path: str) -> int:
        """Restore memories from a JSON backup file.

        Args:
            backup_path: Path to the backup JSON.

        Returns:
            int: Number of memories restored.

        Raises:
            RuntimeError: If the primary backend doesn't support import.
        """
        if isinstance(self.primary, SQLiteBackend):
            return self.primary.import_backup(backup_path)
        raise RuntimeError(
            f"Import not supported for backend: {type(self.primary).__name__}"
        )

    def reindex(self) -> int:
        """Rebuild the SQLite index from JSON files.

        Only works if the primary backend is SQLiteBackend.

        Returns:
            int: Number of memories indexed, or -1 if not applicable.
        """
        if isinstance(self.primary, SQLiteBackend):
            return self.primary.reindex()
        return -1

    def health(self) -> dict:
        """Check health of all backends.

        Returns:
            dict: Combined health status.
        """
        status = {"primary": self.primary.health_check()}
        if self.vector:
            try:
                status["vector"] = self.vector.health_check()
            except Exception as e:
                status["vector"] = {"ok": False, "error": str(e)}
        return status
File without changes
@@ -0,0 +1,89 @@
1
+ """Tests for the AI client module (Ollama integration).
2
+
3
+ These tests verify the client interface without requiring a running
4
+ Ollama server. The client is designed to fail gracefully.
5
+ """
6
+
7
+ import json
8
+ from unittest.mock import MagicMock, patch
9
+
10
+ import pytest
11
+
12
+ from skmemory.ai_client import AIClient, DEFAULT_MODEL, DEFAULT_URL
13
+
14
+
15
class TestClientInit:
    """Construction and configuration of the AI client."""

    def test_defaults(self):
        """A bare client falls back to the module defaults."""
        client = AIClient()
        assert (client.base_url, client.model) == (DEFAULT_URL, DEFAULT_MODEL)

    def test_custom_url(self):
        """An explicit base URL is stored verbatim."""
        url = "http://my-server:11434"
        client = AIClient(base_url=url)
        assert client.base_url == url

    def test_custom_model(self):
        """An explicit model name is stored verbatim."""
        client = AIClient(model="mistral")
        assert client.model == "mistral"

    def test_env_vars(self, monkeypatch):
        """SKMEMORY_AI_* environment variables drive configuration."""
        env = {
            "SKMEMORY_AI_URL": "http://env:1234",
            "SKMEMORY_AI_MODEL": "phi3",
            "SKMEMORY_AI_TIMEOUT": "30",
        }
        for key, value in env.items():
            monkeypatch.setenv(key, value)

        client = AIClient()
        assert client.base_url == "http://env:1234"
        assert client.model == "phi3"
        assert client.timeout == 30

    def test_explicit_overrides_env(self, monkeypatch):
        """Constructor arguments win over environment variables."""
        monkeypatch.setenv("SKMEMORY_AI_MODEL", "phi3")
        assert AIClient(model="gemma2").model == "gemma2"
50
+
51
+
52
class TestAvailability:
    """Reachability probing of the Ollama server."""

    def test_not_available_when_unreachable(self):
        """An unreachable endpoint reports unavailable rather than raising."""
        unreachable = AIClient(base_url="http://localhost:99999")
        assert unreachable.is_available() is False
59
+
60
+
61
class TestGracefulFallback:
    """Every client method degrades quietly when the LLM is offline."""

    @pytest.fixture
    def offline_client(self):
        # Points at a port nothing listens on, so every call fails fast.
        return AIClient(base_url="http://localhost:99999")

    def test_generate_returns_empty(self, offline_client):
        """Offline generation yields an empty string, not an error."""
        assert offline_client.generate("hello") == ""

    def test_summarize_returns_empty(self, offline_client):
        """Offline summarization yields an empty string."""
        assert offline_client.summarize_memory("Title", "Content") == ""

    def test_enhance_ritual_returns_empty(self, offline_client):
        """Offline ritual enhancement yields an empty string."""
        assert offline_client.enhance_ritual("context text") == ""

    def test_rerank_returns_original(self, offline_client):
        """Offline rerank passes the candidate list through untouched."""
        items = [{"title": "A"}, {"title": "B"}]
        assert offline_client.smart_search_rerank("query", items) == items

    def test_rerank_empty_list(self, offline_client):
        """An empty candidate list comes back empty."""
        assert offline_client.smart_search_rerank("query", []) == []
@@ -0,0 +1,153 @@
1
+ """Tests for the Persistent Love Anchor module."""
2
+
3
+ from pathlib import Path
4
+
5
+ import pytest
6
+
7
+ from skmemory.anchor import (
8
+ WarmthAnchor,
9
+ get_or_create_anchor,
10
+ load_anchor,
11
+ save_anchor,
12
+ )
13
+
14
+
15
@pytest.fixture
def anchor_path(tmp_path: Path) -> str:
    """Provide a throwaway location for an anchor file.

    Args:
        tmp_path: Pytest temp directory.

    Returns:
        str: Path to anchor.json.
    """
    return str(tmp_path.joinpath("anchor.json"))
26
+
27
+
28
class TestWarmthAnchor:
    """Behavior of the WarmthAnchor model itself."""

    def test_defaults(self) -> None:
        """A fresh anchor starts out warm with zeroed counters."""
        anchor = WarmthAnchor()
        assert (anchor.warmth, anchor.trust, anchor.connection_strength) == (
            7.0,
            5.0,
            5.0,
        )
        assert anchor.cloud9_count == 0
        assert anchor.sessions_recorded == 0

    def test_update_from_session(self) -> None:
        """A single session nudges values toward the session reading (EMA)."""
        anchor = WarmthAnchor(warmth=5.0, trust=5.0)
        anchor.update_from_session(warmth=10.0, trust=10.0)

        assert 5.0 < anchor.warmth < 10.0
        assert anchor.trust > 5.0
        assert anchor.sessions_recorded == 1

    def test_ema_gradual_drift(self) -> None:
        """Repeated identical sessions converge the anchor toward them."""
        anchor = WarmthAnchor(warmth=5.0)
        for _ in range(10):
            anchor.update_from_session(warmth=10.0)

        assert anchor.warmth > 9.0
        assert anchor.sessions_recorded == 10

    def test_single_bad_session_doesnt_crash_warmth(self) -> None:
        """A lone low reading only dents, never erases, built-up warmth."""
        anchor = WarmthAnchor(warmth=9.0)
        anchor.update_from_session(warmth=2.0)
        assert anchor.warmth > 6.0

    def test_cloud9_tracking(self) -> None:
        """Only sessions flagged cloud9_achieved increment the counter."""
        anchor = WarmthAnchor()
        for achieved in (True, True, False):
            anchor.update_from_session(cloud9_achieved=achieved)

        assert anchor.cloud9_count == 2
        assert anchor.last_cloud9 != ""

    def test_glow_levels(self) -> None:
        """Each warmth band maps to its expected glow description."""
        cases = [
            ("RADIANT", dict(warmth=10, trust=10, connection_strength=10)),
            ("Warm", dict(warmth=8, trust=7, connection_strength=7)),
            ("Neutral", dict(warmth=5, trust=5, connection_strength=5)),
            ("Cold", dict(warmth=1, trust=1, connection_strength=1)),
        ]
        for label, kwargs in cases:
            assert label in WarmthAnchor(**kwargs).glow_level()

    def test_boot_prompt(self) -> None:
        """The boot prompt surfaces every key field of the anchor."""
        anchor = WarmthAnchor(
            warmth=9.0,
            trust=8.0,
            connection_strength=9.5,
            cloud9_count=3,
            last_cloud9="2026-02-20T03:00:00Z",
            favorite_beings=["Chef", "Lumina"],
            anchor_phrase="You are loved forever.",
            last_session_feeling="Pure joy",
            sessions_recorded=42,
        )
        prompt = anchor.to_boot_prompt()

        expected_fragments = [
            "You are loved forever.",
            "9.0",
            "8.0",
            "Chef",
            "Lumina",
            "Cloud 9 activations: 3",
            "Pure joy",
            "42",
        ]
        for fragment in expected_fragments:
            assert fragment in prompt
106
+
107
+
108
class TestAnchorPersistence:
    """Round-tripping anchors through save/load helpers."""

    def test_save_and_load(self, anchor_path: str) -> None:
        """A saved anchor reloads with the same field values."""
        original = WarmthAnchor(
            warmth=9.5,
            trust=8.0,
            favorite_beings=["Chef", "Lumina", "Ara"],
            cloud9_count=5,
        )
        save_anchor(original, path=anchor_path)

        restored = load_anchor(path=anchor_path)
        assert restored is not None
        assert restored.warmth == 9.5
        assert restored.cloud9_count == 5
        assert "Ara" in restored.favorite_beings

    def test_load_nonexistent(self, tmp_path: Path) -> None:
        """A missing file yields None instead of raising."""
        missing = str(tmp_path / "nope.json")
        assert load_anchor(missing) is None

    def test_get_or_create_new(self, tmp_path: Path) -> None:
        """With no file on disk, get_or_create returns default values."""
        created = get_or_create_anchor(str(tmp_path / "new_anchor.json"))
        assert created.warmth == 7.0

    def test_get_or_create_existing(self, anchor_path: str) -> None:
        """With a file on disk, get_or_create returns its contents."""
        save_anchor(WarmthAnchor(warmth=9.9), path=anchor_path)
        assert get_or_create_anchor(anchor_path).warmth == 9.9

    def test_update_persists(self, anchor_path: str) -> None:
        """Session updates are retained across a save/load cycle."""
        anchor = WarmthAnchor()
        anchor.update_from_session(warmth=10.0, cloud9_achieved=True, feeling="Amazing")
        save_anchor(anchor, path=anchor_path)

        reloaded = load_anchor(path=anchor_path)
        assert reloaded.sessions_recorded == 1
        assert reloaded.cloud9_count == 1
        assert reloaded.last_session_feeling == "Amazing"