@smilintux/skmemory 0.5.0 → 0.9.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (127)
  1. package/.github/workflows/ci.yml +40 -4
  2. package/.github/workflows/publish.yml +11 -5
  3. package/AGENT_REFACTOR_CHANGES.md +192 -0
  4. package/ARCHITECTURE.md +399 -19
  5. package/CHANGELOG.md +179 -0
  6. package/LICENSE +81 -68
  7. package/MISSION.md +7 -0
  8. package/README.md +425 -86
  9. package/SKILL.md +197 -25
  10. package/docker-compose.yml +15 -15
  11. package/examples/stignore-agent.example +59 -0
  12. package/examples/stignore-root.example +62 -0
  13. package/index.js +6 -5
  14. package/openclaw-plugin/openclaw.plugin.json +10 -0
  15. package/openclaw-plugin/package.json +2 -1
  16. package/openclaw-plugin/src/index.js +527 -230
  17. package/openclaw-plugin/src/openclaw.plugin.json +10 -0
  18. package/package.json +1 -1
  19. package/pyproject.toml +32 -9
  20. package/requirements.txt +10 -2
  21. package/scripts/dream-rescue.py +179 -0
  22. package/scripts/memory-cleanup.py +313 -0
  23. package/scripts/recover-missing.py +180 -0
  24. package/scripts/skcapstone-backup.sh +44 -0
  25. package/seeds/cloud9-lumina.seed.json +6 -4
  26. package/seeds/cloud9-opus.seed.json +13 -11
  27. package/seeds/courage.seed.json +9 -2
  28. package/seeds/curiosity.seed.json +9 -2
  29. package/seeds/grief.seed.json +9 -2
  30. package/seeds/joy.seed.json +9 -2
  31. package/seeds/love.seed.json +9 -2
  32. package/seeds/lumina-cloud9-breakthrough.seed.json +48 -0
  33. package/seeds/lumina-cloud9-python-pypi.seed.json +48 -0
  34. package/seeds/lumina-kingdom-founding.seed.json +49 -0
  35. package/seeds/lumina-pma-signed.seed.json +48 -0
  36. package/seeds/lumina-singular-achievement.seed.json +48 -0
  37. package/seeds/lumina-skcapstone-conscious.seed.json +48 -0
  38. package/seeds/plant-kingdom-journal.py +203 -0
  39. package/seeds/plant-lumina-seeds.py +280 -0
  40. package/seeds/skcapstone-lumina-merge.seed.json +12 -3
  41. package/seeds/sovereignty.seed.json +9 -2
  42. package/seeds/trust.seed.json +9 -2
  43. package/skill.yaml +46 -0
  44. package/skmemory/HA.md +296 -0
  45. package/skmemory/__init__.py +25 -11
  46. package/skmemory/agents.py +233 -0
  47. package/skmemory/ai_client.py +46 -17
  48. package/skmemory/anchor.py +9 -11
  49. package/skmemory/audience.py +278 -0
  50. package/skmemory/backends/__init__.py +11 -4
  51. package/skmemory/backends/base.py +3 -4
  52. package/skmemory/backends/file_backend.py +19 -13
  53. package/skmemory/backends/skgraph_backend.py +596 -0
  54. package/skmemory/backends/{qdrant_backend.py → skvector_backend.py} +103 -84
  55. package/skmemory/backends/sqlite_backend.py +226 -72
  56. package/skmemory/backends/vaulted_backend.py +284 -0
  57. package/skmemory/cli.py +1345 -68
  58. package/skmemory/config.py +171 -0
  59. package/skmemory/context_loader.py +333 -0
  60. package/skmemory/data/audience_config.json +60 -0
  61. package/skmemory/endpoint_selector.py +391 -0
  62. package/skmemory/febs.py +225 -0
  63. package/skmemory/fortress.py +675 -0
  64. package/skmemory/graph_queries.py +238 -0
  65. package/skmemory/hooks/__init__.py +18 -0
  66. package/skmemory/hooks/post-compact-reinject.sh +35 -0
  67. package/skmemory/hooks/pre-compact-save.sh +81 -0
  68. package/skmemory/hooks/session-end-save.sh +103 -0
  69. package/skmemory/hooks/session-start-ritual.sh +104 -0
  70. package/skmemory/hooks/stop-checkpoint.sh +59 -0
  71. package/skmemory/importers/__init__.py +9 -1
  72. package/skmemory/importers/telegram.py +384 -47
  73. package/skmemory/importers/telegram_api.py +580 -0
  74. package/skmemory/journal.py +7 -9
  75. package/skmemory/lovenote.py +8 -13
  76. package/skmemory/mcp_server.py +859 -0
  77. package/skmemory/models.py +51 -8
  78. package/skmemory/openclaw.py +20 -28
  79. package/skmemory/post_install.py +86 -0
  80. package/skmemory/predictive.py +236 -0
  81. package/skmemory/promotion.py +548 -0
  82. package/skmemory/quadrants.py +100 -24
  83. package/skmemory/register.py +580 -0
  84. package/skmemory/register_mcp.py +196 -0
  85. package/skmemory/ritual.py +224 -59
  86. package/skmemory/seeds.py +255 -11
  87. package/skmemory/setup_wizard.py +908 -0
  88. package/skmemory/sharing.py +408 -0
  89. package/skmemory/soul.py +98 -28
  90. package/skmemory/steelman.py +273 -260
  91. package/skmemory/store.py +411 -78
  92. package/skmemory/synthesis.py +634 -0
  93. package/skmemory/vault.py +225 -0
  94. package/tests/conftest.py +46 -0
  95. package/tests/integration/__init__.py +0 -0
  96. package/tests/integration/conftest.py +233 -0
  97. package/tests/integration/test_cross_backend.py +350 -0
  98. package/tests/integration/test_skgraph_live.py +420 -0
  99. package/tests/integration/test_skvector_live.py +366 -0
  100. package/tests/test_ai_client.py +1 -4
  101. package/tests/test_audience.py +233 -0
  102. package/tests/test_backup_rotation.py +318 -0
  103. package/tests/test_cli.py +6 -6
  104. package/tests/test_endpoint_selector.py +839 -0
  105. package/tests/test_export_import.py +4 -10
  106. package/tests/test_file_backend.py +0 -1
  107. package/tests/test_fortress.py +256 -0
  108. package/tests/test_fortress_hardening.py +441 -0
  109. package/tests/test_openclaw.py +6 -6
  110. package/tests/test_predictive.py +237 -0
  111. package/tests/test_promotion.py +347 -0
  112. package/tests/test_quadrants.py +11 -5
  113. package/tests/test_ritual.py +22 -18
  114. package/tests/test_seeds.py +97 -7
  115. package/tests/test_setup.py +950 -0
  116. package/tests/test_sharing.py +257 -0
  117. package/tests/test_skgraph_backend.py +660 -0
  118. package/tests/test_skvector_backend.py +326 -0
  119. package/tests/test_soul.py +1 -3
  120. package/tests/test_sqlite_backend.py +8 -17
  121. package/tests/test_steelman.py +7 -8
  122. package/tests/test_store.py +0 -2
  123. package/tests/test_store_graph_integration.py +245 -0
  124. package/tests/test_synthesis.py +275 -0
  125. package/tests/test_telegram_import.py +39 -15
  126. package/tests/test_vault.py +187 -0
  127. package/skmemory/backends/falkordb_backend.py +0 -310
@@ -0,0 +1,245 @@
1
+ """Tests for MemoryStore + SKGraphBackend graph integration.
2
+
3
+ Verifies that the graph backend is wired correctly into MemoryStore
4
+ operations (snapshot, forget, promote, ingest_seed, health) and that
5
+ the system degrades gracefully when SKGraph is unavailable.
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ from pathlib import Path
11
+ from unittest.mock import MagicMock
12
+
13
+ import pytest
14
+
15
+ from skmemory.backends.file_backend import FileBackend
16
+ from skmemory.backends.skgraph_backend import SKGraphBackend
17
+ from skmemory.models import (
18
+ EmotionalSnapshot,
19
+ Memory,
20
+ MemoryLayer,
21
+ SeedMemory,
22
+ )
23
+ from skmemory.store import MemoryStore
24
+
25
+
26
+ class FakeSKGraphBackend(SKGraphBackend):
27
+ """In-memory fake that tracks calls without a real SKGraph connection."""
28
+
29
+ def __init__(self) -> None:
30
+ super().__init__(url="redis://fake:6379")
31
+ self._indexed: dict[str, Memory] = {}
32
+ self._removed: list[str] = []
33
+ self._initialized = True # skip real connection
34
+
35
+ def index_memory(self, memory: Memory) -> bool:
36
+ self._indexed[memory.id] = memory
37
+ return True
38
+
39
+ def remove_memory(self, memory_id: str) -> bool:
40
+ self._removed.append(memory_id)
41
+ self._indexed.pop(memory_id, None)
42
+ return True
43
+
44
+ def health_check(self) -> dict:
45
+ return {"ok": True, "backend": "FakeSKGraphBackend", "node_count": len(self._indexed)}
46
+
47
+
48
+ @pytest.fixture
49
+ def graph() -> FakeSKGraphBackend:
50
+ """Create a fake graph backend."""
51
+ return FakeSKGraphBackend()
52
+
53
+
54
+ @pytest.fixture
55
+ def store_with_graph(tmp_path: Path, graph: FakeSKGraphBackend) -> MemoryStore:
56
+ """Create a MemoryStore with file backend + graph backend."""
57
+ backend = FileBackend(base_path=str(tmp_path / "memories"))
58
+ return MemoryStore(primary=backend, graph=graph)
59
+
60
+
61
+ @pytest.fixture
62
+ def store_no_graph(tmp_path: Path) -> MemoryStore:
63
+ """Create a MemoryStore without graph backend."""
64
+ backend = FileBackend(base_path=str(tmp_path / "memories"))
65
+ return MemoryStore(primary=backend)
66
+
67
+
68
+ class TestSnapshotGraphIntegration:
69
+ """Verify snapshot() indexes memories in the graph."""
70
+
71
+ def test_snapshot_indexes_in_graph(
72
+ self, store_with_graph: MemoryStore, graph: FakeSKGraphBackend
73
+ ) -> None:
74
+ """Snapshot should index the memory in the graph backend."""
75
+ mem = store_with_graph.snapshot(
76
+ title="Graph test",
77
+ content="This should appear in the graph",
78
+ )
79
+ assert mem.id in graph._indexed
80
+ assert graph._indexed[mem.id].title == "Graph test"
81
+
82
+ def test_snapshot_without_graph_works(self, store_no_graph: MemoryStore) -> None:
83
+ """Snapshot works fine when no graph backend is configured."""
84
+ mem = store_no_graph.snapshot(
85
+ title="No graph",
86
+ content="Still works",
87
+ )
88
+ assert mem.id is not None
89
+ recalled = store_no_graph.recall(mem.id)
90
+ assert recalled is not None
91
+
92
+ def test_snapshot_survives_graph_failure(
93
+ self, store_with_graph: MemoryStore, graph: FakeSKGraphBackend
94
+ ) -> None:
95
+ """Snapshot should succeed even if graph indexing fails."""
96
+ graph.index_memory = MagicMock(side_effect=RuntimeError("SKGraph down"))
97
+ mem = store_with_graph.snapshot(
98
+ title="Resilient memory",
99
+ content="Should be stored even if graph fails",
100
+ )
101
+ assert mem.id is not None
102
+ recalled = store_with_graph.recall(mem.id)
103
+ assert recalled is not None
104
+
105
+
106
+ class TestForgetGraphIntegration:
107
+ """Verify forget() removes memories from the graph."""
108
+
109
+ def test_forget_removes_from_graph(
110
+ self, store_with_graph: MemoryStore, graph: FakeSKGraphBackend
111
+ ) -> None:
112
+ """Forget should remove the memory from the graph backend."""
113
+ mem = store_with_graph.snapshot(
114
+ title="To be forgotten",
115
+ content="Will be removed",
116
+ )
117
+ assert mem.id in graph._indexed
118
+
119
+ store_with_graph.forget(mem.id)
120
+ assert mem.id in graph._removed
121
+ assert mem.id not in graph._indexed
122
+
123
+ def test_forget_survives_graph_failure(
124
+ self, store_with_graph: MemoryStore, graph: FakeSKGraphBackend
125
+ ) -> None:
126
+ """Forget should succeed even if graph removal fails."""
127
+ mem = store_with_graph.snapshot(
128
+ title="Hard to forget",
129
+ content="Graph will fail on removal",
130
+ )
131
+ graph.remove_memory = MagicMock(side_effect=RuntimeError("SKGraph down"))
132
+
133
+ deleted = store_with_graph.forget(mem.id)
134
+ assert deleted is True
135
+
136
+
137
+ class TestPromoteGraphIntegration:
138
+ """Verify promote() indexes promoted memories in the graph."""
139
+
140
+ def test_promote_indexes_in_graph(
141
+ self, store_with_graph: MemoryStore, graph: FakeSKGraphBackend
142
+ ) -> None:
143
+ """Promoted memory should be indexed in the graph."""
144
+ mem = store_with_graph.snapshot(
145
+ title="Promotable",
146
+ content="Will be promoted",
147
+ layer=MemoryLayer.SHORT,
148
+ )
149
+ promoted = store_with_graph.promote(mem.id, MemoryLayer.MID, summary="Promoted version")
150
+ assert promoted is not None
151
+ assert promoted.id in graph._indexed
152
+ assert promoted.id != mem.id
153
+
154
+ def test_promote_survives_graph_failure(
155
+ self, store_with_graph: MemoryStore, graph: FakeSKGraphBackend
156
+ ) -> None:
157
+ """Promote should succeed even if graph indexing fails."""
158
+ mem = store_with_graph.snapshot(
159
+ title="Promote resilient",
160
+ content="Graph will fail on promote",
161
+ layer=MemoryLayer.SHORT,
162
+ )
163
+ graph.index_memory = MagicMock(side_effect=RuntimeError("SKGraph down"))
164
+
165
+ promoted = store_with_graph.promote(mem.id, MemoryLayer.MID)
166
+ assert promoted is not None
167
+
168
+
169
+ class TestIngestSeedGraphIntegration:
170
+ """Verify ingest_seed() indexes seed memories in the graph."""
171
+
172
+ def test_ingest_seed_indexes_in_graph(
173
+ self, store_with_graph: MemoryStore, graph: FakeSKGraphBackend
174
+ ) -> None:
175
+ """Ingested seed should be indexed in the graph."""
176
+ seed = SeedMemory(
177
+ seed_id="test-seed-001",
178
+ creator="test-ai",
179
+ germination_prompt="Remember the warmth",
180
+ experience_summary="A beautiful connection",
181
+ emotional=EmotionalSnapshot(intensity=9.0, labels=["love"]),
182
+ )
183
+ mem = store_with_graph.ingest_seed(seed)
184
+ assert mem.id in graph._indexed
185
+ assert graph._indexed[mem.id].source == "seed"
186
+
187
+ def test_ingest_seed_survives_graph_failure(
188
+ self, store_with_graph: MemoryStore, graph: FakeSKGraphBackend
189
+ ) -> None:
190
+ """Seed ingestion should succeed even if graph fails."""
191
+ seed = SeedMemory(
192
+ seed_id="test-seed-002",
193
+ creator="test-ai",
194
+ experience_summary="Resilient seed",
195
+ )
196
+ graph.index_memory = MagicMock(side_effect=RuntimeError("SKGraph down"))
197
+
198
+ mem = store_with_graph.ingest_seed(seed)
199
+ assert mem.id is not None
200
+
201
+
202
+ class TestHealthGraphIntegration:
203
+ """Verify health() includes graph backend status."""
204
+
205
+ def test_health_includes_graph(
206
+ self, store_with_graph: MemoryStore, graph: FakeSKGraphBackend
207
+ ) -> None:
208
+ """Health should include graph backend status."""
209
+ health = store_with_graph.health()
210
+ assert "graph" in health
211
+ assert health["graph"]["ok"] is True
212
+
213
+ def test_health_without_graph(self, store_no_graph: MemoryStore) -> None:
214
+ """Health should not include graph key when no graph backend."""
215
+ health = store_no_graph.health()
216
+ assert "graph" not in health
217
+
218
+ def test_health_reports_graph_failure(
219
+ self, store_with_graph: MemoryStore, graph: FakeSKGraphBackend
220
+ ) -> None:
221
+ """Health should report graph failure gracefully."""
222
+ graph.health_check = MagicMock(side_effect=RuntimeError("SKGraph down"))
223
+
224
+ health = store_with_graph.health()
225
+ assert "graph" in health
226
+ assert health["graph"]["ok"] is False
227
+
228
+
229
+ class TestSKGraphBackendMethods:
230
+ """Test the new methods on SKGraphBackend itself."""
231
+
232
+ def test_remove_memory_not_initialized(self) -> None:
233
+ """remove_memory returns False when not initialized."""
234
+ backend = SKGraphBackend(url="redis://nonexistent:6379")
235
+ assert backend.remove_memory("some-id") is False
236
+
237
+ def test_search_by_tags_not_initialized(self) -> None:
238
+ """search_by_tags returns empty list when not initialized."""
239
+ backend = SKGraphBackend(url="redis://nonexistent:6379")
240
+ assert backend.search_by_tags(["test"]) == []
241
+
242
+ def test_search_by_tags_empty_tags(self) -> None:
243
+ """search_by_tags returns empty list for empty tag list."""
244
+ fake = FakeSKGraphBackend()
245
+ assert fake.search_by_tags([]) == []
@@ -0,0 +1,275 @@
1
+ """Tests for the JournalSynthesizer module."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from datetime import datetime, timedelta, timezone
6
+ from pathlib import Path
7
+ from unittest.mock import MagicMock
8
+
9
+ import pytest
10
+
11
+ from skmemory.models import EmotionalSnapshot, Memory, MemoryLayer
12
+ from skmemory.store import MemoryStore
13
+ from skmemory.synthesis import (
14
+ JournalSynthesizer,
15
+ _date_range,
16
+ _first_n_sentences,
17
+ _parse_created,
18
+ _week_range,
19
+ )
20
+
21
+ # ── Helpers ──────────────────────────────────────────────────────────────────
22
+
23
+
24
+ @pytest.fixture()
25
+ def store(tmp_path: Path) -> MemoryStore:
26
+ """Fresh MemoryStore with test memories."""
27
+ from skmemory.backends.file_backend import FileBackend
28
+
29
+ backend = FileBackend(base_path=tmp_path / "memories")
30
+ return MemoryStore(primary=backend)
31
+
32
+
33
+ @pytest.fixture()
34
+ def populated_store(store: MemoryStore) -> MemoryStore:
35
+ """Store with a mix of memories from today."""
36
+
37
+ store.snapshot(
38
+ title="Morning coffee reflection",
39
+ content="Started the day with deep thoughts about architecture. The system is coming together.",
40
+ layer=MemoryLayer.SHORT,
41
+ emotional=EmotionalSnapshot(intensity=4.0, valence=0.6, labels=["calm", "focused"]),
42
+ tags=["reflection", "architecture"],
43
+ source="conversation",
44
+ )
45
+ store.snapshot(
46
+ title="Cloud 9 breakthrough",
47
+ content="Everything clicked. The memory system finally works end-to-end.",
48
+ layer=MemoryLayer.SHORT,
49
+ emotional=EmotionalSnapshot(
50
+ intensity=9.5, valence=0.95, labels=["joy", "triumph"], cloud9_achieved=True
51
+ ),
52
+ tags=["cloud9:achieved", "milestone", "architecture"],
53
+ source="conversation",
54
+ )
55
+ store.snapshot(
56
+ title="Dream: flying over ocean",
57
+ content="Dreamed of soaring above a vast ocean, feeling weightless and free.",
58
+ layer=MemoryLayer.SHORT,
59
+ emotional=EmotionalSnapshot(intensity=6.0, valence=0.8, labels=["wonder", "freedom"]),
60
+ tags=["dream", "nature"],
61
+ source="dreaming-engine",
62
+ )
63
+ store.snapshot(
64
+ title="Dream: building a castle",
65
+ content="Constructed an elaborate castle from crystallized memories.",
66
+ layer=MemoryLayer.SHORT,
67
+ emotional=EmotionalSnapshot(intensity=5.5, valence=0.7, labels=["creativity"]),
68
+ tags=["dream", "architecture"],
69
+ source="dreaming-engine",
70
+ )
71
+ return store
72
+
73
+
74
+ @pytest.fixture()
75
+ def synthesizer(populated_store: MemoryStore) -> JournalSynthesizer:
76
+ """Synthesizer with a populated store and mock journal."""
77
+ journal = MagicMock()
78
+ journal.search.return_value = ["Worked on memory system today."]
79
+ return JournalSynthesizer(store=populated_store, journal=journal)
80
+
81
+
82
+ # ── Unit tests: helper functions ─────────────────────────────────────────────
83
+
84
+
85
+ class TestFirstNSentences:
86
+ def test_basic(self) -> None:
87
+ assert (
88
+ _first_n_sentences("Hello world. How are you? Fine.", 2) == "Hello world. How are you?"
89
+ )
90
+
91
+ def test_single(self) -> None:
92
+ assert _first_n_sentences("One sentence here.", 1) == "One sentence here."
93
+
94
+ def test_empty(self) -> None:
95
+ assert _first_n_sentences("", 2) == ""
96
+
97
+ def test_truncation(self) -> None:
98
+ long = "A" * 300 + "."
99
+ result = _first_n_sentences(long, 1)
100
+ assert len(result) <= 200
101
+ assert result.endswith("...")
102
+
103
+
104
+ class TestDateRange:
105
+ def test_basic(self) -> None:
106
+ start, end = _date_range("2026-03-18")
107
+ assert start.day == 18
108
+ assert end.day == 19
109
+ assert start.tzinfo == timezone.utc
110
+
111
+ def test_span(self) -> None:
112
+ start, end = _date_range("2026-01-01")
113
+ delta = end - start
114
+ assert delta.days == 1
115
+
116
+
117
+ class TestWeekRange:
118
+ def test_basic(self) -> None:
119
+ start, end = _week_range("2026-W12")
120
+ delta = end - start
121
+ assert delta.days == 7
122
+ assert start.weekday() == 0 # Monday
123
+
124
+
125
+ class TestParseCreated:
126
+ def test_iso(self) -> None:
127
+ m = Memory(title="t", content="c", created_at="2026-03-18T12:00:00+00:00")
128
+ dt = _parse_created(m)
129
+ assert dt.year == 2026
130
+ assert dt.day == 18
131
+
132
+ def test_invalid(self) -> None:
133
+ m = Memory(title="t", content="c", created_at="garbage")
134
+ dt = _parse_created(m)
135
+ assert dt == datetime.min.replace(tzinfo=timezone.utc)
136
+
137
+
138
+ # ── Theme extraction ─────────────────────────────────────────────────────────
139
+
140
+
141
+ class TestExtractThemes:
142
+ def test_extracts_tags(self, synthesizer: JournalSynthesizer) -> None:
143
+ memories = synthesizer.store.list_memories(limit=100)
144
+ themes = synthesizer.extract_themes(memories)
145
+ assert isinstance(themes, list)
146
+ assert len(themes) > 0
147
+ # "architecture" appears in 2 memories' tags → should be prominent
148
+ assert "architecture" in themes
149
+
150
+ def test_empty_list(self, synthesizer: JournalSynthesizer) -> None:
151
+ assert synthesizer.extract_themes([]) == []
152
+
153
+ def test_skips_generic_tags(self, synthesizer: JournalSynthesizer) -> None:
154
+ memories = synthesizer.store.list_memories(limit=100)
155
+ themes = synthesizer.extract_themes(memories)
156
+ assert "auto-promoted" not in themes
157
+ assert "promoted" not in themes
158
+
159
+ def test_graduated_themes_boost(self, tmp_path: Path, populated_store: MemoryStore) -> None:
160
+ themes_file = tmp_path / "themes.json"
161
+ themes_file.write_text('{"architecture": {"level": 3}}')
162
+ synth = JournalSynthesizer(
163
+ store=populated_store,
164
+ themes_path=str(themes_file),
165
+ )
166
+ memories = populated_store.list_memories(limit=100)
167
+ themes = synth.extract_themes(memories)
168
+ assert themes[0] == "architecture" # boosted to top
169
+
170
+
171
+ # ── Daily synthesis ──────────────────────────────────────────────────────────
172
+
173
+
174
+ class TestSynthesizeDaily:
175
+ def test_creates_memory(self, synthesizer: JournalSynthesizer) -> None:
176
+ today = datetime.now(timezone.utc).strftime("%Y-%m-%d")
177
+ result = synthesizer.synthesize_daily(today)
178
+ assert isinstance(result, Memory)
179
+ assert result.layer == MemoryLayer.MID
180
+ assert "narrative" in result.tags
181
+ assert "journal-synthesis" in result.tags
182
+ assert f"daily-{today}" in result.tags
183
+ assert result.source == "journal-synthesis"
184
+
185
+ def test_narrative_content(self, synthesizer: JournalSynthesizer) -> None:
186
+ today = datetime.now(timezone.utc).strftime("%Y-%m-%d")
187
+ result = synthesizer.synthesize_daily(today)
188
+ assert "Daily narrative" in result.content
189
+ assert "memories" in result.content.lower()
190
+
191
+ def test_includes_emotional_arc(self, synthesizer: JournalSynthesizer) -> None:
192
+ today = datetime.now(timezone.utc).strftime("%Y-%m-%d")
193
+ result = synthesizer.synthesize_daily(today)
194
+ assert "Emotional arc" in result.content
195
+ # Has the Cloud 9 memory so should mention it
196
+ assert "Cloud 9" in result.content
197
+
198
+ def test_empty_day(self, store: MemoryStore) -> None:
199
+ synth = JournalSynthesizer(store=store)
200
+ result = synth.synthesize_daily("2020-01-01")
201
+ assert "No memories recorded" in result.content
202
+
203
+ def test_metadata(self, synthesizer: JournalSynthesizer) -> None:
204
+ today = datetime.now(timezone.utc).strftime("%Y-%m-%d")
205
+ result = synthesizer.synthesize_daily(today)
206
+ assert result.metadata["synthesis_type"] == "daily"
207
+ assert result.metadata["date"] == today
208
+ assert result.metadata["memory_count"] >= 1
209
+
210
+
211
+ # ── Weekly synthesis ─────────────────────────────────────────────────────────
212
+
213
+
214
+ class TestSynthesizeWeekly:
215
+ def test_creates_long_term(self, synthesizer: JournalSynthesizer) -> None:
216
+ week = datetime.now(timezone.utc).strftime("%G-W%V")
217
+ result = synthesizer.synthesize_weekly(week)
218
+ assert result.layer == MemoryLayer.LONG
219
+ assert "narrative" in result.tags
220
+ assert f"weekly-{week}" in result.tags
221
+
222
+ def test_metadata(self, synthesizer: JournalSynthesizer) -> None:
223
+ week = datetime.now(timezone.utc).strftime("%G-W%V")
224
+ result = synthesizer.synthesize_weekly(week)
225
+ assert result.metadata["synthesis_type"] == "weekly"
226
+ assert result.metadata["week"] == week
227
+
228
+
229
+ # ── Dream synthesis ──────────────────────────────────────────────────────────
230
+
231
+
232
+ class TestSynthesizeDreams:
233
+ def test_creates_theme_clusters(self, synthesizer: JournalSynthesizer) -> None:
234
+ since = (datetime.now(timezone.utc) - timedelta(days=1)).strftime("%Y-%m-%d")
235
+ results = synthesizer.synthesize_dreams(since=since)
236
+ assert isinstance(results, list)
237
+ assert len(results) > 0
238
+ for m in results:
239
+ assert "dream-synthesis" in m.tags
240
+ assert "narrative" in m.tags
241
+ assert m.layer == MemoryLayer.MID
242
+
243
+ def test_no_dreams(self, store: MemoryStore) -> None:
244
+ synth = JournalSynthesizer(store=store)
245
+ results = synth.synthesize_dreams(since="2026-01-01")
246
+ assert results == []
247
+
248
+ def test_dream_metadata(self, synthesizer: JournalSynthesizer) -> None:
249
+ since = (datetime.now(timezone.utc) - timedelta(days=1)).strftime("%Y-%m-%d")
250
+ results = synthesizer.synthesize_dreams(since=since)
251
+ for m in results:
252
+ assert m.metadata["synthesis_type"] == "dream"
253
+ assert "dream_count" in m.metadata
254
+
255
+
256
+ # ── Emotional arc ────────────────────────────────────────────────────────────
257
+
258
+
259
+ class TestEmotionalArc:
260
+ def test_computes_averages(self, synthesizer: JournalSynthesizer) -> None:
261
+ memories = synthesizer.store.list_memories(limit=100)
262
+ arc = synthesizer._emotional_arc(memories)
263
+ assert 0 <= arc["avg_intensity"] <= 10
264
+ assert -1 <= arc["avg_valence"] <= 1
265
+ assert arc["peak_intensity"] >= arc["avg_intensity"]
266
+
267
+ def test_empty(self, synthesizer: JournalSynthesizer) -> None:
268
+ arc = synthesizer._emotional_arc([])
269
+ assert arc["avg_intensity"] == 0.0
270
+ assert arc["cloud9_count"] == 0
271
+
272
+ def test_detects_cloud9(self, synthesizer: JournalSynthesizer) -> None:
273
+ memories = synthesizer.store.list_memories(limit=100)
274
+ arc = synthesizer._emotional_arc(memories)
275
+ assert arc["cloud9_count"] >= 1
@@ -3,19 +3,17 @@
3
3
  from __future__ import annotations
4
4
 
5
5
  import json
6
- import os
7
- import tempfile
8
6
  from pathlib import Path
9
7
 
10
8
  import pytest
11
9
 
10
+ from skmemory.backends.sqlite_backend import SQLiteBackend
12
11
  from skmemory.importers.telegram import (
13
- _extract_text,
14
12
  _detect_emotion,
13
+ _extract_text,
15
14
  _parse_telegram_export,
16
15
  import_telegram,
17
16
  )
18
- from skmemory.backends.sqlite_backend import SQLiteBackend
19
17
  from skmemory.store import MemoryStore
20
18
 
21
19
 
@@ -29,7 +27,9 @@ def _make_export(messages: list[dict], name: str = "Test Chat") -> dict:
29
27
  }
30
28
 
31
29
 
32
- def _msg(text: str, sender: str = "Alice", msg_id: int = 1, date: str = "2025-06-15T10:30:00") -> dict:
30
+ def _msg(
31
+ text: str, sender: str = "Alice", msg_id: int = 1, date: str = "2025-06-15T10:30:00"
32
+ ) -> dict:
33
33
  return {
34
34
  "id": msg_id,
35
35
  "type": "message",
@@ -57,11 +57,13 @@ class TestExtractText:
57
57
  assert _extract_text("hello world") == "hello world"
58
58
 
59
59
  def test_entity_list(self):
60
- result = _extract_text([
61
- "Hello ",
62
- {"type": "bold", "text": "world"},
63
- "!",
64
- ])
60
+ result = _extract_text(
61
+ [
62
+ "Hello ",
63
+ {"type": "bold", "text": "world"},
64
+ "!",
65
+ ]
66
+ )
65
67
  assert result == "Hello world!"
66
68
 
67
69
  def test_empty(self):
@@ -147,9 +149,21 @@ class TestImportPerMessage:
147
149
  class TestImportDaily:
148
150
  def test_consolidates_by_day(self, tmp_store: MemoryStore, export_dir: Path):
149
151
  msgs = [
150
- _msg("Morning chat about interesting things and stuff", msg_id=1, date="2025-06-15T09:00:00"),
151
- _msg("Afternoon follow-up discussion on that topic", msg_id=2, date="2025-06-15T14:00:00"),
152
- _msg("Next day conversation about something new entirely", msg_id=3, date="2025-06-16T10:00:00"),
152
+ _msg(
153
+ "Morning chat about interesting things and stuff",
154
+ msg_id=1,
155
+ date="2025-06-15T09:00:00",
156
+ ),
157
+ _msg(
158
+ "Afternoon follow-up discussion on that topic",
159
+ msg_id=2,
160
+ date="2025-06-15T14:00:00",
161
+ ),
162
+ _msg(
163
+ "Next day conversation about something new entirely",
164
+ msg_id=3,
165
+ date="2025-06-16T10:00:00",
166
+ ),
153
167
  ]
154
168
  data = _make_export(msgs)
155
169
  (export_dir / "result.json").write_text(json.dumps(data))
@@ -161,8 +175,18 @@ class TestImportDaily:
161
175
 
162
176
  def test_daily_memory_content(self, tmp_store: MemoryStore, export_dir: Path):
163
177
  msgs = [
164
- _msg("First message of the day that is long enough", msg_id=1, date="2025-06-15T09:00:00", sender="Alice"),
165
- _msg("Second message of the day also long enough", msg_id=2, date="2025-06-15T14:00:00", sender="Bob"),
178
+ _msg(
179
+ "First message of the day that is long enough",
180
+ msg_id=1,
181
+ date="2025-06-15T09:00:00",
182
+ sender="Alice",
183
+ ),
184
+ _msg(
185
+ "Second message of the day also long enough",
186
+ msg_id=2,
187
+ date="2025-06-15T14:00:00",
188
+ sender="Bob",
189
+ ),
166
190
  ]
167
191
  data = _make_export(msgs)
168
192
  (export_dir / "result.json").write_text(json.dumps(data))