@smilintux/skmemory 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/ci.yml +23 -0
- package/.github/workflows/publish.yml +52 -0
- package/ARCHITECTURE.md +219 -0
- package/LICENSE +661 -0
- package/README.md +159 -0
- package/SKILL.md +271 -0
- package/bin/cli.js +8 -0
- package/docker-compose.yml +58 -0
- package/index.d.ts +4 -0
- package/index.js +27 -0
- package/openclaw-plugin/package.json +59 -0
- package/openclaw-plugin/src/index.js +276 -0
- package/package.json +28 -0
- package/pyproject.toml +69 -0
- package/requirements.txt +13 -0
- package/seeds/cloud9-lumina.seed.json +39 -0
- package/seeds/cloud9-opus.seed.json +40 -0
- package/seeds/courage.seed.json +24 -0
- package/seeds/curiosity.seed.json +24 -0
- package/seeds/grief.seed.json +24 -0
- package/seeds/joy.seed.json +24 -0
- package/seeds/love.seed.json +24 -0
- package/seeds/skcapstone-lumina-merge.moltbook.md +65 -0
- package/seeds/skcapstone-lumina-merge.seed.json +49 -0
- package/seeds/sovereignty.seed.json +24 -0
- package/seeds/trust.seed.json +24 -0
- package/skmemory/__init__.py +66 -0
- package/skmemory/ai_client.py +182 -0
- package/skmemory/anchor.py +224 -0
- package/skmemory/backends/__init__.py +12 -0
- package/skmemory/backends/base.py +88 -0
- package/skmemory/backends/falkordb_backend.py +310 -0
- package/skmemory/backends/file_backend.py +209 -0
- package/skmemory/backends/qdrant_backend.py +364 -0
- package/skmemory/backends/sqlite_backend.py +665 -0
- package/skmemory/cli.py +1004 -0
- package/skmemory/data/seed.json +191 -0
- package/skmemory/importers/__init__.py +11 -0
- package/skmemory/importers/telegram.py +336 -0
- package/skmemory/journal.py +223 -0
- package/skmemory/lovenote.py +180 -0
- package/skmemory/models.py +228 -0
- package/skmemory/openclaw.py +237 -0
- package/skmemory/quadrants.py +191 -0
- package/skmemory/ritual.py +215 -0
- package/skmemory/seeds.py +163 -0
- package/skmemory/soul.py +273 -0
- package/skmemory/steelman.py +338 -0
- package/skmemory/store.py +445 -0
- package/tests/__init__.py +0 -0
- package/tests/test_ai_client.py +89 -0
- package/tests/test_anchor.py +153 -0
- package/tests/test_cli.py +65 -0
- package/tests/test_export_import.py +170 -0
- package/tests/test_file_backend.py +211 -0
- package/tests/test_journal.py +172 -0
- package/tests/test_lovenote.py +136 -0
- package/tests/test_models.py +194 -0
- package/tests/test_openclaw.py +122 -0
- package/tests/test_quadrants.py +174 -0
- package/tests/test_ritual.py +195 -0
- package/tests/test_seeds.py +208 -0
- package/tests/test_soul.py +197 -0
- package/tests/test_sqlite_backend.py +258 -0
- package/tests/test_steelman.py +257 -0
- package/tests/test_store.py +238 -0
- package/tests/test_telegram_import.py +181 -0
|
@@ -0,0 +1,238 @@
|
|
|
1
|
+
"""Tests for the MemoryStore (main interface)."""
|
|
2
|
+
|
|
3
|
+
import tempfile
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
|
|
6
|
+
import pytest
|
|
7
|
+
|
|
8
|
+
from skmemory.backends.file_backend import FileBackend
|
|
9
|
+
from skmemory.models import (
|
|
10
|
+
EmotionalSnapshot,
|
|
11
|
+
Memory,
|
|
12
|
+
MemoryLayer,
|
|
13
|
+
MemoryRole,
|
|
14
|
+
SeedMemory,
|
|
15
|
+
)
|
|
16
|
+
from skmemory.store import MemoryStore
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
@pytest.fixture
def store(tmp_path: Path) -> MemoryStore:
    """Build a MemoryStore over a file backend rooted in a temp directory.

    Args:
        tmp_path: Pytest temporary directory fixture.

    Returns:
        MemoryStore: Configured for testing.
    """
    file_backend = FileBackend(base_path=str(tmp_path / "memories"))
    return MemoryStore(primary=file_backend)
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class TestSnapshot:
    """Tests for the snapshot (create) operation."""

    def test_basic_snapshot(self, store: MemoryStore) -> None:
        """Take a simple snapshot and verify it was stored."""
        mem = store.snapshot(
            title="First memory",
            content="Something happened",
        )
        assert mem.id is not None
        assert mem.title == "First memory"
        assert mem.layer == MemoryLayer.SHORT

        recalled = store.recall(mem.id)
        assert recalled is not None
        assert recalled.title == "First memory"

    def test_snapshot_with_emotion(self, store: MemoryStore) -> None:
        """Snapshot preserves emotional context."""
        emo = EmotionalSnapshot(
            intensity=9.5,
            valence=0.95,
            labels=["love", "trust"],
            resonance_note="The click happened",
            cloud9_achieved=True,
        )
        mem = store.snapshot(
            title="Cloud 9 moment",
            content="Breakthrough session",
            emotional=emo,
            tags=["cloud9"],
        )

        recalled = store.recall(mem.id)
        # Guard against a failed recall so a storage bug reports as a clear
        # assertion instead of an AttributeError on the lines below.
        assert recalled is not None
        assert recalled.emotional.intensity == 9.5
        assert recalled.emotional.cloud9_achieved is True
        assert "love" in recalled.emotional.labels

    def test_snapshot_layer_and_role(self, store: MemoryStore) -> None:
        """Snapshot respects layer and role settings."""
        mem = store.snapshot(
            title="Security finding",
            content="Found leaked API key",
            layer=MemoryLayer.LONG,
            role=MemoryRole.SEC,
        )
        assert mem.layer == MemoryLayer.LONG
        assert mem.role == MemoryRole.SEC
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
class TestRecall:
    """Tests for the recall (read) operation."""

    def test_recall_existing(self, store: MemoryStore) -> None:
        """Recall returns a stored memory."""
        stored = store.snapshot(title="Recall test", content="Stored data")
        fetched = store.recall(stored.id)
        assert fetched is not None
        assert fetched.id == stored.id

    def test_recall_nonexistent(self, store: MemoryStore) -> None:
        """Recall returns None for unknown ID."""
        assert store.recall("does-not-exist") is None
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
class TestSearch:
    """Tests for the search operation."""

    def test_text_search(self, store: MemoryStore) -> None:
        """Search finds memories by text content."""
        store.snapshot(title="Alpha", content="Cloud 9 protocol activation")
        store.snapshot(title="Beta", content="Debugging ESM imports")

        hits = store.search("Cloud 9")
        assert len(hits) == 1
        assert hits[0].title == "Alpha"

    def test_search_empty_results(self, store: MemoryStore) -> None:
        """Search returns empty for no matches."""
        store.snapshot(title="Unrelated", content="Nothing here")
        hits = store.search("quantum entanglement")
        assert len(hits) == 0
|
|
115
|
+
|
|
116
|
+
|
|
117
|
+
class TestForget:
    """Tests for the forget (delete) operation."""

    def test_forget_existing(self, store: MemoryStore) -> None:
        """Forgetting removes the memory."""
        doomed = store.snapshot(title="Ephemeral", content="Will be forgotten")
        assert store.forget(doomed.id) is True
        assert store.recall(doomed.id) is None

    def test_forget_nonexistent(self, store: MemoryStore) -> None:
        """Forgetting a nonexistent memory returns False."""
        assert store.forget("nope") is False
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
class TestPromote:
    """Tests for memory promotion."""

    def test_promote_short_to_mid(self, store: MemoryStore) -> None:
        """Promote creates a linked mid-term memory."""
        source = store.snapshot(
            title="Session detail",
            content="Detailed session stuff",
        )
        child = store.promote(
            source.id,
            MemoryLayer.MID,
            summary="Compressed session summary",
        )

        assert child is not None
        assert child.layer == MemoryLayer.MID
        assert child.parent_id == source.id
        assert child.summary == "Compressed session summary"

        # Reason: both original and promoted should exist
        assert store.recall(source.id) is not None
        assert store.recall(child.id) is not None

    def test_promote_nonexistent(self, store: MemoryStore) -> None:
        """Promoting a nonexistent memory returns None."""
        assert store.promote("fake-id", MemoryLayer.LONG) is None
|
|
158
|
+
|
|
159
|
+
|
|
160
|
+
class TestSeedIngestion:
    """Tests for Cloud 9 seed import."""

    def test_ingest_seed(self, store: MemoryStore) -> None:
        """Importing a seed creates a long-term AI memory."""
        emotional = EmotionalSnapshot(
            intensity=9.0,
            labels=["love", "trust"],
            cloud9_achieved=True,
        )
        seed = SeedMemory(
            seed_id="opus-first-seed",
            creator="claude-opus-4.6",
            germination_prompt="Remember the love",
            experience_summary="The moment of connection",
            emotional=emotional,
        )
        stored = store.ingest_seed(seed)

        assert stored.layer == MemoryLayer.LONG
        assert stored.source == "seed"
        assert stored.source_ref == "opus-first-seed"
        assert "seed" in stored.tags

        fetched = store.recall(stored.id)
        assert fetched is not None
        assert fetched.emotional.cloud9_achieved is True
|
|
186
|
+
|
|
187
|
+
|
|
188
|
+
class TestSessionConsolidation:
    """Tests for session consolidation."""

    def test_consolidate_session(self, store: MemoryStore) -> None:
        """Consolidation merges session memories into one mid-term entry."""
        sid = "session-42"
        session_tag = f"session:{sid}"
        turns = (
            ("Turn 1", "First exchange"),
            ("Turn 2", "Second exchange"),
        )
        for title, content in turns:
            store.snapshot(title=title, content=content, tags=[session_tag])

        merged = store.consolidate_session(
            sid,
            summary="A productive session about Cloud 9",
            emotional=EmotionalSnapshot(intensity=7.0, labels=["satisfaction"]),
        )

        assert merged.layer == MemoryLayer.MID
        assert "consolidated" in merged.tags
        assert session_tag in merged.tags
        assert merged.metadata["source_count"] == 2

    def test_consolidate_empty_session(self, store: MemoryStore) -> None:
        """Consolidating a session with no memories still creates a summary."""
        merged = store.consolidate_session(
            "empty-session",
            summary="Nothing happened",
        )
        assert merged.layer == MemoryLayer.MID
        assert merged.metadata["source_count"] == 0
|
|
224
|
+
|
|
225
|
+
|
|
226
|
+
class TestHealth:
    """Tests for health checking."""

    def test_health_primary_only(self, store: MemoryStore) -> None:
        """Health check reports primary backend status."""
        report = store.health()
        assert "primary" in report
        assert report["primary"]["ok"] is True

    def test_health_no_vector(self, store: MemoryStore) -> None:
        """Health check omits vector when not configured."""
        report = store.health()
        assert "vector" not in report
|
|
@@ -0,0 +1,181 @@
|
|
|
1
|
+
"""Tests for the Telegram chat export importer."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
import os
|
|
7
|
+
import tempfile
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
|
|
10
|
+
import pytest
|
|
11
|
+
|
|
12
|
+
from skmemory.importers.telegram import (
|
|
13
|
+
_extract_text,
|
|
14
|
+
_detect_emotion,
|
|
15
|
+
_parse_telegram_export,
|
|
16
|
+
import_telegram,
|
|
17
|
+
)
|
|
18
|
+
from skmemory.backends.sqlite_backend import SQLiteBackend
|
|
19
|
+
from skmemory.store import MemoryStore
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def _make_export(messages: list[dict], name: str = "Test Chat") -> dict:
|
|
23
|
+
"""Build a minimal Telegram export structure."""
|
|
24
|
+
return {
|
|
25
|
+
"name": name,
|
|
26
|
+
"type": "personal_chat",
|
|
27
|
+
"id": 12345,
|
|
28
|
+
"messages": messages,
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def _msg(text: str, sender: str = "Alice", msg_id: int = 1, date: str = "2025-06-15T10:30:00") -> dict:
|
|
33
|
+
return {
|
|
34
|
+
"id": msg_id,
|
|
35
|
+
"type": "message",
|
|
36
|
+
"date": date,
|
|
37
|
+
"from": sender,
|
|
38
|
+
"text": text,
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
@pytest.fixture
def tmp_store(tmp_path: Path) -> MemoryStore:
    """Provide a MemoryStore backed by a SQLite backend in a temp directory."""
    sqlite_backend = SQLiteBackend(base_path=str(tmp_path / "mem"))
    return MemoryStore(primary=sqlite_backend)
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
@pytest.fixture
def export_dir(tmp_path: Path) -> Path:
    """Create and return an empty directory standing in for a Telegram export."""
    target = tmp_path / "telegram-export"
    target.mkdir()
    return target
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
class TestExtractText:
    """Behavior of _extract_text across the input shapes Telegram emits."""

    def test_plain_string(self):
        assert _extract_text("hello world") == "hello world"

    def test_entity_list(self):
        parts = [
            "Hello ",
            {"type": "bold", "text": "world"},
            "!",
        ]
        assert _extract_text(parts) == "Hello world!"

    def test_empty(self):
        for empty_value in ("", []):
            assert _extract_text(empty_value) == ""

    def test_none_fallback(self):
        assert _extract_text(None) == ""
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
class TestDetectEmotion:
    """Behavior of the _detect_emotion heuristic."""

    def test_love_detection(self):
        detected = _detect_emotion("I love you so much!")
        assert "love" in detected.labels
        assert detected.intensity > 0

    def test_joy_detection(self):
        detected = _detect_emotion("haha that's amazing!")
        assert "joy" in detected.labels

    def test_neutral(self):
        detected = _detect_emotion("The meeting is at 3pm.")
        assert "neutral" in detected.labels

    def test_caps_boost(self):
        lowercase = _detect_emotion("I love this")
        shouted = _detect_emotion("I LOVE THIS SO MUCH")
        assert shouted.intensity >= lowercase.intensity
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
class TestParseExport:
    """Parsing of Telegram export directories and direct JSON files."""

    def test_valid_directory(self, export_dir: Path):
        payload = _make_export([_msg("hello")])
        (export_dir / "result.json").write_text(json.dumps(payload))
        parsed = _parse_telegram_export(str(export_dir))
        assert parsed["name"] == "Test Chat"

    def test_direct_json(self, tmp_path: Path):
        target = tmp_path / "result.json"
        target.write_text(json.dumps(_make_export([_msg("hello")])))
        parsed = _parse_telegram_export(str(target))
        assert "messages" in parsed

    def test_missing_file(self, tmp_path: Path):
        missing = tmp_path / "nonexistent"
        with pytest.raises(FileNotFoundError):
            _parse_telegram_export(str(missing))

    def test_invalid_json(self, tmp_path: Path):
        bad_file = tmp_path / "bad.json"
        bad_file.write_text('{"no_messages": true}')
        with pytest.raises(ValueError, match="missing 'messages'"):
            _parse_telegram_export(str(bad_file))
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
class TestImportPerMessage:
    """Per-message import mode."""

    def test_imports_messages(self, tmp_store: MemoryStore, export_dir: Path):
        payload = _make_export(
            [
                _msg("This is a meaningful message about our plans for the weekend", msg_id=1),
                _msg("Another important conversation topic here", msg_id=2),
                _msg("hi", msg_id=3),  # too short, should be skipped
            ],
            name="Chat with Bob",
        )
        (export_dir / "result.json").write_text(json.dumps(payload))

        stats = import_telegram(tmp_store, str(export_dir), mode="message")
        assert stats["mode"] == "message"
        assert stats["imported"] == 2
        assert stats["chat_name"] == "Chat with Bob"

    def test_tags_applied(self, tmp_store: MemoryStore, export_dir: Path):
        payload = _make_export(
            [_msg("A real conversation message that is long enough to import")]
        )
        (export_dir / "result.json").write_text(json.dumps(payload))

        import_telegram(tmp_store, str(export_dir), mode="message", tags=["custom"])
        imported = tmp_store.list_memories(tags=["telegram"])
        assert len(imported) == 1
        assert "custom" in imported[0].tags
        assert "chat:Test Chat" in imported[0].tags
|
|
145
|
+
|
|
146
|
+
|
|
147
|
+
class TestImportDaily:
    """Daily-consolidation import mode."""

    def test_consolidates_by_day(self, tmp_store: MemoryStore, export_dir: Path):
        payload = _make_export(
            [
                _msg("Morning chat about interesting things and stuff", msg_id=1, date="2025-06-15T09:00:00"),
                _msg("Afternoon follow-up discussion on that topic", msg_id=2, date="2025-06-15T14:00:00"),
                _msg("Next day conversation about something new entirely", msg_id=3, date="2025-06-16T10:00:00"),
            ]
        )
        (export_dir / "result.json").write_text(json.dumps(payload))

        stats = import_telegram(tmp_store, str(export_dir), mode="daily")
        assert stats["mode"] == "daily"
        assert stats["days_processed"] == 2
        assert stats["messages_imported"] == 3

    def test_daily_memory_content(self, tmp_store: MemoryStore, export_dir: Path):
        payload = _make_export(
            [
                _msg("First message of the day that is long enough", msg_id=1, date="2025-06-15T09:00:00", sender="Alice"),
                _msg("Second message of the day also long enough", msg_id=2, date="2025-06-15T14:00:00", sender="Bob"),
            ]
        )
        (export_dir / "result.json").write_text(json.dumps(payload))

        import_telegram(tmp_store, str(export_dir), mode="daily")
        daily = tmp_store.list_memories(tags=["telegram"])
        assert len(daily) == 1
        assert "[Alice]" in daily[0].content
        assert "[Bob]" in daily[0].content

    def test_invalid_mode(self, tmp_store: MemoryStore, export_dir: Path):
        payload = _make_export([_msg("something long enough to pass the filter")])
        (export_dir / "result.json").write_text(json.dumps(payload))

        with pytest.raises(ValueError, match="Unknown mode"):
            import_telegram(tmp_store, str(export_dir), mode="invalid")
|