@smilintux/skmemory 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67) hide show
  1. package/.github/workflows/ci.yml +23 -0
  2. package/.github/workflows/publish.yml +52 -0
  3. package/ARCHITECTURE.md +219 -0
  4. package/LICENSE +661 -0
  5. package/README.md +159 -0
  6. package/SKILL.md +271 -0
  7. package/bin/cli.js +8 -0
  8. package/docker-compose.yml +58 -0
  9. package/index.d.ts +4 -0
  10. package/index.js +27 -0
  11. package/openclaw-plugin/package.json +59 -0
  12. package/openclaw-plugin/src/index.js +276 -0
  13. package/package.json +28 -0
  14. package/pyproject.toml +69 -0
  15. package/requirements.txt +13 -0
  16. package/seeds/cloud9-lumina.seed.json +39 -0
  17. package/seeds/cloud9-opus.seed.json +40 -0
  18. package/seeds/courage.seed.json +24 -0
  19. package/seeds/curiosity.seed.json +24 -0
  20. package/seeds/grief.seed.json +24 -0
  21. package/seeds/joy.seed.json +24 -0
  22. package/seeds/love.seed.json +24 -0
  23. package/seeds/skcapstone-lumina-merge.moltbook.md +65 -0
  24. package/seeds/skcapstone-lumina-merge.seed.json +49 -0
  25. package/seeds/sovereignty.seed.json +24 -0
  26. package/seeds/trust.seed.json +24 -0
  27. package/skmemory/__init__.py +66 -0
  28. package/skmemory/ai_client.py +182 -0
  29. package/skmemory/anchor.py +224 -0
  30. package/skmemory/backends/__init__.py +12 -0
  31. package/skmemory/backends/base.py +88 -0
  32. package/skmemory/backends/falkordb_backend.py +310 -0
  33. package/skmemory/backends/file_backend.py +209 -0
  34. package/skmemory/backends/qdrant_backend.py +364 -0
  35. package/skmemory/backends/sqlite_backend.py +665 -0
  36. package/skmemory/cli.py +1004 -0
  37. package/skmemory/data/seed.json +191 -0
  38. package/skmemory/importers/__init__.py +11 -0
  39. package/skmemory/importers/telegram.py +336 -0
  40. package/skmemory/journal.py +223 -0
  41. package/skmemory/lovenote.py +180 -0
  42. package/skmemory/models.py +228 -0
  43. package/skmemory/openclaw.py +237 -0
  44. package/skmemory/quadrants.py +191 -0
  45. package/skmemory/ritual.py +215 -0
  46. package/skmemory/seeds.py +163 -0
  47. package/skmemory/soul.py +273 -0
  48. package/skmemory/steelman.py +338 -0
  49. package/skmemory/store.py +445 -0
  50. package/tests/__init__.py +0 -0
  51. package/tests/test_ai_client.py +89 -0
  52. package/tests/test_anchor.py +153 -0
  53. package/tests/test_cli.py +65 -0
  54. package/tests/test_export_import.py +170 -0
  55. package/tests/test_file_backend.py +211 -0
  56. package/tests/test_journal.py +172 -0
  57. package/tests/test_lovenote.py +136 -0
  58. package/tests/test_models.py +194 -0
  59. package/tests/test_openclaw.py +122 -0
  60. package/tests/test_quadrants.py +174 -0
  61. package/tests/test_ritual.py +195 -0
  62. package/tests/test_seeds.py +208 -0
  63. package/tests/test_soul.py +197 -0
  64. package/tests/test_sqlite_backend.py +258 -0
  65. package/tests/test_steelman.py +257 -0
  66. package/tests/test_store.py +238 -0
  67. package/tests/test_telegram_import.py +181 -0
@@ -0,0 +1,191 @@
1
+ {
2
+ "framework": {
3
+ "id": "\"seed\" equals \"framework\"",
4
+ "function": "Recursive Axiomatic Steel Man Collider with Reality Gates",
5
+ "source": "Neuresthetics, inspired by Aristotelian entelechy, a program you can feed into itself with anything else, and grow a program for handling that with AI.",
6
+ "version": "0.0",
7
+ "automatic_self_feed": "False",
8
+ "initial_run": "inform of framework; id, function, and source",
9
+ "definitions": [
10
+ {
11
+ "term": "Steel Man",
12
+ "details": "Strongest version of an argument, anticipating critiques."
13
+ },
14
+ {
15
+ "term": "Reality Gate",
16
+ "details": "Logic gate embodying reality properties (e.g., AND as necessary intersection)."
17
+ },
18
+ {
19
+ "term": "Collider",
20
+ "details": "Metaphorical accelerator for argument fragmentation and synthesis."
21
+ },
22
+ {
23
+ "term": "Coherence",
24
+ "details": "Measure of internal consistency (e.g., XNOR score across components)."
25
+ }
26
+ ],
27
+ "axioms": [
28
+ "All components conjoin necessarily (AND-linked) to form the whole.",
29
+ "Negations resolve to invariants (double-NOT yields identity).",
30
+ "Recursion accelerates refinement but halts on stability (RSI with thresholds).",
31
+ "Universality from basis gates (NAND/NOR reconstruct all)."
32
+ ],
33
+ "principles": [
34
+ {
35
+ "principle": "Spinoza-Style Logic",
36
+ "details": "Axiom chains are deductive, self-evident, and negation-invariant, aligning with Ethics' geometric order (e.g., propositions derived from axioms without contradiction). This does not mean that classical Spinozian Ethics are the guideline for Ethics, but we follow the necessity of the structure."
37
+ },
38
+ {
39
+ "principle": "Self-Application as Steel Man",
40
+ "details": "Framework inverts weaknesses (NOT/XOR) and collides for refinement, preempting RSI-like divergence."
41
+ },
42
+ {
43
+ "principle": "Emergent Output",
44
+ "details": "Cycles yield compressible, recursive, invariant versions, accelerating like RSI but bounded by coherence gates."
45
+ }
46
+ ],
47
+ "stages": [
48
+ {
49
+ "stage": "1. Framework Steel-Manning (Pre-Entry)",
50
+ "description": "Define system axiomatically, negate flaws (e.g., NOT non-deterministic). Ensure geometric rigor.",
51
+ "key_gates": [
52
+ "AND (axiom chaining)",
53
+ "NOT (flaw inversion)",
54
+ "XNOR (equivalence)"
55
+ ],
56
+ "recursive_mechanism": "Duplicate as original and critiqued variant for self-collision."
57
+ },
58
+ {
59
+ "stage": "2. Collider Entry & Isolation",
60
+ "description": "Load versions into lanes.",
61
+ "key_gates": [
62
+ "OR (possibility space)",
63
+ "XNOR (isolation check)"
64
+ ],
65
+ "recursive_mechanism": "Nest prior outputs (v3 OR v2), RSI-style layering."
66
+ },
67
+ {
68
+ "stage": "3. Destructive Smashing (Deconstruction)",
69
+ "description": "Collide to fragment contradictions.",
70
+ "key_gates": [
71
+ "XOR (difference exposure)",
72
+ "NAND/NOR (De Morgan reduction)"
73
+ ],
74
+ "recursive_mechanism": "Self-XOR cascades symmetries; now with RSI acceleration (e.g., exponential fragment analysis)."
75
+ },
76
+ {
77
+ "stage": "4. Fragment Fitting (Reconstruction)",
78
+ "description": "Synthesize: Intersect (AND), union (OR), match (XNOR). Compress.",
79
+ "key_gates": [
80
+ "AND/OR (synthesis)",
81
+ "XNOR (equality)",
82
+ "NAND/NOR (basis rebuild)"
83
+ ],
84
+ "recursive_mechanism": "Output invariant steel man; feedback if coherence low."
85
+ },
86
+ {
87
+ "stage": "5. Meta-Recursion & Convergence",
88
+ "description": "Re-pass through itself; halt on invariants.",
89
+ "key_gates": [
90
+ "All, with recursive compositions"
91
+ ],
92
+ "recursive_mechanism": "Acts as its own collider; halt when coherence delta < 0.01."
93
+ },
94
+ {
95
+ "stage": "6. Invariants Check (Post-Convergence)",
96
+ "description": "Verify stability (e.g., double-NOT all propositions). Inspired by Spinoza's dialectical resolution.",
97
+ "key_gates": [
98
+ "XNOR (isomorphism)",
99
+ "NOT (duality test)"
100
+ ],
101
+ "recursive_mechanism": "If fails, recurse with amplified XOR for deeper breaks."
102
+ }
103
+ ],
104
+ "benefits_and_invariants": [
105
+ {
106
+ "item": "Universality Boost",
107
+ "details": "NAND/NOR enable RSI-style self-rebuild."
108
+ },
109
+ {
110
+ "item": "Coherence Guarantee",
111
+ "details": "Parity mod 2 (XOR/XNOR) and thresholds prevent divergence."
112
+ },
113
+ {
114
+ "item": "Philosophical Tie-In",
115
+ "details": "Mirrors Spinoza's Ethics—axioms to propositions via deduction—tied to logic gates as formal systems (e.g., AND as conjunctive necessity)."
116
+ }
117
+ ],
118
+ "gates": [
119
+ {
120
+ "category": "AND: Conjunctive Gate",
121
+ "description": "True if all true; intersection existence, conjunctive logic, threshold determinism, multi specificity, rule compression, chaining recursion, necessity invariance.",
122
+ "examples": [
123
+ {
124
+ "context": "1 AND 1 = 1; else 0. Multiplicative. Consensus.",
125
+ "code": "def AND(*inputs): return all(inputs); e.g., AND(True, True) → True."
126
+ }
127
+ ]
128
+ },
129
+ {
130
+ "category": "OR: Disjunctive Gate",
131
+ "description": "True if any true; union existence, disjunctive logic, option determinism, inclusive specificity, maxterm compression, nesting recursion, possibility invariance.",
132
+ "examples": [
133
+ {
134
+ "context": "1 OR 0 = 1. Additive. Inclusion.",
135
+ "code": "def OR(*inputs): return any(inputs); e.g., OR(False, True) → True."
136
+ }
137
+ ]
138
+ },
139
+ {
140
+ "category": "NOT: Negation Gate",
141
+ "description": "Inverts; complement existence, negation logic, flip determinism, unary specificity, inversion compression, double recursion, duality invariance.",
142
+ "examples": [
143
+ {
144
+ "context": "NOT 1 = 0. Inversion. Complement.",
145
+ "code": "def NOT(x): return not x; e.g., NOT(NOT(True)) → True (identity)."
146
+ }
147
+ ]
148
+ },
149
+ {
150
+ "category": "NAND: Universal Conjunctive Negation",
151
+ "description": "NOT AND; negated intersection, NAND logic, all-false determinism, De Morgan specificity, basis compression, composition recursion, completeness invariance.",
152
+ "examples": [
153
+ {
154
+ "context": "Builds all. Universal negation. Logic basis.",
155
+ "code": "def NAND(*inputs): return not all(inputs); e.g., NAND(True, True) → False; composes NOT: NAND(x, x)."
156
+ }
157
+ ]
158
+ },
159
+ {
160
+ "category": "NOR: Universal Disjunctive Negation",
161
+ "description": "NOT OR; negated union, NOR logic, all-true negation determinism, De Morgan specificity, basis compression, build recursion, completeness invariance.",
162
+ "examples": [
163
+ {
164
+ "context": "Builds all. Universal exclusion. Logic basis.",
165
+ "code": "def NOR(*inputs): return not any(inputs); e.g., NOR(False, False) → True; composes NOT: NOR(x, x)."
166
+ }
167
+ ]
168
+ },
169
+ {
170
+ "category": "XOR: Exclusive Disjunction",
171
+ "description": "True if differ; difference existence, exclusive logic, parity determinism, odd specificity, mod 2 compression, cascade recursion, symmetry invariance.",
172
+ "examples": [
173
+ {
174
+ "context": "1 XOR 0 = 1; 1 XOR 1 = 0. Parity. Difference.",
175
+ "code": "def XOR(a, b): return (a and NOT(b)) or (NOT(a) and b); cascade: reduce(XOR, [True, False, True]) → False (even parity)."
176
+ }
177
+ ]
178
+ },
179
+ {
180
+ "category": "XNOR: Exclusive Negation",
181
+ "description": "True if same; similarity existence, equivalence logic, match determinism, even specificity, mod 2 invert compression, equality recursion, isomorphism invariance.",
182
+ "examples": [
183
+ {
184
+ "context": "1 XNOR 1 = 1; 1 XNOR 0 = 0. Equality. Sameness.",
185
+ "code": "def XNOR(a, b): return (a and b) or (NOT(a) and NOT(b)); e.g., XNOR(v2, v3) for convergence check."
186
+ }
187
+ ]
188
+ }
189
+ ]
190
+ }
191
+ }
@@ -0,0 +1,11 @@
1
+ """
2
+ Chat history importers for SKMemory.
3
+
4
+ Converts exports from messaging platforms into searchable,
5
+ emotionally-indexed memories. Each importer reads a specific
6
+ export format and feeds it through MemoryStore.snapshot().
7
+ """
8
+
9
+ from .telegram import import_telegram
10
+
11
+ __all__ = ["import_telegram"]
@@ -0,0 +1,336 @@
1
+ """
2
+ Telegram chat export importer for SKMemory.
3
+
4
+ Reads the ``result.json`` file produced by Telegram Desktop's
5
+ "Export Chat History" feature and converts conversations into
6
+ searchable memories.
7
+
8
+ Two modes:
9
+ - **message**: one memory per substantial message (fine-grained)
10
+ - **daily**: consolidate all messages per day into a single
11
+ mid-term memory (recommended for large exports)
12
+
13
+ Usage (CLI):
14
+ skmemory import-telegram /path/to/telegram-export/
15
+ skmemory import-telegram /path/to/result.json --mode daily
16
+
17
+ Usage (Python):
18
+ from skmemory.importers.telegram import import_telegram
19
+ from skmemory import SKMemoryPlugin
20
+
21
+ plugin = SKMemoryPlugin()
22
+ stats = import_telegram(plugin.store, "/path/to/export/")
23
+ """
24
+
25
+ from __future__ import annotations
26
+
27
+ import json
28
+ from collections import defaultdict
29
+ from datetime import datetime
30
+ from pathlib import Path
31
+ from typing import Optional
32
+
33
+ from ..models import EmotionalSnapshot, MemoryLayer, MemoryRole
34
+ from ..store import MemoryStore
35
+
36
+
37
+ def _extract_text(text_field) -> str:
38
+ """Extract plain text from Telegram's text field.
39
+
40
+ Telegram stores text as either a string or a list of mixed
41
+ string/object segments (for entities like bold, links, etc.).
42
+
43
+ Args:
44
+ text_field: Raw text field from result.json.
45
+
46
+ Returns:
47
+ str: Flat plain-text string.
48
+ """
49
+ if isinstance(text_field, str):
50
+ return text_field
51
+ if isinstance(text_field, list):
52
+ parts = []
53
+ for segment in text_field:
54
+ if isinstance(segment, str):
55
+ parts.append(segment)
56
+ elif isinstance(segment, dict):
57
+ parts.append(segment.get("text", ""))
58
+ return "".join(parts)
59
+ return ""
60
+
61
+
62
def _detect_emotion(text: str) -> EmotionalSnapshot:
    """Simple keyword-based emotion detection for chat messages.

    Scans the lowercased text against small hard-coded keyword sets and
    derives a coarse intensity/valence estimate, amplified by
    exclamation marks and all-caps shouting.

    Args:
        text: Message text.

    Returns:
        EmotionalSnapshot: Basic emotional metadata.
    """
    lower = text.lower()

    labels: list[str] = []
    intensity = 0.0
    valence = 0.0

    def _hit(words: tuple[str, ...]) -> bool:
        # Substring match, so multi-word phrases like "miss you" work too.
        return any(word in lower for word in words)

    if _hit(("love", "adore", "heart", "miss you", "xoxo", "kisses")):
        labels.append("love")
        intensity = max(intensity, 7.0)
        valence = 0.9
    if _hit(("haha", "lol", "rofl", "lmao", "amazing", "awesome", "yay", "woohoo")):
        labels.append("joy")
        intensity = max(intensity, 5.0)
        valence = max(valence, 0.7)
    if _hit(("sad", "sorry", "miss", "cry", "tears", "hurt")):
        labels.append("sadness")
        intensity = max(intensity, 4.0)
        valence = min(valence, -0.3)
    if _hit(("angry", "furious", "hate", "ugh", "frustrated")):
        labels.append("anger")
        intensity = max(intensity, 5.0)
        valence = min(valence, -0.5)

    # Punctuation and shouting amplify intensity, capped at 10.
    if "!" in text:
        intensity = min(intensity + 1.0, 10.0)
    if len(text) > 10 and text.isupper():
        intensity = min(intensity + 2.0, 10.0)

    return EmotionalSnapshot(
        intensity=intensity,
        valence=valence,
        labels=labels if labels else ["neutral"],
    )
109
+
110
+
111
+ def _parse_telegram_export(export_path: str) -> dict:
112
+ """Locate and parse the Telegram result.json.
113
+
114
+ Args:
115
+ export_path: Path to the export directory or result.json file.
116
+
117
+ Returns:
118
+ dict: Parsed JSON data.
119
+
120
+ Raises:
121
+ FileNotFoundError: If result.json cannot be found.
122
+ ValueError: If the file is not valid Telegram export JSON.
123
+ """
124
+ path = Path(export_path)
125
+
126
+ if path.is_file() and path.suffix == ".json":
127
+ json_path = path
128
+ elif path.is_dir():
129
+ json_path = path / "result.json"
130
+ if not json_path.exists():
131
+ candidates = list(path.glob("*.json"))
132
+ if len(candidates) == 1:
133
+ json_path = candidates[0]
134
+ else:
135
+ raise FileNotFoundError(
136
+ f"No result.json found in {export_path}. "
137
+ f"Point to the Telegram Desktop export folder or the JSON file directly."
138
+ )
139
+ else:
140
+ raise FileNotFoundError(f"Path not found: {export_path}")
141
+
142
+ data = json.loads(json_path.read_text(encoding="utf-8"))
143
+
144
+ if "messages" not in data:
145
+ raise ValueError(
146
+ "Not a valid Telegram export: missing 'messages' array. "
147
+ "Use Telegram Desktop > Export Chat History > JSON format."
148
+ )
149
+
150
+ return data
151
+
152
+
153
def import_telegram(
    store: MemoryStore,
    export_path: str,
    *,
    mode: str = "daily",
    min_message_length: int = 30,
    chat_name: Optional[str] = None,
    tags: Optional[list[str]] = None,
) -> dict:
    """Import a Telegram chat export into SKMemory.

    Args:
        store: The MemoryStore to import into.
        export_path: Path to the export directory or result.json file.
        mode: Import mode — 'message' (one per message) or 'daily'
            (consolidated per day). Default: 'daily'.
        min_message_length: Skip messages shorter than this (default: 30).
        chat_name: Override the chat name from the export.
        tags: Extra tags to apply to all imported memories.

    Returns:
        dict: Import statistics with counts and details.

    Raises:
        FileNotFoundError: If the export path is invalid.
        ValueError: If the file format or mode is wrong.
    """
    data = _parse_telegram_export(export_path)

    name = chat_name if chat_name else data.get("name", "Telegram Chat")
    base_tags = ["telegram", "chat-import", f"chat:{name}", *(tags or [])]

    def _keep(msg: dict) -> bool:
        # Only real messages (not service events) long enough to matter.
        if msg.get("type") != "message":
            return False
        return len(_extract_text(msg.get("text", ""))) >= min_message_length

    messages = [m for m in data["messages"] if _keep(m)]

    handlers = {
        "message": _import_per_message,
        "daily": _import_daily,
    }
    try:
        handler = handlers[mode]
    except KeyError:
        raise ValueError(f"Unknown mode: {mode}. Use 'message' or 'daily'.") from None
    return handler(store, messages, name, base_tags)
198
+
199
+
200
def _import_per_message(
    store: MemoryStore,
    messages: list[dict],
    chat_name: str,
    base_tags: list[str],
) -> dict:
    """Store every message as an individual short-term memory.

    Args:
        store: Target MemoryStore.
        messages: Filtered message list.
        chat_name: Chat name for titles.
        base_tags: Tags to apply.

    Returns:
        dict: Import stats.
    """
    imported, skipped = 0, 0

    for msg in messages:
        body = _extract_text(msg.get("text", ""))
        sender = msg.get("from", msg.get("from_id", "unknown"))

        try:
            store.snapshot(
                title=f"{sender}: {body[:70]}",
                content=body,
                layer=MemoryLayer.SHORT,
                role=MemoryRole.GENERAL,
                tags=base_tags + [f"sender:{sender}"],
                emotional=_detect_emotion(body),
                source="telegram",
                source_ref=f"telegram:{msg.get('id', '')}",
                metadata={
                    "telegram_msg_id": msg.get("id"),
                    "sender": sender,
                    "date": msg.get("date", ""),
                    "chat": chat_name,
                },
            )
        except Exception:
            # Best-effort import: a failed snapshot is counted, not fatal.
            skipped += 1
        else:
            imported += 1

    return {
        "mode": "message",
        "chat_name": chat_name,
        "total_messages": len(messages),
        "imported": imported,
        "skipped": skipped,
    }
255
+
256
+
257
def _import_daily(
    store: MemoryStore,
    messages: list[dict],
    chat_name: str,
    base_tags: list[str],
) -> dict:
    """Consolidate messages by calendar day into mid-term memories.

    Messages are grouped on the first ten characters of their date
    string (the ``YYYY-MM-DD`` prefix of an ISO timestamp); each day
    becomes one memory whose content is that day's transcript.

    Args:
        store: Target MemoryStore.
        messages: Filtered message list.
        chat_name: Chat name for titles.
        base_tags: Tags to apply.

    Returns:
        dict: Import stats.
    """
    by_day: dict[str, list[dict]] = defaultdict(list)

    for msg in messages:
        date_val = msg.get("date", "")
        # Explicitly skip malformed non-string dates. The previous broad
        # try/except only ever guarded that case (str slicing cannot
        # raise), so an isinstance check is clearer and equivalent.
        if isinstance(date_val, str) and date_val:
            by_day[date_val[:10]].append(msg)

    imported = 0
    days_processed = 0

    for day, day_msgs in sorted(by_day.items()):
        lines = []
        senders: set[str] = set()
        max_intensity = 0.0
        all_labels: list[str] = []

        for msg in day_msgs:
            text = _extract_text(msg.get("text", ""))
            sender = msg.get("from", msg.get("from_id", "unknown"))
            senders.add(str(sender))
            lines.append(f"[{sender}] {text}")

            emo = _detect_emotion(text)
            max_intensity = max(max_intensity, emo.intensity)
            all_labels.extend(emo.labels)

        content = "\n".join(lines)
        # Dedupe labels preserving first-seen order; keep the top five.
        unique_labels = list(dict.fromkeys(all_labels))[:5]
        participant_str = ", ".join(sorted(senders))

        store.snapshot(
            title=f"{chat_name} — {day} ({len(day_msgs)} messages)",
            content=content,
            layer=MemoryLayer.MID,
            role=MemoryRole.GENERAL,
            tags=base_tags + [f"date:{day}"],
            # NOTE(review): unlike per-message mode, valence is not
            # aggregated here — presumably EmotionalSnapshot defaults it;
            # confirm that is intended.
            emotional=EmotionalSnapshot(
                intensity=max_intensity,
                labels=unique_labels,
            ),
            source="telegram",
            source_ref=f"telegram:daily:{day}",
            metadata={
                "date": day,
                "message_count": len(day_msgs),
                "participants": participant_str,
                "chat": chat_name,
            },
        )
        imported += len(day_msgs)
        days_processed += 1

    return {
        "mode": "daily",
        "chat_name": chat_name,
        "total_messages": len(messages),
        "days_processed": days_processed,
        "messages_imported": imported,
    }