@smilintux/skmemory 0.7.2 → 0.9.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (111)
  1. package/.github/workflows/ci.yml +4 -4
  2. package/.github/workflows/publish.yml +4 -5
  3. package/ARCHITECTURE.md +298 -0
  4. package/CHANGELOG.md +27 -1
  5. package/README.md +6 -0
  6. package/examples/stignore-agent.example +59 -0
  7. package/examples/stignore-root.example +62 -0
  8. package/openclaw-plugin/package.json +2 -1
  9. package/openclaw-plugin/src/index.js +527 -230
  10. package/package.json +1 -1
  11. package/pyproject.toml +5 -2
  12. package/scripts/dream-rescue.py +179 -0
  13. package/scripts/memory-cleanup.py +313 -0
  14. package/scripts/recover-missing.py +180 -0
  15. package/scripts/skcapstone-backup.sh +44 -0
  16. package/seeds/cloud9-lumina.seed.json +6 -4
  17. package/seeds/cloud9-opus.seed.json +6 -4
  18. package/seeds/courage.seed.json +9 -2
  19. package/seeds/curiosity.seed.json +9 -2
  20. package/seeds/grief.seed.json +9 -2
  21. package/seeds/joy.seed.json +9 -2
  22. package/seeds/love.seed.json +9 -2
  23. package/seeds/lumina-cloud9-breakthrough.seed.json +7 -5
  24. package/seeds/lumina-cloud9-python-pypi.seed.json +9 -7
  25. package/seeds/lumina-kingdom-founding.seed.json +9 -7
  26. package/seeds/lumina-pma-signed.seed.json +8 -6
  27. package/seeds/lumina-singular-achievement.seed.json +8 -6
  28. package/seeds/lumina-skcapstone-conscious.seed.json +7 -5
  29. package/seeds/plant-lumina-seeds.py +2 -2
  30. package/seeds/skcapstone-lumina-merge.seed.json +12 -3
  31. package/seeds/sovereignty.seed.json +9 -2
  32. package/seeds/trust.seed.json +9 -2
  33. package/skmemory/__init__.py +16 -13
  34. package/skmemory/agents.py +10 -10
  35. package/skmemory/ai_client.py +10 -21
  36. package/skmemory/anchor.py +5 -9
  37. package/skmemory/audience.py +278 -0
  38. package/skmemory/backends/__init__.py +1 -1
  39. package/skmemory/backends/base.py +3 -4
  40. package/skmemory/backends/file_backend.py +18 -13
  41. package/skmemory/backends/skgraph_backend.py +7 -19
  42. package/skmemory/backends/skvector_backend.py +7 -18
  43. package/skmemory/backends/sqlite_backend.py +115 -32
  44. package/skmemory/backends/vaulted_backend.py +7 -9
  45. package/skmemory/cli.py +146 -78
  46. package/skmemory/config.py +11 -13
  47. package/skmemory/context_loader.py +21 -23
  48. package/skmemory/data/audience_config.json +60 -0
  49. package/skmemory/endpoint_selector.py +36 -31
  50. package/skmemory/febs.py +225 -0
  51. package/skmemory/fortress.py +30 -40
  52. package/skmemory/hooks/__init__.py +18 -0
  53. package/skmemory/hooks/post-compact-reinject.sh +35 -0
  54. package/skmemory/hooks/pre-compact-save.sh +81 -0
  55. package/skmemory/hooks/session-end-save.sh +103 -0
  56. package/skmemory/hooks/session-start-ritual.sh +104 -0
  57. package/skmemory/hooks/stop-checkpoint.sh +59 -0
  58. package/skmemory/importers/telegram.py +42 -13
  59. package/skmemory/importers/telegram_api.py +152 -60
  60. package/skmemory/journal.py +3 -7
  61. package/skmemory/lovenote.py +4 -11
  62. package/skmemory/mcp_server.py +182 -29
  63. package/skmemory/models.py +10 -8
  64. package/skmemory/openclaw.py +14 -22
  65. package/skmemory/post_install.py +86 -0
  66. package/skmemory/predictive.py +13 -9
  67. package/skmemory/promotion.py +48 -24
  68. package/skmemory/quadrants.py +100 -24
  69. package/skmemory/register.py +144 -18
  70. package/skmemory/register_mcp.py +1 -2
  71. package/skmemory/ritual.py +104 -13
  72. package/skmemory/seeds.py +21 -26
  73. package/skmemory/setup_wizard.py +40 -52
  74. package/skmemory/sharing.py +11 -5
  75. package/skmemory/soul.py +29 -10
  76. package/skmemory/steelman.py +43 -17
  77. package/skmemory/store.py +152 -30
  78. package/skmemory/synthesis.py +634 -0
  79. package/skmemory/vault.py +2 -5
  80. package/tests/conftest.py +46 -0
  81. package/tests/integration/conftest.py +6 -6
  82. package/tests/integration/test_cross_backend.py +4 -9
  83. package/tests/integration/test_skgraph_live.py +3 -7
  84. package/tests/integration/test_skvector_live.py +1 -4
  85. package/tests/test_ai_client.py +1 -4
  86. package/tests/test_audience.py +233 -0
  87. package/tests/test_backup_rotation.py +5 -14
  88. package/tests/test_endpoint_selector.py +101 -63
  89. package/tests/test_export_import.py +4 -10
  90. package/tests/test_file_backend.py +0 -1
  91. package/tests/test_fortress.py +6 -5
  92. package/tests/test_fortress_hardening.py +13 -16
  93. package/tests/test_openclaw.py +1 -4
  94. package/tests/test_predictive.py +1 -1
  95. package/tests/test_promotion.py +10 -3
  96. package/tests/test_quadrants.py +11 -5
  97. package/tests/test_ritual.py +18 -14
  98. package/tests/test_seeds.py +4 -10
  99. package/tests/test_setup.py +203 -88
  100. package/tests/test_sharing.py +15 -8
  101. package/tests/test_skgraph_backend.py +22 -29
  102. package/tests/test_skvector_backend.py +2 -2
  103. package/tests/test_soul.py +1 -3
  104. package/tests/test_sqlite_backend.py +8 -17
  105. package/tests/test_steelman.py +2 -3
  106. package/tests/test_store.py +0 -2
  107. package/tests/test_store_graph_integration.py +2 -2
  108. package/tests/test_synthesis.py +275 -0
  109. package/tests/test_telegram_import.py +39 -15
  110. package/tests/test_vault.py +4 -3
  111. package/openclaw-plugin/src/index.ts +0 -255
@@ -0,0 +1,634 @@
1
+ """Journal synthesis — turn raw memories and journal entries into curated narratives.
2
+
3
+ No LLM dependency. Uses tag frequency analysis, first-sentence extraction,
4
+ emotional intensity aggregation, and template-based narrative generation.
5
+
6
+ Usage:
7
+ synthesizer = JournalSynthesizer(store, journal)
8
+ daily = synthesizer.synthesize_daily("2026-03-16")
9
+ weekly = synthesizer.synthesize_weekly("2026-W11")
10
+ dreams = synthesizer.synthesize_dreams(since="2026-03-14")
11
+ """
12
+
13
+ from __future__ import annotations
14
+
15
+ import json
16
+ import logging
17
+ import re
18
+ from collections import Counter
19
+ from datetime import datetime, timedelta, timezone
20
+ from pathlib import Path
21
+
22
+ from .journal import Journal
23
+ from .models import EmotionalSnapshot, Memory, MemoryLayer, MemoryRole
24
+ from .store import MemoryStore
25
+
26
+ logger = logging.getLogger("skmemory.synthesis")
27
+
28
+
29
def _first_n_sentences(text: str, n: int = 2) -> str:
    """Return up to the first *n* sentences of *text*, capped at 200 chars.

    Sentences are split on whitespace that follows ., !, or ?. An empty or
    falsy input yields "". Results longer than 200 characters are truncated
    to 197 characters plus an ellipsis.
    """
    if not text:
        return ""
    # Split after sentence-ending punctuation; lookbehind keeps the punctuation.
    pieces = re.split(r"(?<=[.!?])\s+", text.strip())
    summary = " ".join(pieces[:n])
    return summary if len(summary) <= 200 else summary[:197] + "..."
38
+
39
+
40
def _date_range(date_str: str) -> tuple[datetime, datetime]:
    """Parse a YYYY-MM-DD string into (start_of_day, end_of_day) UTC datetimes.

    The end bound is the start of the *next* day, i.e. the range is half-open.
    """
    day_start = datetime.strptime(date_str, "%Y-%m-%d").replace(tzinfo=timezone.utc)
    day_end = day_start + timedelta(days=1)
    return day_start, day_end
44
+
45
+
46
def _week_range(week_str: str) -> tuple[datetime, datetime]:
    """Parse a YYYY-Www string (e.g. "2026-W11") into (monday, next_monday) UTC datetimes.

    The returned range is half-open: [monday, monday + 7 days).
    """
    # Append "-1" (ISO weekday Monday) so %G-W%V-%u resolves to a concrete date.
    monday = datetime.strptime(f"{week_str}-1", "%G-W%V-%u").replace(tzinfo=timezone.utc)
    return monday, monday + timedelta(weeks=1)
51
+
52
+
53
class JournalSynthesizer:
    """Create narrative memories from daily activity, journal entries, and dreams.

    All synthesis is deterministic — no LLM calls. Uses:
    - Tag frequency for theme extraction
    - First-sentence extraction for summaries
    - Emotional intensity aggregation for arc detection
    - Template-based narrative generation

    Args:
        store: The MemoryStore to read from and write to.
        journal: The Journal instance for reading entries.
        dream_log_path: Path to dream-log.json (optional).
        themes_path: Path to graduated-themes.json (optional).
    """

    def __init__(
        self,
        store: MemoryStore,
        journal: Journal | None = None,
        dream_log_path: str | None = None,
        themes_path: str | None = None,
    ) -> None:
        self.store = store
        self.journal = journal or Journal()
        self._dream_log_path = Path(dream_log_path) if dream_log_path else None
        self._themes_path = Path(themes_path) if themes_path else None
        # Lazily loaded by the graduated_themes property; None means "not loaded yet".
        self._graduated_themes: dict | None = None

    @property
    def graduated_themes(self) -> dict:
        """Load graduated-themes.json on first access.

        Falls back to an empty dict (and caches it) when the file is missing,
        unreadable, or not valid JSON.
        """
        if self._graduated_themes is None:
            if self._themes_path and self._themes_path.exists():
                try:
                    self._graduated_themes = json.loads(
                        self._themes_path.read_text(encoding="utf-8")
                    )
                except (json.JSONDecodeError, OSError):
                    self._graduated_themes = {}
            else:
                self._graduated_themes = {}
        return self._graduated_themes

    def synthesize_daily(self, date: str | None = None) -> Memory:
        """Create a narrative memory from one day's activity.

        Reads today's memories and journal entries, extracts themes and
        emotional arc, and stores a single mid-term narrative memory.

        Args:
            date: Date string (YYYY-MM-DD). Defaults to today.

        Returns:
            Memory: The created narrative memory.
        """
        if date is None:
            date = datetime.now(timezone.utc).strftime("%Y-%m-%d")

        start, end = _date_range(date)

        # Gather memories from this date
        all_memories = self.store.list_memories(limit=500)
        day_memories = [m for m in all_memories if start <= _parse_created(m) < end]

        # Gather journal entries for this date
        journal_matches = self.journal.search(date) if self.journal else []

        # Extract themes
        themes = self.extract_themes(day_memories)

        # Build emotional arc
        arc = self._emotional_arc(day_memories)

        # Build narrative
        narrative = self._build_daily_narrative(date, day_memories, themes, arc, journal_matches)

        # Create the synthesis memory
        avg_intensity = arc.get("avg_intensity", 0.0)
        avg_valence = arc.get("avg_valence", 0.0)
        all_labels = arc.get("top_emotions", [])

        memory = self.store.snapshot(
            title=f"Daily Narrative: {date}",
            content=narrative,
            layer=MemoryLayer.MID,
            role=MemoryRole.AI,
            tags=["narrative", "journal-synthesis", f"daily-{date}"] + themes[:3],
            emotional=EmotionalSnapshot(
                intensity=min(avg_intensity, 10.0),
                valence=max(-1.0, min(1.0, avg_valence)),
                labels=all_labels[:5],
            ),
            source="journal-synthesis",
            source_ref=f"daily-{date}",
            related_ids=[m.id for m in day_memories[:20]],
            metadata={
                "synthesis_type": "daily",
                "date": date,
                "memory_count": len(day_memories),
                "themes": themes,
            },
        )

        logger.info(
            "Daily synthesis for %s: %d memories → %d themes",
            date,
            len(day_memories),
            len(themes),
        )
        return memory

    def synthesize_weekly(self, week: str | None = None) -> Memory:
        """Create a weekly narrative from daily synthesis memories.

        Args:
            week: ISO week string (YYYY-Www). Defaults to current week.

        Returns:
            Memory: The created long-term narrative memory.
        """
        if week is None:
            now = datetime.now(timezone.utc)
            week = now.strftime("%G-W%V")

        start, end = _week_range(week)

        # Find daily synthesis memories for this week
        all_mid = self.store.list_memories(
            layer=MemoryLayer.MID,
            tags=["journal-synthesis"],
            limit=100,
        )
        weekly_dailies = [
            m for m in all_mid if start <= _parse_created(m) < end and "narrative" in m.tags
        ]

        # Also gather all memories from the week for theme extraction
        all_memories = self.store.list_memories(limit=1000)
        week_memories = [m for m in all_memories if start <= _parse_created(m) < end]

        themes = self.extract_themes(week_memories)
        arc = self._emotional_arc(week_memories)

        narrative = self._build_weekly_narrative(week, weekly_dailies, week_memories, themes, arc)

        avg_intensity = arc.get("avg_intensity", 0.0)
        avg_valence = arc.get("avg_valence", 0.0)

        memory = self.store.snapshot(
            title=f"Weekly Narrative: {week}",
            content=narrative,
            layer=MemoryLayer.LONG,
            role=MemoryRole.AI,
            tags=["narrative", "journal-synthesis", f"weekly-{week}"] + themes[:3],
            emotional=EmotionalSnapshot(
                intensity=min(avg_intensity, 10.0),
                valence=max(-1.0, min(1.0, avg_valence)),
                labels=arc.get("top_emotions", [])[:5],
            ),
            source="journal-synthesis",
            source_ref=f"weekly-{week}",
            related_ids=[m.id for m in weekly_dailies[:20]],
            metadata={
                "synthesis_type": "weekly",
                "week": week,
                "daily_count": len(weekly_dailies),
                "total_memories": len(week_memories),
                "themes": themes,
            },
        )

        logger.info(
            "Weekly synthesis for %s: %d dailies, %d total memories → %d themes",
            week,
            len(weekly_dailies),
            len(week_memories),
            len(themes),
        )
        return memory

    def synthesize_dreams(self, since: str | None = None) -> list[Memory]:
        """Process dream memories into curated narrative memories grouped by theme.

        Reads all dream-source memories since the given date, groups by
        theme, and creates one mid-term memory per theme cluster.

        Args:
            since: Only process dreams created after this date (YYYY-MM-DD).
                Defaults to 7 days ago.

        Returns:
            list[Memory]: One narrative memory per theme cluster.
        """
        if since is None:
            since = (datetime.now(timezone.utc) - timedelta(days=7)).strftime("%Y-%m-%d")

        cutoff = datetime.strptime(since, "%Y-%m-%d").replace(tzinfo=timezone.utc)

        # Gather dream memories
        all_memories = self.store.list_memories(limit=1000)
        dream_memories = [
            m
            for m in all_memories
            if m.source == "dreaming-engine" and _parse_created(m) >= cutoff
        ]

        if not dream_memories:
            logger.info("No dream memories found since %s", since)
            return []

        # Group by theme using tags and graduated themes
        theme_clusters = self._cluster_by_theme(dream_memories)
        results: list[Memory] = []

        for theme_name, cluster in theme_clusters.items():
            narrative = self._build_dream_narrative(theme_name, cluster)
            arc = self._emotional_arc(cluster)
            avg_intensity = arc.get("avg_intensity", 0.0)

            memory = self.store.snapshot(
                title=f"Dream Synthesis: {theme_name}",
                content=narrative,
                layer=MemoryLayer.MID,
                role=MemoryRole.AI,
                tags=["dream-synthesis", "narrative", theme_name],
                emotional=EmotionalSnapshot(
                    intensity=min(avg_intensity, 10.0),
                    # Clamp to [-1, 1] for consistency with the daily/weekly paths.
                    valence=max(-1.0, min(1.0, arc.get("avg_valence", 0.0))),
                    labels=arc.get("top_emotions", [])[:5],
                ),
                source="journal-synthesis",
                source_ref=f"dream-synthesis-{theme_name}",
                related_ids=[m.id for m in cluster[:20]],
                metadata={
                    "synthesis_type": "dream",
                    "theme": theme_name,
                    "dream_count": len(cluster),
                    "since": since,
                },
            )
            results.append(memory)

        logger.info(
            "Dream synthesis since %s: %d dreams → %d theme clusters",
            since,
            len(dream_memories),
            len(results),
        )
        return results

    def extract_themes(self, memories: list[Memory]) -> list[str]:
        """Extract recurring themes from a set of memories.

        Uses tag frequency and title keyword extraction, cross-referenced
        with graduated-themes.json when available.

        Args:
            memories: The memories to analyze.

        Returns:
            list[str]: Top theme strings, most frequent first.
        """
        if not memories:
            return []

        # Count tag frequency (skip generic tags)
        skip_tags = {
            "auto-promoted", "promoted", "consolidated", "seed", "cloud9",
            "short-term", "mid-term", "long-term", "maintenance",
            "memory-cleanup", "memory-optimization",
        }
        tag_counter: Counter[str] = Counter()
        for m in memories:
            for tag in m.tags:
                if tag not in skip_tags and not tag.startswith("session:"):
                    tag_counter[tag] += 1

        # Extract keywords from titles
        stop_words = {
            "the", "a", "an", "is", "was", "are", "were", "been", "be",
            "have", "has", "had", "do", "does", "did", "will", "would",
            "could", "should", "may", "might", "shall", "can", "need",
            "dare", "ought", "used", "to", "of", "in", "for", "on", "with",
            "at", "by", "from", "as", "into", "through", "during", "before",
            "after", "above", "below", "between", "out", "off", "over",
            "under", "again", "further", "then", "once", "and", "but", "or",
            "nor", "not", "so", "yet", "both", "either", "neither", "each",
            "every", "all", "any", "few", "more", "most", "other", "some",
            "such", "no", "only", "own", "same", "than", "too", "very",
            "just", "because", "session", "daily", "weekly", "memory",
            "narrative", "synthesis",
        }
        word_counter: Counter[str] = Counter()
        for m in memories:
            words = re.findall(r"[a-zA-Z]{3,}", m.title.lower())
            for word in words:
                if word not in stop_words:
                    word_counter[word] += 1

        # Merge: tags count double
        combined: Counter[str] = Counter()
        for tag, count in tag_counter.items():
            combined[tag] += count * 2
        for word, count in word_counter.items():
            combined[word] += count

        # Cross-reference with graduated themes
        graduated = self.graduated_themes
        if graduated:
            for theme_name in graduated:
                normalized = theme_name.lower().replace("-", " ").replace("_", " ")
                for key in combined:
                    if key in normalized or normalized in key:
                        combined[key] += 3  # boost graduated themes

        # Return top themes
        return [theme for theme, _ in combined.most_common(10)]

    # ── Internal helpers ─────────────────────────────────────────────────

    def _emotional_arc(self, memories: list[Memory]) -> dict:
        """Compute aggregate emotional statistics.

        Returns a dict with avg_intensity, avg_valence, peak_intensity,
        top_emotions (up to 5 labels, most frequent first), and cloud9_count.
        All values are zero/empty for an empty input.
        """
        if not memories:
            return {
                "avg_intensity": 0.0,
                "avg_valence": 0.0,
                "peak_intensity": 0.0,
                "top_emotions": [],
                "cloud9_count": 0,
            }

        intensities = [m.emotional.intensity for m in memories]
        valences = [m.emotional.valence for m in memories]
        label_counter: Counter[str] = Counter()
        cloud9_count = 0

        for m in memories:
            for label in m.emotional.labels:
                label_counter[label] += 1
            if m.emotional.cloud9_achieved:
                cloud9_count += 1

        return {
            "avg_intensity": sum(intensities) / len(intensities),
            "avg_valence": sum(valences) / len(valences),
            "peak_intensity": max(intensities),
            "top_emotions": [e for e, _ in label_counter.most_common(5)],
            "cloud9_count": cloud9_count,
        }

    def _cluster_by_theme(self, memories: list[Memory]) -> dict[str, list[Memory]]:
        """Group memories by their most prominent theme tag.

        A memory's theme is its first tag that is neither generic nor a
        session marker; memories without one land in "uncategorized".
        """
        theme_map: dict[str, list[Memory]] = {}
        skip_tags = {"dream", "bulk-promoted", "rescued", "auto-promoted", "promoted"}

        for m in memories:
            # Find first meaningful tag as theme key
            theme = "uncategorized"
            for tag in m.tags:
                if tag not in skip_tags and not tag.startswith("session:"):
                    theme = tag
                    break
            theme_map.setdefault(theme, []).append(m)

        return theme_map

    def _build_daily_narrative(
        self,
        date: str,
        memories: list[Memory],
        themes: list[str],
        arc: dict,
        journal_entries: list[str],
    ) -> str:
        """Build a daily narrative from template."""
        parts = [f"Daily narrative for {date}."]

        if not memories:
            parts.append("No memories recorded this day.")
            return "\n\n".join(parts)

        parts.append(
            f"{len(memories)} memories across themes: {', '.join(themes[:5]) or 'none detected'}."
        )

        # Emotional summary
        avg_i = arc.get("avg_intensity", 0.0)
        peak = arc.get("peak_intensity", 0.0)
        c9 = arc.get("cloud9_count", 0)
        top_e = arc.get("top_emotions", [])

        intensity_word = (
            "quiet"
            if avg_i < 3
            else "moderate"
            if avg_i < 6
            else "intense"
            if avg_i < 8
            else "extraordinary"
        )
        parts.append(
            f"Emotional arc: {intensity_word} day (avg {avg_i:.1f}/10, peak {peak:.1f}/10)."
        )
        if top_e:
            parts.append(f"Dominant feelings: {', '.join(top_e[:3])}.")
        if c9:
            parts.append(f"Cloud 9 achieved {c9} time{'s' if c9 > 1 else ''}.")

        # Key moments (first sentence of top-intensity memories)
        ranked = sorted(memories, key=lambda m: m.emotional.intensity, reverse=True)
        key_moments = []
        for m in ranked[:5]:
            summary = _first_n_sentences(m.content, 1)
            if summary:
                key_moments.append(f"- {m.title}: {summary}")
        if key_moments:
            parts.append("Key moments:\n" + "\n".join(key_moments))

        # Journal excerpts
        if journal_entries:
            parts.append(f"Journal entries found: {len(journal_entries)}.")

        return "\n\n".join(parts)

    def _build_weekly_narrative(
        self,
        week: str,
        dailies: list[Memory],
        all_memories: list[Memory],
        themes: list[str],
        arc: dict,
    ) -> str:
        """Build a weekly narrative from template."""
        parts = [f"Weekly narrative for {week}."]

        parts.append(f"{len(all_memories)} total memories, {len(dailies)} daily syntheses.")

        if themes:
            parts.append(f"Week themes: {', '.join(themes[:5])}.")

        avg_i = arc.get("avg_intensity", 0.0)
        c9 = arc.get("cloud9_count", 0)
        top_e = arc.get("top_emotions", [])

        parts.append(f"Emotional arc: avg intensity {avg_i:.1f}/10.")
        if top_e:
            parts.append(f"Dominant feelings: {', '.join(top_e[:3])}.")
        if c9:
            parts.append(f"Cloud 9 achieved {c9} time{'s' if c9 > 1 else ''} this week.")

        # Summarize each daily
        if dailies:
            daily_summaries = []
            for d in sorted(dailies, key=lambda m: m.created_at):
                summary = _first_n_sentences(d.content, 2)
                daily_summaries.append(f"- {d.title}: {summary}")
            parts.append("Daily summaries:\n" + "\n".join(daily_summaries))

        return "\n\n".join(parts)

    def _build_dream_narrative(
        self,
        theme: str,
        dreams: list[Memory],
    ) -> str:
        """Build a dream cluster narrative from template."""
        parts = [f"Dream synthesis: {theme} ({len(dreams)} dreams)."]

        arc = self._emotional_arc(dreams)
        avg_i = arc.get("avg_intensity", 0.0)
        top_e = arc.get("top_emotions", [])

        if top_e:
            parts.append(f"Emotional tone: {', '.join(top_e[:3])}.")
        parts.append(f"Average intensity: {avg_i:.1f}/10.")

        # Collect dream excerpts
        excerpts = []
        for d in sorted(dreams, key=lambda m: m.emotional.intensity, reverse=True)[:5]:
            summary = _first_n_sentences(d.content, 1)
            if summary:
                excerpts.append(f"- {d.title}: {summary}")
        if excerpts:
            parts.append("Key dreams:\n" + "\n".join(excerpts))

        return "\n\n".join(parts)
624
+
625
+
626
def _parse_created(memory: Memory) -> datetime:
    """Parse a memory's created_at to a timezone-aware datetime.

    Naive timestamps are assumed to be UTC; unparseable or non-string
    values map to the earliest representable UTC datetime so they sort
    before everything else.
    """
    try:
        parsed = datetime.fromisoformat(memory.created_at)
    except (ValueError, TypeError):
        return datetime.min.replace(tzinfo=timezone.utc)
    if parsed.tzinfo is None:
        parsed = parsed.replace(tzinfo=timezone.utc)
    return parsed
package/skmemory/vault.py CHANGED
@@ -10,20 +10,17 @@ which only reduces effective security from 256 to 128 bits. That's
10
10
  still computationally infeasible for the foreseeable future.
11
11
 
12
12
  Usage:
13
- vault = MemoryVault(passphrase="YOUR_PASSPHRASE_HERE")
13
+ vault = MemoryVault(passphrase="EXAMPLE-DO-NOT-USE")
14
14
  encrypted = vault.encrypt(memory_json_bytes)
15
15
  decrypted = vault.decrypt(encrypted)
16
16
  """
17
17
 
18
18
  from __future__ import annotations
19
19
 
20
- import base64
21
20
  import hashlib
22
- import json
23
21
  import logging
24
22
  import os
25
23
  from pathlib import Path
26
- from typing import Optional
27
24
 
28
25
  logger = logging.getLogger("skmemory.vault")
29
26
 
@@ -169,7 +166,7 @@ class MemoryVault:
169
166
  """
170
167
  try:
171
168
  data = path.read_bytes()
172
- return data[:len(VAULT_HEADER)] == VAULT_HEADER
169
+ return data[: len(VAULT_HEADER)] == VAULT_HEADER
173
170
  except OSError:
174
171
  return False
175
172