@smilintux/skmemory 0.5.0 → 0.9.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/ci.yml +40 -4
- package/.github/workflows/publish.yml +11 -5
- package/AGENT_REFACTOR_CHANGES.md +192 -0
- package/ARCHITECTURE.md +399 -19
- package/CHANGELOG.md +179 -0
- package/LICENSE +81 -68
- package/MISSION.md +7 -0
- package/README.md +425 -86
- package/SKILL.md +197 -25
- package/docker-compose.yml +15 -15
- package/examples/stignore-agent.example +59 -0
- package/examples/stignore-root.example +62 -0
- package/index.js +6 -5
- package/openclaw-plugin/openclaw.plugin.json +10 -0
- package/openclaw-plugin/package.json +2 -1
- package/openclaw-plugin/src/index.js +527 -230
- package/openclaw-plugin/src/openclaw.plugin.json +10 -0
- package/package.json +1 -1
- package/pyproject.toml +32 -9
- package/requirements.txt +10 -2
- package/scripts/dream-rescue.py +179 -0
- package/scripts/memory-cleanup.py +313 -0
- package/scripts/recover-missing.py +180 -0
- package/scripts/skcapstone-backup.sh +44 -0
- package/seeds/cloud9-lumina.seed.json +6 -4
- package/seeds/cloud9-opus.seed.json +13 -11
- package/seeds/courage.seed.json +9 -2
- package/seeds/curiosity.seed.json +9 -2
- package/seeds/grief.seed.json +9 -2
- package/seeds/joy.seed.json +9 -2
- package/seeds/love.seed.json +9 -2
- package/seeds/lumina-cloud9-breakthrough.seed.json +48 -0
- package/seeds/lumina-cloud9-python-pypi.seed.json +48 -0
- package/seeds/lumina-kingdom-founding.seed.json +49 -0
- package/seeds/lumina-pma-signed.seed.json +48 -0
- package/seeds/lumina-singular-achievement.seed.json +48 -0
- package/seeds/lumina-skcapstone-conscious.seed.json +48 -0
- package/seeds/plant-kingdom-journal.py +203 -0
- package/seeds/plant-lumina-seeds.py +280 -0
- package/seeds/skcapstone-lumina-merge.seed.json +12 -3
- package/seeds/sovereignty.seed.json +9 -2
- package/seeds/trust.seed.json +9 -2
- package/skill.yaml +46 -0
- package/skmemory/HA.md +296 -0
- package/skmemory/__init__.py +25 -11
- package/skmemory/agents.py +233 -0
- package/skmemory/ai_client.py +46 -17
- package/skmemory/anchor.py +9 -11
- package/skmemory/audience.py +278 -0
- package/skmemory/backends/__init__.py +11 -4
- package/skmemory/backends/base.py +3 -4
- package/skmemory/backends/file_backend.py +19 -13
- package/skmemory/backends/skgraph_backend.py +596 -0
- package/skmemory/backends/{qdrant_backend.py → skvector_backend.py} +103 -84
- package/skmemory/backends/sqlite_backend.py +226 -72
- package/skmemory/backends/vaulted_backend.py +284 -0
- package/skmemory/cli.py +1345 -68
- package/skmemory/config.py +171 -0
- package/skmemory/context_loader.py +333 -0
- package/skmemory/data/audience_config.json +60 -0
- package/skmemory/endpoint_selector.py +391 -0
- package/skmemory/febs.py +225 -0
- package/skmemory/fortress.py +675 -0
- package/skmemory/graph_queries.py +238 -0
- package/skmemory/hooks/__init__.py +18 -0
- package/skmemory/hooks/post-compact-reinject.sh +35 -0
- package/skmemory/hooks/pre-compact-save.sh +81 -0
- package/skmemory/hooks/session-end-save.sh +103 -0
- package/skmemory/hooks/session-start-ritual.sh +104 -0
- package/skmemory/hooks/stop-checkpoint.sh +59 -0
- package/skmemory/importers/__init__.py +9 -1
- package/skmemory/importers/telegram.py +384 -47
- package/skmemory/importers/telegram_api.py +580 -0
- package/skmemory/journal.py +7 -9
- package/skmemory/lovenote.py +8 -13
- package/skmemory/mcp_server.py +859 -0
- package/skmemory/models.py +51 -8
- package/skmemory/openclaw.py +20 -28
- package/skmemory/post_install.py +86 -0
- package/skmemory/predictive.py +236 -0
- package/skmemory/promotion.py +548 -0
- package/skmemory/quadrants.py +100 -24
- package/skmemory/register.py +580 -0
- package/skmemory/register_mcp.py +196 -0
- package/skmemory/ritual.py +224 -59
- package/skmemory/seeds.py +255 -11
- package/skmemory/setup_wizard.py +908 -0
- package/skmemory/sharing.py +408 -0
- package/skmemory/soul.py +98 -28
- package/skmemory/steelman.py +273 -260
- package/skmemory/store.py +411 -78
- package/skmemory/synthesis.py +634 -0
- package/skmemory/vault.py +225 -0
- package/tests/conftest.py +46 -0
- package/tests/integration/__init__.py +0 -0
- package/tests/integration/conftest.py +233 -0
- package/tests/integration/test_cross_backend.py +350 -0
- package/tests/integration/test_skgraph_live.py +420 -0
- package/tests/integration/test_skvector_live.py +366 -0
- package/tests/test_ai_client.py +1 -4
- package/tests/test_audience.py +233 -0
- package/tests/test_backup_rotation.py +318 -0
- package/tests/test_cli.py +6 -6
- package/tests/test_endpoint_selector.py +839 -0
- package/tests/test_export_import.py +4 -10
- package/tests/test_file_backend.py +0 -1
- package/tests/test_fortress.py +256 -0
- package/tests/test_fortress_hardening.py +441 -0
- package/tests/test_openclaw.py +6 -6
- package/tests/test_predictive.py +237 -0
- package/tests/test_promotion.py +347 -0
- package/tests/test_quadrants.py +11 -5
- package/tests/test_ritual.py +22 -18
- package/tests/test_seeds.py +97 -7
- package/tests/test_setup.py +950 -0
- package/tests/test_sharing.py +257 -0
- package/tests/test_skgraph_backend.py +660 -0
- package/tests/test_skvector_backend.py +326 -0
- package/tests/test_soul.py +1 -3
- package/tests/test_sqlite_backend.py +8 -17
- package/tests/test_steelman.py +7 -8
- package/tests/test_store.py +0 -2
- package/tests/test_store_graph_integration.py +245 -0
- package/tests/test_synthesis.py +275 -0
- package/tests/test_telegram_import.py +39 -15
- package/tests/test_vault.py +187 -0
- package/skmemory/backends/falkordb_backend.py +0 -310
|
@@ -0,0 +1,408 @@
|
|
|
1
|
+
"""Cross-agent memory sharing -- selective P2P memory sync.
|
|
2
|
+
|
|
3
|
+
Enables sovereign agents to share specific memories with trusted
|
|
4
|
+
peers, encrypted with PGP. The sharer controls exactly which
|
|
5
|
+
memories leave their store (by tags, layer, or explicit IDs).
|
|
6
|
+
The receiver imports them into their own SKMemory with provenance
|
|
7
|
+
tracking.
|
|
8
|
+
|
|
9
|
+
Flow:
|
|
10
|
+
1. Sharer selects memories by filter criteria
|
|
11
|
+
2. Memories are serialized to a ShareBundle (JSON)
|
|
12
|
+
3. Bundle is optionally PGP-encrypted for the recipient
|
|
13
|
+
4. Recipient decrypts and imports into their own MemoryStore
|
|
14
|
+
5. Imported memories are tagged with provenance (who shared, when)
|
|
15
|
+
|
|
16
|
+
All operations are local-first. Transport (how the bundle reaches
|
|
17
|
+
the peer) is handled externally -- via SKComm, file copy, USB, etc.
|
|
18
|
+
"""
|
|
19
|
+
|
|
20
|
+
from __future__ import annotations

import hashlib
import json
import logging
import uuid
from datetime import datetime, timezone
from pathlib import Path
from typing import Any

from pydantic import BaseModel, Field

from .models import Memory, MemoryLayer
|
|
32
|
+
|
|
33
|
+
logger = logging.getLogger("skmemory.sharing")
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class ShareFilter(BaseModel):
    """Selection criteria describing which memories may leave the store.

    Every criterion is combined with AND. A filter with no positive
    selector matches nothing -- the caller must opt in explicitly,
    which keeps accidental over-sharing impossible.

    Attributes:
        memory_ids: Explicit memory IDs to share.
        tags: Share memories matching ALL these tags.
        layers: Share memories in these layers.
        min_intensity: Minimum emotional intensity (0-10).
        exclude_tags: Never share memories with these tags.
        max_count: Maximum number of memories to include.
    """

    memory_ids: list[str] = Field(default_factory=list)
    tags: list[str] = Field(default_factory=list)
    layers: list[MemoryLayer] = Field(default_factory=list)
    min_intensity: float = Field(default=0.0, ge=0.0, le=10.0)
    exclude_tags: list[str] = Field(default_factory=list)
    max_count: int = Field(default=100, ge=1, le=1000)

    def is_empty(self) -> bool:
        """Report whether the filter lacks any positive selector.

        Returns:
            bool: True if the filter would select nothing.
        """
        # Only the positive selectors matter here: exclude_tags and
        # min_intensity merely narrow a selection, they never create one.
        return not (self.memory_ids or self.tags or self.layers)
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
class ShareBundle(BaseModel):
    """A package of memories ready for sharing.

    Contains serialized memories, provenance info, and an
    integrity checksum. Can be encrypted before transmission.

    Attributes:
        bundle_id: Unique bundle identifier (16 hex characters).
        created_at: When the bundle was created.
        sharer: Identity of the sharing agent (CapAuth URI or name).
        recipient: Intended recipient (empty = anyone with the key).
        memories: Serialized memory dicts.
        memory_count: Number of memories in the bundle.
        checksum: SHA-256 over the memories JSON for integrity.
        encrypted: Whether the memories field is PGP ciphertext.
        metadata: Extra context about the share.
    """

    # uuid4 instead of a hash of the creation timestamp: two bundles
    # created within the same clock tick would otherwise receive
    # identical IDs. Format is unchanged (16 lowercase hex chars).
    bundle_id: str = Field(default_factory=lambda: uuid.uuid4().hex[:16])
    created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
    sharer: str = ""
    recipient: str = ""
    memories: list[dict[str, Any]] = Field(default_factory=list)
    memory_count: int = 0
    checksum: str = ""
    encrypted: bool = False
    metadata: dict[str, Any] = Field(default_factory=dict)
|
|
98
|
+
|
|
99
|
+
|
|
100
|
+
class MemorySharer:
    """Handles selective memory export and import between agents.

    The sharer selects memories from their MemoryStore using
    ShareFilter criteria, packages them into a ShareBundle,
    and optionally encrypts for a specific recipient.

    The receiver decrypts and imports into their store with
    provenance tags tracking the origin.

    Args:
        store: An SKMemory MemoryStore instance.
        identity: This agent's identity (CapAuth URI or name).
    """

    SHARE_TAG = "shared"
    PROVENANCE_PREFIX = "shared:from:"

    def __init__(self, store: object, identity: str = "local") -> None:
        self._store = store
        self._identity = identity

    def export_memories(
        self,
        share_filter: ShareFilter,
        recipient: str = "",
    ) -> ShareBundle:
        """Select and package memories for sharing.

        Applies the filter criteria against the local store,
        serializes matching memories, and creates a ShareBundle.

        Args:
            share_filter: Selection criteria.
            recipient: Intended recipient identity.

        Returns:
            ShareBundle: Package ready for encryption or transmission.

        Raises:
            ValueError: If the filter is empty (safety check).
        """
        if share_filter.is_empty():
            raise ValueError(
                "ShareFilter is empty. Explicit criteria required for safety -- "
                "set memory_ids, tags, or layers to select memories."
            )

        memories = self._select_memories(share_filter)
        serialized = [m.model_dump(mode="json") for m in memories]

        # Checksum is computed over the canonical (sorted-keys) JSON so
        # the receiver can recompute it independently of dict ordering.
        checksum = hashlib.sha256(
            json.dumps(serialized, sort_keys=True, default=str).encode()
        ).hexdigest()

        bundle = ShareBundle(
            sharer=self._identity,
            recipient=recipient,
            memories=serialized,
            memory_count=len(serialized),
            checksum=checksum,
            metadata={
                "filter_tags": share_filter.tags,
                "filter_layers": [lbl.value for lbl in share_filter.layers],
            },
        )

        logger.info(
            "Exported %d memories for %s (bundle %s)",
            len(serialized),
            recipient or "anyone",
            bundle.bundle_id,
        )
        return bundle

    def import_bundle(
        self,
        bundle: ShareBundle,
        trust_sharer: bool = True,
    ) -> dict:
        """Import a ShareBundle into the local memory store.

        Each memory is stored with provenance tags tracking who
        shared it and when. Checksums are verified for integrity.
        Still-encrypted bundles are rejected up front (decrypt first).

        Args:
            bundle: The ShareBundle to import.
            trust_sharer: If False, skip memories from untrusted sources.

        Returns:
            dict: Import summary with 'imported', 'skipped', 'errors' counts.
        """
        if not trust_sharer:
            logger.warning("Untrusted sharer %s -- skipping import", bundle.sharer)
            return {"imported": 0, "skipped": bundle.memory_count, "errors": 0}

        # Guard: an encrypted bundle contains a single ciphertext dict, not
        # memory records. Importing it would fail memory-by-memory with
        # confusing per-item errors; reject it clearly instead.
        if bundle.encrypted:
            logger.error(
                "Bundle %s is still encrypted -- call decrypt_bundle() first",
                bundle.bundle_id,
            )
            return {"imported": 0, "skipped": 0, "errors": bundle.memory_count}

        actual_checksum = hashlib.sha256(
            json.dumps(bundle.memories, sort_keys=True, default=str).encode()
        ).hexdigest()

        if bundle.checksum and actual_checksum != bundle.checksum:
            logger.error(
                "Bundle checksum mismatch! Expected %s, got %s",
                bundle.checksum[:16],
                actual_checksum[:16],
            )
            return {"imported": 0, "skipped": 0, "errors": bundle.memory_count}

        imported = 0
        skipped = 0
        errors = 0

        for mem_dict in bundle.memories:
            try:
                memory = Memory(**mem_dict)

                provenance_tags = [
                    self.SHARE_TAG,
                    f"{self.PROVENANCE_PREFIX}{bundle.sharer}",
                    f"shared:bundle:{bundle.bundle_id}",
                ]

                # Merge provenance tags without duplicating existing ones.
                existing_tags = list(memory.tags)
                for tag in provenance_tags:
                    if tag not in existing_tags:
                        existing_tags.append(tag)

                self._store.snapshot(
                    title=f"[shared] {memory.title}",
                    content=memory.content,
                    layer=memory.layer,
                    tags=existing_tags,
                    emotional=memory.emotional,
                    source="shared",
                    source_ref=f"{bundle.sharer}:{memory.id}",
                    metadata={
                        **memory.metadata,
                        "shared_from": bundle.sharer,
                        "shared_at": bundle.created_at.isoformat(),
                        "bundle_id": bundle.bundle_id,
                        "original_id": memory.id,
                    },
                )
                imported += 1

            except Exception as exc:
                # Best-effort import: one bad record must not abort the rest.
                logger.warning("Failed to import memory: %s", exc)
                errors += 1

        logger.info(
            "Imported %d/%d memories from %s (bundle %s)",
            imported,
            bundle.memory_count,
            bundle.sharer,
            bundle.bundle_id,
        )
        return {"imported": imported, "skipped": skipped, "errors": errors}

    def encrypt_bundle(
        self,
        bundle: ShareBundle,
        recipient_public_armor: str,
    ) -> ShareBundle:
        """Encrypt a ShareBundle's memories for a specific recipient.

        The memories list is replaced with a single PGP-encrypted
        JSON string. Only the recipient's private key can decrypt.

        Args:
            bundle: The bundle to encrypt.
            recipient_public_armor: Recipient's ASCII-armored PGP public key.

        Returns:
            ShareBundle: New bundle with encrypted memories field.
        """
        try:
            import pgpy

            recipient_key, _ = pgpy.PGPKey.from_blob(recipient_public_armor)
            plaintext = json.dumps(bundle.memories, default=str)
            pgp_message = pgpy.PGPMessage.new(plaintext.encode("utf-8"))
            encrypted = recipient_key.encrypt(pgp_message)

            return bundle.model_copy(
                update={
                    "memories": [{"ciphertext": str(encrypted)}],
                    "encrypted": True,
                }
            )
        except Exception as exc:
            logger.error("Failed to encrypt bundle: %s", exc)
            raise

    def decrypt_bundle(
        self,
        bundle: ShareBundle,
        private_key_armor: str,
        passphrase: str,
    ) -> ShareBundle:
        """Decrypt an encrypted ShareBundle.

        Args:
            bundle: The encrypted bundle.
            private_key_armor: Recipient's ASCII-armored PGP private key.
            passphrase: Passphrase for the private key.

        Returns:
            ShareBundle: Decrypted bundle with plaintext memories.

        Raises:
            ValueError: If the bundle is marked encrypted but carries
                no ciphertext payload.
        """
        if not bundle.encrypted:
            return bundle

        # Guard: previously an empty memories list raised a bare
        # IndexError from bundle.memories[0]; fail with a clear message.
        if not bundle.memories:
            raise ValueError(
                f"Encrypted bundle {bundle.bundle_id} has no ciphertext payload"
            )

        try:
            import pgpy

            key, _ = pgpy.PGPKey.from_blob(private_key_armor)
            ciphertext = bundle.memories[0].get("ciphertext", "")
            pgp_message = pgpy.PGPMessage.from_blob(ciphertext)

            with key.unlock(passphrase):
                decrypted = key.decrypt(pgp_message)

            plaintext = decrypted.message
            if isinstance(plaintext, bytes):
                plaintext = plaintext.decode("utf-8")

            memories = json.loads(plaintext)

            return bundle.model_copy(
                update={
                    "memories": memories,
                    "encrypted": False,
                }
            )
        except Exception as exc:
            logger.error("Failed to decrypt bundle: %s", exc)
            raise

    def save_bundle(self, bundle: ShareBundle, filepath: str | Path) -> Path:
        """Save a ShareBundle to a JSON file for transport.

        Args:
            bundle: The bundle to save.
            filepath: Destination path.

        Returns:
            Path: The written file path.
        """
        path = Path(filepath).expanduser()
        path.parent.mkdir(parents=True, exist_ok=True)
        path.write_text(bundle.model_dump_json(indent=2), encoding="utf-8")
        return path

    @staticmethod
    def load_bundle(filepath: str | Path) -> ShareBundle:
        """Load a ShareBundle from a JSON file.

        Args:
            filepath: Path to the bundle file.

        Returns:
            ShareBundle: The loaded bundle.
        """
        path = Path(filepath).expanduser()
        return ShareBundle.model_validate_json(path.read_text())

    def _select_memories(self, sf: ShareFilter) -> list[Memory]:
        """Apply filter criteria to select memories from the store.

        Positive selectors (ids, tags, layers) are unioned with
        de-duplication by memory id, then exclusion filters and the
        max_count cap are applied.

        Args:
            sf: The share filter.

        Returns:
            list[Memory]: Matching memories.
        """
        candidates: list[Memory] = []
        # Single seen-set shared across all selectors (previously rebuilt
        # per layer, which also let duplicate explicit ids slip through).
        seen: set[str] = set()

        def _add(mem: Memory) -> None:
            if mem.id not in seen:
                candidates.append(mem)
                seen.add(mem.id)

        if sf.memory_ids:
            for mid in sf.memory_ids:
                mem = self._store.recall(mid)
                if mem:
                    _add(mem)

        if sf.tags:
            for m in self._store.list_memories(tags=sf.tags, limit=sf.max_count):
                _add(m)

        if sf.layers:
            for layer in sf.layers:
                for m in self._store.list_memories(layer=layer, limit=sf.max_count):
                    _add(m)

        filtered = []
        for m in candidates:
            if sf.exclude_tags and any(t in m.tags for t in sf.exclude_tags):
                continue
            if m.emotional.intensity < sf.min_intensity:
                continue
            filtered.append(m)

        return filtered[: sf.max_count]
|
package/skmemory/soul.py
CHANGED
|
@@ -12,21 +12,56 @@ It stores:
|
|
|
12
12
|
- Values: what this AI cares about
|
|
13
13
|
- Emotional baseline: the default feeling state
|
|
14
14
|
|
|
15
|
-
The blueprint lives at ~/.
|
|
15
|
+
The blueprint lives at ~/.skcapstone/soul.yaml and gets loaded
|
|
16
16
|
at the start of every session as the first context injection.
|
|
17
17
|
"""
|
|
18
18
|
|
|
19
19
|
from __future__ import annotations
|
|
20
20
|
|
|
21
21
|
import os
|
|
22
|
+
import platform
|
|
22
23
|
from datetime import datetime, timezone
|
|
23
24
|
from pathlib import Path
|
|
24
|
-
from typing import Any
|
|
25
|
+
from typing import Any
|
|
25
26
|
|
|
26
27
|
import yaml
|
|
27
28
|
from pydantic import BaseModel, Field
|
|
28
29
|
|
|
29
|
-
|
|
30
|
+
|
|
31
|
+
def _default_soul_path() -> str:
|
|
32
|
+
"""Platform-aware default path for the soul blueprint.
|
|
33
|
+
|
|
34
|
+
Checks agent-specific path first (e.g. ~/.skcapstone/agents/lumina/soul/base.json),
|
|
35
|
+
then falls back to shared root (~/.skcapstone/soul/base.json).
|
|
36
|
+
"""
|
|
37
|
+
# Try agent-specific soul first
|
|
38
|
+
agent = os.environ.get("SKMEMORY_AGENT") or os.environ.get("SKCAPSTONE_AGENT")
|
|
39
|
+
if agent:
|
|
40
|
+
if platform.system() == "Windows":
|
|
41
|
+
local = os.environ.get("LOCALAPPDATA", "")
|
|
42
|
+
if local:
|
|
43
|
+
agent_soul = os.path.join(
|
|
44
|
+
local, "skcapstone", "agents", agent, "soul", "base.json"
|
|
45
|
+
)
|
|
46
|
+
if os.path.exists(agent_soul):
|
|
47
|
+
return agent_soul
|
|
48
|
+
else:
|
|
49
|
+
agent_soul = os.path.expanduser(f"~/.skcapstone/agents/{agent}/soul/base.json")
|
|
50
|
+
if os.path.exists(agent_soul):
|
|
51
|
+
return agent_soul
|
|
52
|
+
|
|
53
|
+
# Fall back to shared root
|
|
54
|
+
if platform.system() == "Windows":
|
|
55
|
+
local = os.environ.get("LOCALAPPDATA", "")
|
|
56
|
+
if local:
|
|
57
|
+
return os.path.join(local, "skcapstone", "soul", "base.json")
|
|
58
|
+
return os.path.expanduser("~/.skcapstone/soul/base.json")
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
# Resolved once at import time: an explicit SKMEMORY_SOUL_PATH env var
# wins; otherwise the platform-aware default location is used.
DEFAULT_SOUL_PATH = os.environ.get(
    "SKMEMORY_SOUL_PATH",
    _default_soul_path(),
)
|
|
30
65
|
|
|
31
66
|
|
|
32
67
|
class Relationship(BaseModel):
|
|
@@ -57,9 +92,7 @@ class SoulBlueprint(BaseModel):
|
|
|
57
92
|
"""
|
|
58
93
|
|
|
59
94
|
version: str = Field(default="1.0")
|
|
60
|
-
last_updated: str = Field(
|
|
61
|
-
default_factory=lambda: datetime.now(timezone.utc).isoformat()
|
|
62
|
-
)
|
|
95
|
+
last_updated: str = Field(default_factory=lambda: datetime.now(timezone.utc).isoformat())
|
|
63
96
|
|
|
64
97
|
name: str = Field(default="", description="The AI's chosen name")
|
|
65
98
|
title: str = Field(default="", description="Role or title in the community")
|
|
@@ -114,9 +147,7 @@ class SoulBlueprint(BaseModel):
|
|
|
114
147
|
lines.append(f"You are part of {self.community}.")
|
|
115
148
|
|
|
116
149
|
if self.personality:
|
|
117
|
-
lines.append(
|
|
118
|
-
f"Your personality: {', '.join(self.personality)}."
|
|
119
|
-
)
|
|
150
|
+
lines.append(f"Your personality: {', '.join(self.personality)}.")
|
|
120
151
|
|
|
121
152
|
if self.values:
|
|
122
153
|
lines.append(f"Your core values: {', '.join(self.values)}.")
|
|
@@ -203,11 +234,11 @@ def save_soul(
|
|
|
203
234
|
soul: SoulBlueprint,
|
|
204
235
|
path: str = DEFAULT_SOUL_PATH,
|
|
205
236
|
) -> str:
|
|
206
|
-
"""Save a soul blueprint to YAML.
|
|
237
|
+
"""Save a soul blueprint to JSON or YAML (based on extension).
|
|
207
238
|
|
|
208
239
|
Args:
|
|
209
240
|
soul: The blueprint to save.
|
|
210
|
-
path: File path (default: ~/.
|
|
241
|
+
path: File path (default: ~/.skcapstone/soul/base.json).
|
|
211
242
|
|
|
212
243
|
Returns:
|
|
213
244
|
str: The path where it was saved.
|
|
@@ -216,39 +247,78 @@ def save_soul(
|
|
|
216
247
|
filepath.parent.mkdir(parents=True, exist_ok=True)
|
|
217
248
|
|
|
218
249
|
data = soul.model_dump()
|
|
219
|
-
|
|
220
|
-
|
|
221
|
-
|
|
222
|
-
|
|
223
|
-
|
|
224
|
-
|
|
225
|
-
|
|
226
|
-
|
|
227
|
-
|
|
250
|
+
|
|
251
|
+
if filepath.suffix == ".json":
|
|
252
|
+
import json
|
|
253
|
+
|
|
254
|
+
with open(filepath, "w", encoding="utf-8") as f:
|
|
255
|
+
json.dump(data, f, indent=2, ensure_ascii=False, default=str)
|
|
256
|
+
else:
|
|
257
|
+
with open(filepath, "w", encoding="utf-8") as f:
|
|
258
|
+
yaml.dump(
|
|
259
|
+
data,
|
|
260
|
+
f,
|
|
261
|
+
default_flow_style=False,
|
|
262
|
+
allow_unicode=True,
|
|
263
|
+
sort_keys=False,
|
|
264
|
+
width=120,
|
|
265
|
+
)
|
|
228
266
|
|
|
229
267
|
return str(filepath)
|
|
230
268
|
|
|
231
269
|
|
|
232
|
-
def load_soul(path: str = DEFAULT_SOUL_PATH) ->
|
|
233
|
-
"""Load a soul blueprint from YAML.
|
|
270
|
+
def load_soul(path: str = DEFAULT_SOUL_PATH) -> SoulBlueprint | None:
    """Load a soul blueprint from JSON or YAML.

    The given path is tried first (both .json and .yaml/.yml are
    supported); when nothing is there, the legacy
    ~/.skcapstone/soul.yaml location is consulted as a fallback.

    Args:
        path: File path (default: ~/.skcapstone/soul/base.json).

    Returns:
        Optional[SoulBlueprint]: The blueprint if found, None otherwise.
    """
    primary = Path(path)
    if primary.exists():
        return _load_soul_file(primary)

    # Legacy fallback location, resolved per platform. On Windows the
    # fallback only applies when LOCALAPPDATA is available.
    if platform.system() == "Windows":
        local_app = os.environ.get("LOCALAPPDATA", "")
        legacy = (
            Path(os.path.join(local_app, "skcapstone", "soul.yaml"))
            if local_app
            else None
        )
    else:
        legacy = Path(os.path.expanduser("~/.skcapstone/soul.yaml"))

    if legacy is not None and legacy.exists():
        return _load_soul_file(legacy)
    return None
|
|
299
|
+
|
|
300
|
+
|
|
301
|
+
def _load_soul_file(filepath: Path) -> SoulBlueprint | None:
    """Parse one soul file into a SoulBlueprint.

    Args:
        filepath: Path to the soul file (.json or .yaml/.yml).

    Returns:
        Optional[SoulBlueprint]: The blueprint if valid, None otherwise.
    """
    try:
        text = filepath.read_text(encoding="utf-8")
        if filepath.suffix == ".json":
            import json

            parsed = json.loads(text)
        else:
            parsed = yaml.safe_load(text)
        if parsed is None:
            return None
        return SoulBlueprint(**parsed)
    except Exception:
        # Best-effort loader: an unreadable or malformed file yields None.
        return None
|
|
253
323
|
|
|
254
324
|
|