@smilintux/skmemory 0.5.0 → 0.7.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (87)
  1. package/.github/workflows/ci.yml +39 -3
  2. package/.github/workflows/publish.yml +13 -6
  3. package/AGENT_REFACTOR_CHANGES.md +192 -0
  4. package/ARCHITECTURE.md +101 -19
  5. package/CHANGELOG.md +153 -0
  6. package/LICENSE +81 -68
  7. package/MISSION.md +7 -0
  8. package/README.md +419 -86
  9. package/SKILL.md +197 -25
  10. package/docker-compose.yml +15 -15
  11. package/index.js +6 -5
  12. package/openclaw-plugin/openclaw.plugin.json +10 -0
  13. package/openclaw-plugin/src/index.ts +255 -0
  14. package/openclaw-plugin/src/openclaw.plugin.json +10 -0
  15. package/package.json +1 -1
  16. package/pyproject.toml +29 -9
  17. package/requirements.txt +10 -2
  18. package/seeds/cloud9-opus.seed.json +7 -7
  19. package/seeds/lumina-cloud9-breakthrough.seed.json +46 -0
  20. package/seeds/lumina-cloud9-python-pypi.seed.json +46 -0
  21. package/seeds/lumina-kingdom-founding.seed.json +47 -0
  22. package/seeds/lumina-pma-signed.seed.json +46 -0
  23. package/seeds/lumina-singular-achievement.seed.json +46 -0
  24. package/seeds/lumina-skcapstone-conscious.seed.json +46 -0
  25. package/seeds/plant-kingdom-journal.py +203 -0
  26. package/seeds/plant-lumina-seeds.py +280 -0
  27. package/skill.yaml +46 -0
  28. package/skmemory/HA.md +296 -0
  29. package/skmemory/__init__.py +12 -1
  30. package/skmemory/agents.py +233 -0
  31. package/skmemory/ai_client.py +40 -0
  32. package/skmemory/anchor.py +4 -2
  33. package/skmemory/backends/__init__.py +11 -4
  34. package/skmemory/backends/file_backend.py +2 -1
  35. package/skmemory/backends/skgraph_backend.py +608 -0
  36. package/skmemory/backends/{qdrant_backend.py → skvector_backend.py} +99 -69
  37. package/skmemory/backends/sqlite_backend.py +122 -51
  38. package/skmemory/backends/vaulted_backend.py +286 -0
  39. package/skmemory/cli.py +1238 -29
  40. package/skmemory/config.py +173 -0
  41. package/skmemory/context_loader.py +335 -0
  42. package/skmemory/endpoint_selector.py +386 -0
  43. package/skmemory/fortress.py +685 -0
  44. package/skmemory/graph_queries.py +238 -0
  45. package/skmemory/importers/__init__.py +9 -1
  46. package/skmemory/importers/telegram.py +351 -43
  47. package/skmemory/importers/telegram_api.py +488 -0
  48. package/skmemory/journal.py +4 -2
  49. package/skmemory/lovenote.py +4 -2
  50. package/skmemory/mcp_server.py +706 -0
  51. package/skmemory/models.py +41 -0
  52. package/skmemory/openclaw.py +8 -8
  53. package/skmemory/predictive.py +232 -0
  54. package/skmemory/promotion.py +524 -0
  55. package/skmemory/register.py +454 -0
  56. package/skmemory/register_mcp.py +197 -0
  57. package/skmemory/ritual.py +121 -47
  58. package/skmemory/seeds.py +257 -8
  59. package/skmemory/setup_wizard.py +920 -0
  60. package/skmemory/sharing.py +402 -0
  61. package/skmemory/soul.py +71 -20
  62. package/skmemory/steelman.py +250 -263
  63. package/skmemory/store.py +271 -60
  64. package/skmemory/vault.py +228 -0
  65. package/tests/integration/__init__.py +0 -0
  66. package/tests/integration/conftest.py +233 -0
  67. package/tests/integration/test_cross_backend.py +355 -0
  68. package/tests/integration/test_skgraph_live.py +424 -0
  69. package/tests/integration/test_skvector_live.py +369 -0
  70. package/tests/test_backup_rotation.py +327 -0
  71. package/tests/test_cli.py +6 -6
  72. package/tests/test_endpoint_selector.py +801 -0
  73. package/tests/test_fortress.py +255 -0
  74. package/tests/test_fortress_hardening.py +444 -0
  75. package/tests/test_openclaw.py +5 -2
  76. package/tests/test_predictive.py +237 -0
  77. package/tests/test_promotion.py +340 -0
  78. package/tests/test_ritual.py +4 -4
  79. package/tests/test_seeds.py +96 -0
  80. package/tests/test_setup.py +835 -0
  81. package/tests/test_sharing.py +250 -0
  82. package/tests/test_skgraph_backend.py +667 -0
  83. package/tests/test_skvector_backend.py +326 -0
  84. package/tests/test_steelman.py +5 -5
  85. package/tests/test_store_graph_integration.py +245 -0
  86. package/tests/test_vault.py +186 -0
  87. package/skmemory/backends/falkordb_backend.py +0 -310
@@ -0,0 +1,402 @@
1
+ """Cross-agent memory sharing -- selective P2P memory sync.
2
+
3
+ Enables sovereign agents to share specific memories with trusted
4
+ peers, encrypted with PGP. The sharer controls exactly which
5
+ memories leave their store (by tags, layer, or explicit IDs).
6
+ The receiver imports them into their own SKMemory with provenance
7
+ tracking.
8
+
9
+ Flow:
10
+ 1. Sharer selects memories by filter criteria
11
+ 2. Memories are serialized to a ShareBundle (JSON)
12
+ 3. Bundle is optionally PGP-encrypted for the recipient
13
+ 4. Recipient decrypts and imports into their own MemoryStore
14
+ 5. Imported memories are tagged with provenance (who shared, when)
15
+
16
+ All operations are local-first. Transport (how the bundle reaches
17
+ the peer) is handled externally -- via SKComm, file copy, USB, etc.
18
+ """
19
+
20
from __future__ import annotations

import hashlib
import json
import logging
import secrets
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Optional

from pydantic import BaseModel, Field

from .models import Memory, MemoryLayer
32
+
33
+ logger = logging.getLogger("skmemory.sharing")
34
+
35
+
36
class ShareFilter(BaseModel):
    """Selection criteria deciding which memories leave the store.

    All criteria are combined with AND. An empty filter selects
    nothing -- sharing always requires an explicit choice, for safety.

    Attributes:
        memory_ids: Explicit memory IDs to share.
        tags: Share memories matching ALL these tags.
        layers: Share memories in these layers.
        min_intensity: Minimum emotional intensity (0-10).
        exclude_tags: Never share memories with these tags.
        max_count: Maximum number of memories to include.
    """

    memory_ids: list[str] = Field(default_factory=list)
    tags: list[str] = Field(default_factory=list)
    layers: list[MemoryLayer] = Field(default_factory=list)
    min_intensity: float = Field(default=0.0, ge=0.0, le=10.0)
    exclude_tags: list[str] = Field(default_factory=list)
    max_count: int = Field(default=100, ge=1, le=1000)

    def is_empty(self) -> bool:
        """Report whether the filter has no positive selection criteria.

        Returns:
            bool: True when none of memory_ids, tags, or layers is set,
            i.e. the filter would select nothing.
        """
        return not (self.memory_ids or self.tags or self.layers)
65
+
66
+
67
class ShareBundle(BaseModel):
    """A package of memories ready for sharing.

    Contains serialized memories, provenance info, and an
    integrity checksum. Can be encrypted before transmission.

    Attributes:
        bundle_id: Unique bundle identifier (16 hex chars).
        created_at: When the bundle was created.
        sharer: Identity of the sharing agent (CapAuth URI or name).
        recipient: Intended recipient (empty = anyone with the key).
        memories: Serialized memory dicts.
        memory_count: Number of memories in the bundle.
        checksum: SHA-256 over the memories JSON for integrity.
        encrypted: Whether the memories field is PGP ciphertext.
        metadata: Extra context about the share.
    """

    # secrets.token_hex(8) yields a random 16-hex-char ID. The previous
    # factory hashed a timestamp, so two bundles created within the same
    # clock tick would silently share an ID; a CSPRNG token removes that
    # collision risk while keeping the same ID shape for callers.
    bundle_id: str = Field(default_factory=lambda: secrets.token_hex(8))
    created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
    sharer: str = ""
    recipient: str = ""
    memories: list[dict[str, Any]] = Field(default_factory=list)
    memory_count: int = 0
    checksum: str = ""
    encrypted: bool = False
    metadata: dict[str, Any] = Field(default_factory=dict)
98
+
99
+
100
class MemorySharer:
    """Handles selective memory export and import between agents.

    The sharer selects memories from their MemoryStore using
    ShareFilter criteria, packages them into a ShareBundle,
    and optionally encrypts for a specific recipient.

    The receiver decrypts and imports into their store with
    provenance tags tracking the origin.

    Args:
        store: An SKMemory MemoryStore instance.
        identity: This agent's identity (CapAuth URI or name).
    """

    SHARE_TAG = "shared"
    PROVENANCE_PREFIX = "shared:from:"

    def __init__(self, store: object, identity: str = "local") -> None:
        self._store = store
        self._identity = identity

    def export_memories(
        self,
        share_filter: ShareFilter,
        recipient: str = "",
    ) -> ShareBundle:
        """Select and package memories for sharing.

        Applies the filter criteria against the local store,
        serializes matching memories, and creates a ShareBundle.

        Args:
            share_filter: Selection criteria.
            recipient: Intended recipient identity.

        Returns:
            ShareBundle: Package ready for encryption or transmission.

        Raises:
            ValueError: If the filter is empty (safety check).
        """
        if share_filter.is_empty():
            raise ValueError(
                "ShareFilter is empty. Explicit criteria required for safety -- "
                "set memory_ids, tags, or layers to select memories."
            )

        memories = self._select_memories(share_filter)
        serialized = [m.model_dump(mode="json") for m in memories]

        # Checksum over the canonical (sorted-keys) JSON so the receiver
        # can verify the bundle was not altered in transit.
        checksum = hashlib.sha256(
            json.dumps(serialized, sort_keys=True, default=str).encode()
        ).hexdigest()

        bundle = ShareBundle(
            sharer=self._identity,
            recipient=recipient,
            memories=serialized,
            memory_count=len(serialized),
            checksum=checksum,
            metadata={
                "filter_tags": share_filter.tags,
                # `layer` (not `l`) to avoid an ambiguous single-letter name.
                "filter_layers": [layer.value for layer in share_filter.layers],
            },
        )

        logger.info(
            "Exported %d memories for %s (bundle %s)",
            len(serialized), recipient or "anyone", bundle.bundle_id,
        )
        return bundle

    def import_bundle(
        self,
        bundle: ShareBundle,
        trust_sharer: bool = True,
    ) -> dict:
        """Import a ShareBundle into the local memory store.

        Each memory is stored with provenance tags tracking who
        shared it and when. Checksums are verified for integrity.
        Encrypted bundles must be decrypted first (see decrypt_bundle).

        Args:
            bundle: The ShareBundle to import.
            trust_sharer: If False, skip memories from untrusted sources.

        Returns:
            dict: Import summary with 'imported', 'skipped', 'errors' counts.
        """
        if not trust_sharer:
            logger.warning("Untrusted sharer %s -- skipping import", bundle.sharer)
            return {"imported": 0, "skipped": bundle.memory_count, "errors": 0}

        # Fail fast on a still-encrypted bundle: the checksum was computed
        # over the plaintext memories, and Memory(**{"ciphertext": ...})
        # would fail for every entry anyway. Surface one clear error instead.
        if bundle.encrypted:
            logger.error(
                "Bundle %s is encrypted -- call decrypt_bundle() before import",
                bundle.bundle_id,
            )
            return {"imported": 0, "skipped": 0, "errors": bundle.memory_count}

        actual_checksum = hashlib.sha256(
            json.dumps(bundle.memories, sort_keys=True, default=str).encode()
        ).hexdigest()

        if bundle.checksum and actual_checksum != bundle.checksum:
            logger.error(
                "Bundle checksum mismatch! Expected %s, got %s",
                bundle.checksum[:16], actual_checksum[:16],
            )
            return {"imported": 0, "skipped": 0, "errors": bundle.memory_count}

        imported = 0
        skipped = 0  # reserved for future per-memory trust filtering
        errors = 0

        for mem_dict in bundle.memories:
            try:
                memory = Memory(**mem_dict)

                # Provenance: mark who shared it and from which bundle so
                # the receiver can trace (or purge) imported memories later.
                provenance_tags = [
                    self.SHARE_TAG,
                    f"{self.PROVENANCE_PREFIX}{bundle.sharer}",
                    f"shared:bundle:{bundle.bundle_id}",
                ]

                existing_tags = list(memory.tags)
                for tag in provenance_tags:
                    if tag not in existing_tags:
                        existing_tags.append(tag)

                self._store.snapshot(
                    title=f"[shared] {memory.title}",
                    content=memory.content,
                    layer=memory.layer,
                    tags=existing_tags,
                    emotional=memory.emotional,
                    source="shared",
                    source_ref=f"{bundle.sharer}:{memory.id}",
                    metadata={
                        **memory.metadata,
                        "shared_from": bundle.sharer,
                        "shared_at": bundle.created_at.isoformat(),
                        "bundle_id": bundle.bundle_id,
                        "original_id": memory.id,
                    },
                )
                imported += 1

            except Exception as exc:
                # Best-effort import: one bad memory must not abort the rest.
                logger.warning("Failed to import memory: %s", exc)
                errors += 1

        logger.info(
            "Imported %d/%d memories from %s (bundle %s)",
            imported, bundle.memory_count, bundle.sharer, bundle.bundle_id,
        )
        return {"imported": imported, "skipped": skipped, "errors": errors}

    def encrypt_bundle(
        self,
        bundle: ShareBundle,
        recipient_public_armor: str,
    ) -> ShareBundle:
        """Encrypt a ShareBundle's memories for a specific recipient.

        The memories list is replaced with a single PGP-encrypted
        JSON string. Only the recipient's private key can decrypt.

        Args:
            bundle: The bundle to encrypt.
            recipient_public_armor: Recipient's ASCII-armored PGP public key.

        Returns:
            ShareBundle: New bundle with encrypted memories field.

        Raises:
            Exception: Re-raised pgpy errors (logged first).
        """
        try:
            # pgpy is an optional dependency; imported lazily so plaintext
            # sharing works without it installed.
            import pgpy

            recipient_key, _ = pgpy.PGPKey.from_blob(recipient_public_armor)
            plaintext = json.dumps(bundle.memories, default=str)
            pgp_message = pgpy.PGPMessage.new(plaintext.encode("utf-8"))
            encrypted = recipient_key.encrypt(pgp_message)

            return bundle.model_copy(
                update={
                    "memories": [{"ciphertext": str(encrypted)}],
                    "encrypted": True,
                }
            )
        except Exception as exc:
            logger.error("Failed to encrypt bundle: %s", exc)
            raise

    def decrypt_bundle(
        self,
        bundle: ShareBundle,
        private_key_armor: str,
        passphrase: str,
    ) -> ShareBundle:
        """Decrypt an encrypted ShareBundle.

        Args:
            bundle: The encrypted bundle.
            private_key_armor: Recipient's ASCII-armored PGP private key.
            passphrase: Passphrase for the private key.

        Returns:
            ShareBundle: Decrypted bundle with plaintext memories.
            Returned unchanged if the bundle is not encrypted.

        Raises:
            Exception: Re-raised pgpy/parse errors (logged first).
        """
        if not bundle.encrypted:
            return bundle

        try:
            import pgpy

            key, _ = pgpy.PGPKey.from_blob(private_key_armor)
            ciphertext = bundle.memories[0].get("ciphertext", "")
            pgp_message = pgpy.PGPMessage.from_blob(ciphertext)

            # The key must be unlocked with its passphrase for the
            # duration of the decrypt call.
            with key.unlock(passphrase):
                decrypted = key.decrypt(pgp_message)

            plaintext = decrypted.message
            if isinstance(plaintext, bytes):
                plaintext = plaintext.decode("utf-8")

            memories = json.loads(plaintext)

            return bundle.model_copy(
                update={
                    "memories": memories,
                    "encrypted": False,
                }
            )
        except Exception as exc:
            logger.error("Failed to decrypt bundle: %s", exc)
            raise

    def save_bundle(self, bundle: ShareBundle, filepath: str | Path) -> Path:
        """Save a ShareBundle to a JSON file for transport.

        Args:
            bundle: The bundle to save.
            filepath: Destination path.

        Returns:
            Path: The written file path.
        """
        path = Path(filepath).expanduser()
        path.parent.mkdir(parents=True, exist_ok=True)
        path.write_text(bundle.model_dump_json(indent=2), encoding="utf-8")
        return path

    @staticmethod
    def load_bundle(filepath: str | Path) -> ShareBundle:
        """Load a ShareBundle from a JSON file.

        Args:
            filepath: Path to the bundle file.

        Returns:
            ShareBundle: The loaded bundle.
        """
        path = Path(filepath).expanduser()
        return ShareBundle.model_validate_json(path.read_text())

    def _select_memories(self, sf: ShareFilter) -> list[Memory]:
        """Apply filter criteria to select memories from the store.

        Args:
            sf: The share filter.

        Returns:
            list[Memory]: Matching memories, deduplicated by ID and
            capped at sf.max_count.
        """
        candidates: list[Memory] = []
        # One dedupe set shared by every selection path; the previous
        # version rebuilt it per section, which let duplicate memory_ids
        # slip through.
        seen: set[str] = set()

        def _add(mem: Memory) -> None:
            if mem.id not in seen:
                candidates.append(mem)
                seen.add(mem.id)

        if sf.memory_ids:
            for mid in sf.memory_ids:
                mem = self._store.recall(mid)
                if mem:
                    _add(mem)

        if sf.tags:
            for m in self._store.list_memories(tags=sf.tags, limit=sf.max_count):
                _add(m)

        if sf.layers:
            for layer in sf.layers:
                for m in self._store.list_memories(layer=layer, limit=sf.max_count):
                    _add(m)

        # Negative filters applied last: exclusion tags and the
        # minimum-intensity floor.
        filtered = [
            m
            for m in candidates
            if not (sf.exclude_tags and any(t in m.tags for t in sf.exclude_tags))
            and m.emotional.intensity >= sf.min_intensity
        ]

        return filtered[: sf.max_count]
package/skmemory/soul.py CHANGED
@@ -12,13 +12,14 @@ It stores:
12
12
  - Values: what this AI cares about
13
13
  - Emotional baseline: the default feeling state
14
14
 
15
- The blueprint lives at ~/.skmemory/soul.yaml and gets loaded
15
+ The blueprint lives at ~/.skcapstone/soul.yaml and gets loaded
16
16
  at the start of every session as the first context injection.
17
17
  """
18
18
 
19
19
  from __future__ import annotations
20
20
 
21
21
  import os
22
+ import platform
22
23
  from datetime import datetime, timezone
23
24
  from pathlib import Path
24
25
  from typing import Any, Optional
@@ -26,7 +27,20 @@ from typing import Any, Optional
26
27
  import yaml
27
28
  from pydantic import BaseModel, Field
28
29
 
29
- DEFAULT_SOUL_PATH = os.path.expanduser("~/.skmemory/soul.yaml")
30
+
31
+ def _default_soul_path() -> str:
32
+ """Platform-aware default path for the soul blueprint."""
33
+ if platform.system() == "Windows":
34
+ local = os.environ.get("LOCALAPPDATA", "")
35
+ if local:
36
+ return os.path.join(local, "skcapstone", "soul", "base.json")
37
+ return os.path.expanduser("~/.skcapstone/soul/base.json")
38
+
39
+
40
+ DEFAULT_SOUL_PATH = os.environ.get(
41
+ "SKMEMORY_SOUL_PATH",
42
+ _default_soul_path(),
43
+ )
30
44
 
31
45
 
32
46
  class Relationship(BaseModel):
@@ -203,11 +217,11 @@ def save_soul(
203
217
  soul: SoulBlueprint,
204
218
  path: str = DEFAULT_SOUL_PATH,
205
219
  ) -> str:
206
- """Save a soul blueprint to YAML.
220
+ """Save a soul blueprint to JSON or YAML (based on extension).
207
221
 
208
222
  Args:
209
223
  soul: The blueprint to save.
210
- path: File path (default: ~/.skmemory/soul.yaml).
224
+ path: File path (default: ~/.skcapstone/soul/base.json).
211
225
 
212
226
  Returns:
213
227
  str: The path where it was saved.
@@ -216,39 +230,76 @@ def save_soul(
216
230
  filepath.parent.mkdir(parents=True, exist_ok=True)
217
231
 
218
232
  data = soul.model_dump()
219
- with open(filepath, "w", encoding="utf-8") as f:
220
- yaml.dump(
221
- data,
222
- f,
223
- default_flow_style=False,
224
- allow_unicode=True,
225
- sort_keys=False,
226
- width=120,
227
- )
233
+
234
+ if filepath.suffix == ".json":
235
+ import json
236
+ with open(filepath, "w", encoding="utf-8") as f:
237
+ json.dump(data, f, indent=2, ensure_ascii=False, default=str)
238
+ else:
239
+ with open(filepath, "w", encoding="utf-8") as f:
240
+ yaml.dump(
241
+ data,
242
+ f,
243
+ default_flow_style=False,
244
+ allow_unicode=True,
245
+ sort_keys=False,
246
+ width=120,
247
+ )
228
248
 
229
249
  return str(filepath)
230
250
 
231
251
 
232
252
def load_soul(path: str = DEFAULT_SOUL_PATH) -> Optional[SoulBlueprint]:
    """Load a soul blueprint from JSON or YAML.

    The given path is tried first (supports both .json and .yaml/.yml);
    when it does not exist, the legacy ~/.skcapstone/soul.yaml location
    (resolved platform-aware on Windows) is checked as a fallback.

    Args:
        path: File path (default: ~/.skcapstone/soul/base.json).

    Returns:
        Optional[SoulBlueprint]: The blueprint if found, None otherwise.
    """
    primary = Path(path)
    if primary.exists():
        return _load_soul_file(primary)

    # Legacy fallback: the pre-0.7 single-file location.
    if platform.system() == "Windows":
        local_appdata = os.environ.get("LOCALAPPDATA", "")
        legacy = (
            Path(os.path.join(local_appdata, "skcapstone", "soul.yaml"))
            if local_appdata
            else None
        )
    else:
        legacy = Path(os.path.expanduser("~/.skcapstone/soul.yaml"))

    if legacy is not None and legacy.exists():
        return _load_soul_file(legacy)

    return None
281
+
282
+
283
def _load_soul_file(filepath: Path) -> Optional[SoulBlueprint]:
    """Parse one soul file (.json or .yaml/.yml) into a SoulBlueprint.

    Failures of any kind -- unreadable file, bad syntax, schema
    mismatch -- are deliberately swallowed and reported as None so
    callers can fall back to other locations.

    Args:
        filepath: Path to the soul file (.json or .yaml/.yml).

    Returns:
        Optional[SoulBlueprint]: The blueprint if valid, None otherwise.
    """
    try:
        text = filepath.read_text(encoding="utf-8")
        if filepath.suffix == ".json":
            import json

            parsed = json.loads(text)
        else:
            parsed = yaml.safe_load(text)
        # An empty document parses to None in both formats.
        if parsed is None:
            return None
        return SoulBlueprint(**parsed)
    except Exception:
        return None
253
304
 
254
305