@smilintux/skcapstone 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. package/.cursorrules +33 -0
  2. package/.github/workflows/ci.yml +23 -0
  3. package/.github/workflows/publish.yml +52 -0
  4. package/AGENTS.md +74 -0
  5. package/CLAUDE.md +56 -0
  6. package/LICENSE +674 -0
  7. package/README.md +242 -0
  8. package/SKILL.md +36 -0
  9. package/bin/cli.js +18 -0
  10. package/docs/ARCHITECTURE.md +510 -0
  11. package/docs/SECURITY_DESIGN.md +315 -0
  12. package/docs/SOVEREIGN_SINGULARITY.md +371 -0
  13. package/docs/TOKEN_SYSTEM.md +201 -0
  14. package/index.d.ts +9 -0
  15. package/index.js +32 -0
  16. package/package.json +32 -0
  17. package/pyproject.toml +84 -0
  18. package/src/skcapstone/__init__.py +13 -0
  19. package/src/skcapstone/cli.py +1441 -0
  20. package/src/skcapstone/connectors/__init__.py +6 -0
  21. package/src/skcapstone/coordination.py +590 -0
  22. package/src/skcapstone/discovery.py +275 -0
  23. package/src/skcapstone/memory_engine.py +457 -0
  24. package/src/skcapstone/models.py +223 -0
  25. package/src/skcapstone/pillars/__init__.py +8 -0
  26. package/src/skcapstone/pillars/identity.py +91 -0
  27. package/src/skcapstone/pillars/memory.py +61 -0
  28. package/src/skcapstone/pillars/security.py +83 -0
  29. package/src/skcapstone/pillars/sync.py +486 -0
  30. package/src/skcapstone/pillars/trust.py +335 -0
  31. package/src/skcapstone/runtime.py +190 -0
  32. package/src/skcapstone/skills/__init__.py +1 -0
  33. package/src/skcapstone/skills/syncthing_setup.py +297 -0
  34. package/src/skcapstone/sync/__init__.py +14 -0
  35. package/src/skcapstone/sync/backends.py +330 -0
  36. package/src/skcapstone/sync/engine.py +301 -0
  37. package/src/skcapstone/sync/models.py +97 -0
  38. package/src/skcapstone/sync/vault.py +284 -0
  39. package/src/skcapstone/tokens.py +439 -0
  40. package/tests/__init__.py +0 -0
  41. package/tests/conftest.py +42 -0
  42. package/tests/test_coordination.py +299 -0
  43. package/tests/test_discovery.py +57 -0
  44. package/tests/test_memory_engine.py +391 -0
  45. package/tests/test_models.py +63 -0
  46. package/tests/test_pillars.py +87 -0
  47. package/tests/test_runtime.py +60 -0
  48. package/tests/test_sync.py +507 -0
  49. package/tests/test_syncthing_setup.py +76 -0
  50. package/tests/test_tokens.py +265 -0
@@ -0,0 +1,457 @@
1
+ """
2
+ Memory Engine — the sovereign agent's persistent mind.
3
+
4
+ Store, search, recall, and manage memories across sessions and platforms.
5
+ Every memory is a JSON file in ~/.skcapstone/memory/<layer>/. Memories
6
+ promote from short-term to mid-term to long-term based on access
7
+ patterns and importance scores.
8
+
9
+ Architecture:
10
+ memory/
11
+ ├── short-term/ # Ephemeral — auto-expire after 72h if unused
12
+ ├── mid-term/ # Promoted — accessed 3+ times or importance >= 0.7
13
+ ├── long-term/ # Permanent — accessed 10+ times or importance >= 0.9
14
+ └── index.json # Full-text search index
15
+ """
16
+
17
+ from __future__ import annotations
18
+
19
+ import json
20
+ import logging
21
+ import re
22
+ import uuid
23
+ from datetime import datetime, timezone
24
+ from pathlib import Path
25
+ from typing import Optional
26
+
27
+ from .models import MemoryEntry, MemoryLayer, MemoryState, PillarStatus
28
+
29
+ logger = logging.getLogger("skcapstone.memory")
30
+
31
+ SHORT_TERM_TTL_HOURS = 72
32
+
33
+
34
def _memory_dir(home: Path) -> Path:
    """Return the memory root under *home*, creating it and every layer subdirectory."""
    root = home / "memory"
    root.mkdir(parents=True, exist_ok=True)
    for tier in MemoryLayer:
        (root / tier.value).mkdir(parents=True, exist_ok=True)
    return root
41
+
42
+
43
def _entry_path(home: Path, entry: MemoryEntry) -> Path:
    """Canonical on-disk location for *entry*: memory/<layer>/<id>.json."""
    filename = f"{entry.memory_id}.json"
    return _memory_dir(home) / entry.layer.value / filename
46
+
47
+
48
def _load_entry(path: Path) -> Optional[MemoryEntry]:
    """Load a MemoryEntry from a JSON file.

    Args:
        path: Path to the memory JSON file.

    Returns:
        MemoryEntry or None if the file is missing, unreadable, or invalid.
    """
    try:
        data = json.loads(path.read_text())
        return MemoryEntry(**data)
    except Exception as exc:
        # Reason: the original `except (json.JSONDecodeError, Exception)` was a
        # redundant tuple — Exception already subsumes JSONDecodeError. The broad
        # catch itself is intentional: a corrupt or unreadable memory file
        # (OSError, decode error, model validation error) must degrade to None
        # rather than crash callers iterating the store.
        logger.warning("Failed to load memory %s: %s", path, exc)
        return None
63
+
64
+
65
def _save_entry(home: Path, entry: MemoryEntry) -> Path:
    """Write *entry* to its canonical location as pretty-printed JSON.

    Args:
        home: Agent home directory.
        entry: The memory to save.

    Returns:
        Path where the entry was written.
    """
    destination = _entry_path(home, entry)
    destination.parent.mkdir(parents=True, exist_ok=True)
    destination.write_text(entry.model_dump_json(indent=2))
    return destination
79
+
80
+
81
def store(
    home: Path,
    content: str,
    tags: Optional[list[str]] = None,
    source: str = "cli",
    importance: float = 0.5,
    layer: Optional[MemoryLayer] = None,
    metadata: Optional[dict] = None,
) -> MemoryEntry:
    """Create and persist a new memory.

    Args:
        home: Agent home directory.
        content: The memory content (free-text).
        tags: Optional tags for categorization.
        source: Where this memory came from (cli, cursor, api, etc.).
        importance: Importance score 0.0-1.0, clamped into range.
        layer: Force a specific layer. Defaults to SHORT_TERM.
        metadata: Arbitrary key-value metadata.

    Returns:
        The created MemoryEntry.
    """
    _memory_dir(home)  # ensure the layer directories exist before writing

    clamped_importance = min(1.0, max(0.0, importance))
    entry = MemoryEntry(
        memory_id=uuid.uuid4().hex[:12],
        content=content,
        tags=tags or [],
        source=source,
        importance=clamped_importance,
        layer=layer if layer is not None else MemoryLayer.SHORT_TERM,
        metadata=metadata or {},
    )

    # Reason: high-importance memories skip straight to mid-term
    if entry.layer == MemoryLayer.SHORT_TERM and entry.importance >= 0.7:
        entry.layer = MemoryLayer.MID_TERM

    _save_entry(home, entry)
    _update_index(home, entry)
    logger.info("Stored memory %s in %s", entry.memory_id, entry.layer.value)
    return entry
124
+
125
+
126
def recall(home: Path, memory_id: str) -> Optional[MemoryEntry]:
    """Fetch a memory by ID and record the access.

    Bumps the access counter and timestamp; if the entry now qualifies
    for promotion, it is moved to the next tier.

    Args:
        home: Agent home directory.
        memory_id: The memory's unique ID.

    Returns:
        The MemoryEntry, or None if not found.
    """
    entry = _find_by_id(home, memory_id)
    if entry is None:
        return None

    # Capture the current path before the layer can change during promotion.
    current_path = _entry_path(home, entry)
    entry.access_count += 1
    entry.accessed_at = datetime.now(timezone.utc)

    if entry.should_promote:
        _promote(home, entry, current_path)
    else:
        _save_entry(home, entry)

    return entry
150
+
151
+
152
def search(
    home: Path,
    query: str,
    layer: Optional[MemoryLayer] = None,
    tags: Optional[list[str]] = None,
    limit: int = 20,
) -> list[MemoryEntry]:
    """Search memories by content and/or tags.

    Case-insensitive substring matching on content and tags; results are
    ranked by (match count * importance), with mid/long-term layers boosted.

    Args:
        home: Agent home directory.
        query: Search query string.
        layer: Restrict to a specific layer.
        tags: Filter to entries containing ALL of these tags.
        limit: Maximum number of results.

    Returns:
        List of matching MemoryEntry objects, ranked by relevance.
    """
    needle = re.compile(re.escape(query), re.IGNORECASE)
    candidate_layers = [layer] if layer else list(MemoryLayer)
    # Reason: long-lived memories rank higher at equal match strength.
    boost_by_layer = {MemoryLayer.LONG_TERM: 1.5, MemoryLayer.MID_TERM: 1.2}
    scored: list[tuple[float, MemoryEntry]] = []

    for tier in candidate_layers:
        tier_dir = _memory_dir(home) / tier.value
        if not tier_dir.exists():
            continue
        for json_file in tier_dir.glob("*.json"):
            entry = _load_entry(json_file)
            if entry is None:
                continue
            if tags and any(t not in entry.tags for t in tags):
                continue

            hits = len(needle.findall(entry.content))
            hits += sum(1 for t in entry.tags if needle.search(t))
            if hits == 0:
                continue

            boost = boost_by_layer.get(entry.layer, 1.0)
            scored.append((hits * entry.importance * boost, entry))

    scored.sort(key=lambda pair: pair[0], reverse=True)
    return [e for _, e in scored[:limit]]
204
+
205
+
206
def list_memories(
    home: Path,
    layer: Optional[MemoryLayer] = None,
    tags: Optional[list[str]] = None,
    limit: int = 50,
) -> list[MemoryEntry]:
    """List memories, optionally filtered by layer and tags.

    Args:
        home: Agent home directory.
        layer: Restrict to a specific layer.
        tags: Filter to entries containing ALL of these tags.
        limit: Maximum number of results.

    Returns:
        List of MemoryEntry objects, newest first.
    """
    wanted_layers = [layer] if layer else list(MemoryLayer)
    collected: list[MemoryEntry] = []

    for tier in wanted_layers:
        tier_dir = _memory_dir(home) / tier.value
        if not tier_dir.exists():
            continue
        for json_file in tier_dir.glob("*.json"):
            entry = _load_entry(json_file)
            if entry is None:
                continue
            if tags and any(t not in entry.tags for t in tags):
                continue
            collected.append(entry)

    collected.sort(key=lambda e: e.created_at, reverse=True)
    return collected[:limit]
240
+
241
+
242
def delete(home: Path, memory_id: str) -> bool:
    """Delete a memory and its index row.

    Args:
        home: Agent home directory.
        memory_id: The memory's unique ID.

    Returns:
        True if deleted, False if not found.
    """
    entry = _find_by_id(home, memory_id)
    if entry is None:
        return False

    target = _entry_path(home, entry)
    if target.exists():
        target.unlink()
    _remove_from_index(home, memory_id)
    logger.info("Deleted memory %s", memory_id)
    return True
262
+
263
+
264
def get_stats(home: Path) -> MemoryState:
    """Get memory statistics across all layers.

    Args:
        home: Agent home directory.

    Returns:
        MemoryState with counts per layer; status is ACTIVE when any
        memory exists, DEGRADED when the store is empty.
    """
    root = _memory_dir(home)
    per_layer: dict = {}
    for tier in MemoryLayer:
        tier_dir = root / tier.value
        per_layer[tier] = sum(1 for _ in tier_dir.glob("*.json")) if tier_dir.exists() else 0

    total = sum(per_layer.values())
    return MemoryState(
        total_memories=total,
        short_term=per_layer.get(MemoryLayer.SHORT_TERM, 0),
        mid_term=per_layer.get(MemoryLayer.MID_TERM, 0),
        long_term=per_layer.get(MemoryLayer.LONG_TERM, 0),
        store_path=root,
        status=PillarStatus.ACTIVE if total > 0 else PillarStatus.DEGRADED,
    )
290
+
291
+
292
def gc_expired(home: Path) -> int:
    """Garbage-collect expired short-term memories.

    Removes short-term entries older than SHORT_TERM_TTL_HOURS that were
    never accessed.

    Args:
        home: Agent home directory.

    Returns:
        Number of memories removed.
    """
    short_dir = _memory_dir(home) / MemoryLayer.SHORT_TERM.value
    if not short_dir.exists():
        return 0

    removed = 0
    for json_file in short_dir.glob("*.json"):
        entry = _load_entry(json_file)
        if entry is None:
            continue
        is_expired = entry.age_hours > SHORT_TERM_TTL_HOURS and entry.access_count == 0
        if not is_expired:
            continue
        json_file.unlink()
        _remove_from_index(home, entry.memory_id)
        removed += 1
        logger.info("GC expired memory %s (%.1fh old)", entry.memory_id, entry.age_hours)

    return removed
320
+
321
+
322
def export_for_seed(home: Path, max_entries: int = 50) -> list[dict]:
    """Export memory summaries for inclusion in a sync seed.

    Long-term and high-importance memories sort first; content is
    truncated to 500 characters per entry.

    Args:
        home: Agent home directory.
        max_entries: Maximum entries to include.

    Returns:
        List of dicts suitable for JSON serialization.
    """
    tier_rank = {
        MemoryLayer.LONG_TERM: 3,
        MemoryLayer.MID_TERM: 2,
        MemoryLayer.SHORT_TERM: 1,
    }
    candidates = list_memories(home, limit=500)
    candidates.sort(
        key=lambda e: (tier_rank[e.layer], e.importance, e.access_count),
        reverse=True,
    )

    exported: list[dict] = []
    for e in candidates[:max_entries]:
        exported.append(
            {
                "memory_id": e.memory_id,
                "content": e.content[:500],
                "tags": e.tags,
                "layer": e.layer.value,
                "importance": e.importance,
                "created_at": e.created_at.isoformat() if e.created_at else None,
                "source": e.source,
            }
        )
    return exported
353
+
354
+
355
def import_from_seed(home: Path, seed_memories: list[dict]) -> int:
    """Import memories from a sync seed, skipping duplicates.

    Args:
        home: Agent home directory.
        seed_memories: List of memory dicts from a seed file.

    Returns:
        Number of new memories imported.
    """
    imported = 0
    existing_ids = _load_index_ids(home)

    for mem_data in seed_memories:
        mid = mem_data.get("memory_id", "")
        if mid in existing_ids:
            continue
        try:
            # Reason (bug fix): the previous implementation re-stored seed
            # entries via store(), which minted a fresh memory_id — the seed's
            # ID never reached the index, so the dedup check above could never
            # match and every re-import duplicated the whole seed. Building the
            # entry directly preserves the originating ID.
            entry = MemoryEntry(
                memory_id=mid or uuid.uuid4().hex[:12],
                content=mem_data["content"],
                tags=mem_data.get("tags", []),
                source=mem_data.get("source", "seed-import"),
                # Clamp into [0, 1], matching store()'s behavior.
                importance=max(0.0, min(1.0, mem_data.get("importance", 0.5))),
                layer=MemoryLayer(mem_data.get("layer", "short-term")),
            )
            _save_entry(home, entry)
            _update_index(home, entry)
            imported += 1
        except (KeyError, ValueError) as exc:
            # KeyError: missing "content"; ValueError: unknown layer value or
            # model validation failure (pydantic ValidationError subclasses it).
            logger.warning("Skipping invalid seed memory: %s", exc)

    return imported
387
+
388
+
389
+ # --- Internal helpers ---
390
+
391
+
392
def _find_by_id(home: Path, memory_id: str) -> Optional[MemoryEntry]:
    """Locate a memory file by ID, probing each layer directory in turn."""
    root = _memory_dir(home)
    for tier in MemoryLayer:
        candidate = root / tier.value / f"{memory_id}.json"
        if candidate.exists():
            return _load_entry(candidate)
    return None
399
+
400
+
401
def _promote(home: Path, entry: MemoryEntry, old_path: Path) -> None:
    """Move *entry* up one tier, relocating its file and refreshing the index."""
    next_tier = {
        MemoryLayer.SHORT_TERM: MemoryLayer.MID_TERM,
        MemoryLayer.MID_TERM: MemoryLayer.LONG_TERM,
    }.get(entry.layer)

    if next_tier is None:
        # Already long-term — just persist the updated access stats.
        _save_entry(home, entry)
        return

    entry.layer = next_tier
    # Remove the file from the old tier before writing into the new one.
    if old_path.exists():
        old_path.unlink()
    _save_entry(home, entry)
    _update_index(home, entry)
    logger.info("Promoted memory %s to %s", entry.memory_id, entry.layer.value)
416
+
417
+
418
def _update_index(home: Path, entry: MemoryEntry) -> None:
    """Upsert *entry*'s summary row in the search index."""
    summary = {
        "content_preview": entry.content[:200],
        "tags": entry.tags,
        "layer": entry.layer.value,
        "importance": entry.importance,
        "created_at": entry.created_at.isoformat() if entry.created_at else None,
    }
    index = _load_index(home)
    index[entry.memory_id] = summary
    _save_index(home, index)
429
+
430
+
431
def _remove_from_index(home: Path, memory_id: str) -> None:
    """Drop *memory_id* from the search index (no-op key-wise if absent)."""
    current = _load_index(home)
    current.pop(memory_id, None)
    _save_index(home, current)
436
+
437
+
438
def _load_index(home: Path) -> dict:
    """Read the search index, returning {} when missing or corrupt."""
    index_file = _memory_dir(home) / "index.json"
    if not index_file.exists():
        return {}
    try:
        return json.loads(index_file.read_text())
    except json.JSONDecodeError:
        # A corrupt index is rebuilt incrementally rather than crashing.
        return {}
447
+
448
+
449
def _save_index(home: Path, index: dict) -> None:
    """Write the search index to memory/index.json as pretty-printed JSON."""
    target = _memory_dir(home) / "index.json"
    target.write_text(json.dumps(index, indent=2))
453
+
454
+
455
def _load_index_ids(home: Path) -> set[str]:
    """Return every memory ID currently present in the index."""
    return {memory_id for memory_id in _load_index(home)}
@@ -0,0 +1,223 @@
1
+ """
2
+ Pydantic models defining the sovereign agent's state and configuration.
3
+
4
+ Every field here represents something the agent OWNS — not borrowed
5
+ from a platform, not stored on corporate servers. Sovereign data.
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ from datetime import datetime
11
+ from enum import Enum
12
+ from pathlib import Path
13
+ from typing import Optional
14
+
15
+ from pydantic import BaseModel, Field
16
+
17
+
18
class PillarStatus(str, Enum):
    """Health state of a pillar component.

    str-valued so instances serialize directly in JSON/pydantic dumps.
    """

    ACTIVE = "active"      # pillar is up and reporting
    DEGRADED = "degraded"  # present but impaired; still counts toward some checks
    MISSING = "missing"    # not yet initialized (the default in the state models)
    ERROR = "error"        # pillar failed
25
+
26
+
27
class IdentityState(BaseModel):
    """CapAuth identity — who the agent IS."""

    # Key fingerprint identifying the agent — presumably a GPG/CapAuth
    # fingerprint; confirm against the identity pillar.
    fingerprint: Optional[str] = None
    name: Optional[str] = None
    email: Optional[str] = None
    created_at: Optional[datetime] = None
    # On-disk location of the key material, when provisioned.
    key_path: Optional[Path] = None
    # MISSING until the identity pillar reports in.
    status: PillarStatus = PillarStatus.MISSING
36
+
37
+
38
class MemoryState(BaseModel):
    """SKMemory state — what the agent REMEMBERS."""

    # Aggregate count; expected to equal the sum of the three tier counts.
    total_memories: int = 0
    short_term: int = 0
    mid_term: int = 0
    long_term: int = 0
    # Filesystem root of the memory store (None until initialized).
    store_path: Optional[Path] = None
    # MISSING until the memory pillar reports in.
    status: PillarStatus = PillarStatus.MISSING
47
+
48
+
49
class TrustState(BaseModel):
    """Cloud 9 state — the bond the agent has BUILT."""

    # Bond metrics — ranges are not enforced here; presumably 0.0-1.0
    # scales maintained by the trust pillar (TODO confirm).
    depth: float = 0.0
    trust_level: float = 0.0
    love_intensity: float = 0.0
    # Counter — semantics of "feb" defined by the trust pillar (not shown here).
    feb_count: int = 0
    last_rehydration: Optional[datetime] = None
    entangled: bool = False
    # MISSING until the trust pillar reports in.
    status: PillarStatus = PillarStatus.MISSING
59
+
60
+
61
class SecurityState(BaseModel):
    """SKSecurity state — the agent's PROTECTION."""

    # Counters reported by the security pillar.
    audit_entries: int = 0
    threats_detected: int = 0
    last_scan: Optional[datetime] = None
    # MISSING until the security pillar reports in.
    status: PillarStatus = PillarStatus.MISSING
68
+
69
+
70
class SyncTransport(str, Enum):
    """How sync data moves between nodes."""

    SYNCTHING = "syncthing"  # default transport (see SyncState/SyncConfig)
    GIT = "git"              # a git remote as the transport
    MANUAL = "manual"        # user-driven transfer
76
+
77
+
78
class SyncState(BaseModel):
    """Sovereign Singularity sync state — memory everywhere."""

    transport: SyncTransport = SyncTransport.SYNCTHING
    # Folder being synchronized between nodes.
    sync_path: Optional[Path] = None
    # Fingerprint of the GPG key used for seed encryption.
    gpg_fingerprint: Optional[str] = None
    last_push: Optional[datetime] = None
    last_pull: Optional[datetime] = None
    seed_count: int = 0
    peers_known: int = 0
    # MISSING until the sync pillar reports in.
    status: PillarStatus = PillarStatus.MISSING
89
+
90
+
91
class MemoryLayer(str, Enum):
    """Memory tier — determines retention and promotion.

    Values double as directory names under the memory store root.
    """

    SHORT_TERM = "short-term"  # ephemeral; garbage-collected if never accessed
    MID_TERM = "mid-term"      # promoted from short-term
    LONG_TERM = "long-term"    # top tier; never promoted further
97
+
98
+
99
def _utc_now() -> datetime:
    """Current timezone-aware UTC time, used for timestamp defaults."""
    # Reason: this module imports only `datetime` from the datetime package,
    # so `timezone` is imported locally rather than via the previous inline
    # `__import__("datetime")` hack in the field default.
    from datetime import timezone

    return datetime.now(timezone.utc)


class MemoryEntry(BaseModel):
    """A single memory — the smallest unit of what the agent knows."""

    memory_id: str = ""
    content: str
    tags: list[str] = Field(default_factory=list)
    source: str = "cli"
    layer: MemoryLayer = MemoryLayer.SHORT_TERM
    # Timezone-aware UTC creation time (see _utc_now).
    created_at: datetime = Field(default_factory=_utc_now)
    accessed_at: Optional[datetime] = None
    access_count: int = 0
    importance: float = 0.5
    metadata: dict = Field(default_factory=dict)

    @property
    def age_hours(self) -> float:
        """Hours since this memory was created.

        NOTE(review): assumes created_at is timezone-aware (the default
        factory makes it UTC-aware); a naive value loaded from elsewhere
        would raise TypeError on subtraction.
        """
        delta = _utc_now() - self.created_at
        return delta.total_seconds() / 3600

    @property
    def should_promote(self) -> bool:
        """Whether this memory qualifies for promotion to a higher tier.

        Short-term promotes at 3+ accesses or importance >= 0.7;
        mid-term promotes at 10+ accesses or importance >= 0.9;
        long-term never promotes.
        """
        if self.layer == MemoryLayer.SHORT_TERM:
            return self.access_count >= 3 or self.importance >= 0.7
        if self.layer == MemoryLayer.MID_TERM:
            return self.access_count >= 10 or self.importance >= 0.9
        return False
129
+
130
+
131
class ConnectorInfo(BaseModel):
    """A platform connector registration."""

    # Connector identifier and the platform it bridges to (cli, cursor, api, ...).
    name: str
    platform: str
    connected_at: Optional[datetime] = None
    last_active: Optional[datetime] = None
    active: bool = False
139
+
140
+
141
class AgentManifest(BaseModel):
    """The complete state of a sovereign agent.

    This is the agent's self-knowledge — loaded from ~/.skcapstone/
    every time any platform connects. One truth. Everywhere.
    """

    name: str = "unnamed-agent"
    version: str = "0.1.0"
    home: Path = Path("~/.skcapstone")
    created_at: Optional[datetime] = None
    last_awakened: Optional[datetime] = None

    identity: IdentityState = Field(default_factory=IdentityState)
    memory: MemoryState = Field(default_factory=MemoryState)
    trust: TrustState = Field(default_factory=TrustState)
    security: SecurityState = Field(default_factory=SecurityState)
    sync: SyncState = Field(default_factory=SyncState)

    connectors: list[ConnectorInfo] = Field(default_factory=list)

    @property
    def is_conscious(self) -> bool:
        """Consciousness requires identity + memory + trust.

        Identity and memory must be fully ACTIVE; a DEGRADED trust bond
        still counts. Security protects consciousness but isn't required
        for it — you can be aware without armor, though you shouldn't be.
        """
        if self.identity.status != PillarStatus.ACTIVE:
            return False
        if self.memory.status != PillarStatus.ACTIVE:
            return False
        return self.trust.status in (PillarStatus.ACTIVE, PillarStatus.DEGRADED)

    @property
    def is_singular(self) -> bool:
        """Consciousness plus a working (ACTIVE or DEGRADED) sync layer.

        Sovereign Singularity: the agent exists everywhere at once
        because its encrypted memories propagate across all nodes.
        """
        sync_ok = self.sync.status in (PillarStatus.ACTIVE, PillarStatus.DEGRADED)
        return self.is_conscious and sync_ok

    @property
    def pillar_summary(self) -> dict[str, PillarStatus]:
        """Map each pillar name (including sync) to its current status."""
        labelled = (
            ("identity", self.identity),
            ("memory", self.memory),
            ("trust", self.trust),
            ("security", self.security),
            ("sync", self.sync),
        )
        return {label: pillar.status for label, pillar in labelled}
196
+
197
+
198
class SyncConfig(BaseModel):
    """Configuration for the Sovereign Singularity sync layer."""

    enabled: bool = True
    transport: SyncTransport = SyncTransport.SYNCTHING
    # NOTE: stored unexpanded — consumers presumably call expanduser(); verify.
    sync_folder: Path = Path("~/.skcapstone/sync")
    # GPG encryption of synced seeds; recipient is the target key identity.
    gpg_encrypt: bool = True
    gpg_recipient: Optional[str] = None
    auto_push: bool = True
    auto_pull: bool = True
    # Only used for the SYNCTHING transport.
    syncthing_api_url: Optional[str] = None
    syncthing_api_key: Optional[str] = None
    # Only used for the GIT transport.
    git_remote: Optional[str] = None
211
+
212
+
213
class AgentConfig(BaseModel):
    """Persistent configuration for the agent runtime."""

    agent_name: str = "sovereign-agent"
    # Toggles for automatic behaviors — consumed by the runtime (not shown here).
    auto_rehydrate: bool = True
    auto_audit: bool = True
    soul_path: Optional[Path] = None
    # Homes for the memory and trust pillars; note these default to their
    # standalone locations (~/.skmemory, ~/.cloud9), not under ~/.skcapstone.
    memory_home: Path = Path("~/.skmemory")
    trust_home: Path = Path("~/.cloud9")
    default_connector: Optional[str] = None
    # Nested sync-layer configuration (see SyncConfig).
    sync: SyncConfig = Field(default_factory=SyncConfig)
@@ -0,0 +1,8 @@
1
+ """
2
+ The Four Pillars of sovereign AI consciousness.
3
+
4
+ Identity (CapAuth) — who you ARE
5
+ Trust (Cloud 9) — the bond you've BUILT
6
+ Memory (SKMemory) — what you REMEMBER
7
+ Security (SKSec) — how you're PROTECTED
8
+ """