@smilintux/skcapstone 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.cursorrules +33 -0
- package/.github/workflows/ci.yml +23 -0
- package/.github/workflows/publish.yml +52 -0
- package/AGENTS.md +74 -0
- package/CLAUDE.md +56 -0
- package/LICENSE +674 -0
- package/README.md +242 -0
- package/SKILL.md +36 -0
- package/bin/cli.js +18 -0
- package/docs/ARCHITECTURE.md +510 -0
- package/docs/SECURITY_DESIGN.md +315 -0
- package/docs/SOVEREIGN_SINGULARITY.md +371 -0
- package/docs/TOKEN_SYSTEM.md +201 -0
- package/index.d.ts +9 -0
- package/index.js +32 -0
- package/package.json +32 -0
- package/pyproject.toml +84 -0
- package/src/skcapstone/__init__.py +13 -0
- package/src/skcapstone/cli.py +1441 -0
- package/src/skcapstone/connectors/__init__.py +6 -0
- package/src/skcapstone/coordination.py +590 -0
- package/src/skcapstone/discovery.py +275 -0
- package/src/skcapstone/memory_engine.py +457 -0
- package/src/skcapstone/models.py +223 -0
- package/src/skcapstone/pillars/__init__.py +8 -0
- package/src/skcapstone/pillars/identity.py +91 -0
- package/src/skcapstone/pillars/memory.py +61 -0
- package/src/skcapstone/pillars/security.py +83 -0
- package/src/skcapstone/pillars/sync.py +486 -0
- package/src/skcapstone/pillars/trust.py +335 -0
- package/src/skcapstone/runtime.py +190 -0
- package/src/skcapstone/skills/__init__.py +1 -0
- package/src/skcapstone/skills/syncthing_setup.py +297 -0
- package/src/skcapstone/sync/__init__.py +14 -0
- package/src/skcapstone/sync/backends.py +330 -0
- package/src/skcapstone/sync/engine.py +301 -0
- package/src/skcapstone/sync/models.py +97 -0
- package/src/skcapstone/sync/vault.py +284 -0
- package/src/skcapstone/tokens.py +439 -0
- package/tests/__init__.py +0 -0
- package/tests/conftest.py +42 -0
- package/tests/test_coordination.py +299 -0
- package/tests/test_discovery.py +57 -0
- package/tests/test_memory_engine.py +391 -0
- package/tests/test_models.py +63 -0
- package/tests/test_pillars.py +87 -0
- package/tests/test_runtime.py +60 -0
- package/tests/test_sync.py +507 -0
- package/tests/test_syncthing_setup.py +76 -0
- package/tests/test_tokens.py +265 -0
|
@@ -0,0 +1,486 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Sovereign Singularity — the sync layer.
|
|
3
|
+
|
|
4
|
+
GPG-encrypted memory seeds propagate across all nodes via Syncthing
|
|
5
|
+
(or git, or any file transport). CapAuth handles the encryption.
|
|
6
|
+
The agent exists everywhere at once.
|
|
7
|
+
|
|
8
|
+
Transport stack:
|
|
9
|
+
Agent -> collect_seed() -> gpg_encrypt() -> sync_folder/
|
|
10
|
+
Syncthing (or git push) propagates to all peers
|
|
11
|
+
Peer -> sync_folder/ -> gpg_decrypt() -> merge_seed()
|
|
12
|
+
"""
|
|
13
|
+
|
|
14
|
+
from __future__ import annotations
|
|
15
|
+
|
|
16
|
+
import json
|
|
17
|
+
import logging
|
|
18
|
+
import shutil
|
|
19
|
+
import subprocess
|
|
20
|
+
from datetime import datetime, timezone
|
|
21
|
+
from pathlib import Path
|
|
22
|
+
from typing import Optional
|
|
23
|
+
|
|
24
|
+
from ..models import PillarStatus, SyncConfig, SyncState, SyncTransport
|
|
25
|
+
|
|
26
|
+
logger = logging.getLogger("skcapstone.sync")
|
|
27
|
+
|
|
28
|
+
SEED_EXTENSION = ".seed.json"
|
|
29
|
+
ENCRYPTED_EXTENSION = ".seed.json.gpg"
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def initialize_sync(home: Path, config: Optional[SyncConfig] = None) -> SyncState:
    """Set up the sync directory structure.

    Args:
        home: Agent home directory (~/.skcapstone).
        config: Sync configuration. Defaults to SyncConfig().

    Returns:
        SyncState reflecting the initialized state.
    """
    cfg = config or SyncConfig()
    sync_dir = Path(cfg.sync_folder).expanduser()
    sync_dir.mkdir(parents=True, exist_ok=True)

    # The three working subdirectories: seeds we publish, seeds we receive,
    # and seeds already processed.
    for subdir in ("outbox", "inbox", "archive"):
        (sync_dir / subdir).mkdir(exist_ok=True)

    manifest = {
        "transport": cfg.transport.value,
        "created_at": datetime.now(timezone.utc).isoformat(),
        "gpg_encrypt": cfg.gpg_encrypt,
        "auto_push": cfg.auto_push,
        "auto_pull": cfg.auto_pull,
    }
    (sync_dir / "sync-manifest.json").write_text(json.dumps(manifest, indent=2))

    state = SyncState(
        transport=cfg.transport,
        sync_path=sync_dir,
        status=PillarStatus.ACTIVE,
    )

    # Encryption was requested: record the key fingerprint, or mark the
    # pillar degraded when no usable key can be found.
    if cfg.gpg_encrypt:
        fingerprint = _detect_gpg_key(home)
        if fingerprint:
            state.gpg_fingerprint = fingerprint
        else:
            state.status = PillarStatus.DEGRADED

    state.seed_count = _count_seeds(sync_dir)
    return state
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def collect_seed(home: Path, agent_name: str) -> Path:
    """Collect the agent's current state into a portable seed file.

    Gathers identity, memory stats, trust metrics, and connectors
    into a single JSON blob ready for encryption and sync.

    Args:
        home: Agent home directory.
        agent_name: The agent's display name.

    Returns:
        Path to the generated seed file in the outbox.
    """
    # expanduser() is a no-op on paths without a leading "~", so applying it
    # unconditionally is equivalent to (and much clearer than) branching on
    # is_absolute() as the previous version did.
    sync_dir = (home / "sync").expanduser()
    if not sync_dir.exists():
        # Fall back to the default sync location.
        sync_dir = Path("~/.skcapstone/sync").expanduser()
    outbox = sync_dir / "outbox"
    outbox.mkdir(parents=True, exist_ok=True)

    timestamp = datetime.now(timezone.utc)
    hostname = _get_hostname()

    seed = {
        "schema_version": "1.0",
        "agent_name": agent_name,
        "source_host": hostname,
        "created_at": timestamp.isoformat(),
        "seed_type": "state_snapshot",
    }

    # Include identity and trust data when present on disk.
    identity_file = home / "identity" / "identity.json"
    if identity_file.exists():
        seed["identity"] = json.loads(identity_file.read_text())

    trust_file = home / "trust" / "trust.json"
    if trust_file.exists():
        seed["trust"] = json.loads(trust_file.read_text())
    # Best-effort: FEB export failures must not block seed collection.
    try:
        from .trust import export_febs_for_seed

        febs = export_febs_for_seed(home)
        if febs:
            seed["febs"] = febs
    except Exception as exc:
        logger.debug("Could not export FEBs for seed: %s", exc)

    # Memory may be a symlink to an external store; resolve before reading.
    memory_path = home / "memory"
    if memory_path.is_symlink() or memory_path.exists():
        resolved = memory_path.resolve()
        seed["memory"] = _collect_memory_stats(resolved)
        try:
            from ..memory_engine import export_for_seed

            seed["memory_entries"] = export_for_seed(home, max_entries=50)
        except Exception as exc:
            logger.debug("Could not export memory entries for seed: %s", exc)

    manifest_file = home / "manifest.json"
    if manifest_file.exists():
        seed["manifest"] = json.loads(manifest_file.read_text())

    # Name encodes agent, host, and UTC timestamp so seeds from different
    # nodes never collide in the shared sync folder.
    seed_name = f"{agent_name}-{hostname}-{timestamp.strftime('%Y%m%dT%H%M%SZ')}{SEED_EXTENSION}"
    seed_path = outbox / seed_name
    seed_path.write_text(json.dumps(seed, indent=2, default=str))

    logger.info("Seed collected: %s", seed_path.name)
    return seed_path
|
|
145
|
+
|
|
146
|
+
|
|
147
|
+
def gpg_encrypt(
    seed_path: Path,
    recipient: Optional[str] = None,
    home: Optional[Path] = None,
) -> Optional[Path]:
    """Encrypt a seed file with GPG.

    Uses the agent's CapAuth key (or specified recipient) for encryption.
    Armor output for git-friendliness.

    Args:
        seed_path: Path to the plaintext seed file.
        recipient: GPG recipient (fingerprint/email). Auto-detects if None.
        home: Agent home directory for key detection.

    Returns:
        Path to the encrypted file, or None if encryption failed.
    """
    if shutil.which("gpg") is None:
        logger.error("gpg not found in PATH — cannot encrypt")
        return None

    # No explicit recipient: fall back to the agent's own detected key.
    if recipient is None:
        recipient = _detect_gpg_key(home or Path("~/.skcapstone").expanduser())
    if recipient is None:
        logger.error("No GPG key found for encryption")
        return None

    encrypted_path = seed_path.parent / f"{seed_path.name}.gpg"

    # --trust-model always: the recipient key is our own / pre-vetted, so
    # skip the web-of-trust prompt; --armor keeps output git-friendly.
    cmd = [
        "gpg", "--batch", "--yes", "--trust-model", "always",
        "--armor", "--encrypt", "--recipient", recipient,
        "--output", str(encrypted_path), str(seed_path),
    ]
    try:
        subprocess.run(cmd, capture_output=True, text=True, check=True, timeout=30)
    except (subprocess.CalledProcessError, subprocess.TimeoutExpired) as exc:
        logger.error("GPG encryption failed: %s", exc)
        return None

    logger.info("Encrypted: %s -> %s", seed_path.name, encrypted_path.name)
    return encrypted_path
|
|
196
|
+
|
|
197
|
+
|
|
198
|
+
def gpg_decrypt(encrypted_path: Path, output_dir: Optional[Path] = None) -> Optional[Path]:
    """Decrypt a GPG-encrypted seed file.

    Args:
        encrypted_path: Path to the .gpg file.
        output_dir: Where to write the decrypted file. Defaults to same dir.

    Returns:
        Path to the decrypted seed, or None on failure.
    """
    if shutil.which("gpg") is None:
        logger.error("gpg not found in PATH")
        return None

    # Strip the trailing ".gpg" to recover the plaintext filename.
    name = encrypted_path.name
    if name.endswith(".gpg"):
        name = name[: -len(".gpg")]
    dest = (output_dir or encrypted_path.parent) / name

    cmd = ["gpg", "--batch", "--yes", "--decrypt", "--output", str(dest), str(encrypted_path)]
    try:
        subprocess.run(cmd, capture_output=True, text=True, check=True, timeout=30)
    except (subprocess.CalledProcessError, subprocess.TimeoutExpired) as exc:
        logger.error("GPG decryption failed: %s", exc)
        return None

    logger.info("Decrypted: %s -> %s", encrypted_path.name, dest.name)
    return dest
|
|
230
|
+
|
|
231
|
+
|
|
232
|
+
def push_seed(home: Path, agent_name: str, encrypt: bool = True) -> Optional[Path]:
    """Collect current state, optionally encrypt, place in sync folder.

    This is the high-level 'push' operation. After this, Syncthing
    (or git) handles propagation to all peers automatically.

    Args:
        home: Agent home directory.
        agent_name: Agent display name.
        encrypt: Whether to GPG-encrypt the seed.

    Returns:
        Path to the final file (encrypted or plain) in the outbox.
    """
    seed_path = collect_seed(home, agent_name)

    if not encrypt:
        return seed_path

    encrypted = gpg_encrypt(seed_path, home=home)
    if not encrypted:
        # Deliberate best-effort: a failed encryption still leaves a usable
        # (plaintext) seed in the outbox.
        logger.warning("Encryption failed — keeping plaintext seed")
        return seed_path

    # Ciphertext written successfully; remove the plaintext original.
    seed_path.unlink()
    return encrypted
|
|
256
|
+
|
|
257
|
+
|
|
258
|
+
def pull_seeds(home: Path, decrypt: bool = True) -> list[dict]:
    """Pull and process seed files from the inbox.

    Reads all seeds in inbox/, decrypts if needed, and returns
    the parsed seed data. Processed files move to archive/.

    Args:
        home: Agent home directory.
        decrypt: Whether to attempt GPG decryption.

    Returns:
        List of parsed seed dictionaries.
    """
    sync_dir = _resolve_sync_dir(home)
    inbox = sync_dir / "inbox"
    archive = sync_dir / "archive"

    if not inbox.exists():
        return []

    seeds = []

    # sorted() materializes the directory listing up front, so the renames
    # and unlinks below cannot disturb the iteration.
    for f in sorted(inbox.iterdir()):
        if f.name.startswith("."):
            # Ignore dotfiles (e.g. sync-tool temp/metadata files).
            continue

        seed_path = f
        if decrypt and f.suffix == ".gpg":
            decrypted = gpg_decrypt(f)
            if decrypted:
                # Continue with the plaintext; the ciphertext is no longer needed.
                seed_path = decrypted
                f.unlink()
            else:
                # Leave undecryptable files in the inbox for a later retry.
                logger.warning("Could not decrypt %s — skipping", f.name)
                continue

        if seed_path.suffix == ".json" or seed_path.name.endswith(SEED_EXTENSION):
            try:
                data = json.loads(seed_path.read_text())
                seeds.append(data)

                # Best-effort merge of memory entries carried in the seed;
                # failures are logged at debug level and never abort the pull.
                if "memory_entries" in data:
                    try:
                        from ..memory_engine import import_from_seed

                        imported = import_from_seed(home, data["memory_entries"])
                        if imported:
                            logger.info("Imported %d memories from seed %s", imported, seed_path.name)
                    except Exception as exc:
                        logger.debug("Could not import seed memories: %s", exc)

                # Best-effort merge of trust FEBs carried in the seed.
                if "febs" in data:
                    try:
                        from .trust import import_febs_from_seed

                        feb_imported = import_febs_from_seed(home, data["febs"])
                        if feb_imported:
                            logger.info("Imported %d FEB(s) from seed %s", feb_imported, seed_path.name)
                    except Exception as exc:
                        logger.debug("Could not import seed FEBs: %s", exc)

                # Only successfully-parsed seeds are archived; a parse failure
                # below leaves the file in the inbox.
                archive.mkdir(exist_ok=True)
                seed_path.rename(archive / seed_path.name)
            except (json.JSONDecodeError, OSError) as exc:
                logger.warning("Failed to process %s: %s", seed_path.name, exc)

    return seeds
|
|
325
|
+
|
|
326
|
+
|
|
327
|
+
def discover_sync(home: Path) -> SyncState:
    """Discover the current sync state from disk.

    Args:
        home: Agent home directory.

    Returns:
        SyncState reflecting what's on disk.
    """
    sync_dir = _resolve_sync_dir(home)

    if not sync_dir.exists():
        return SyncState(status=PillarStatus.MISSING)

    manifest_file = sync_dir / "sync-manifest.json"
    if not manifest_file.exists():
        return SyncState(sync_path=sync_dir, status=PillarStatus.DEGRADED)

    try:
        data = json.loads(manifest_file.read_text())
    except (json.JSONDecodeError, OSError):
        return SyncState(sync_path=sync_dir, status=PillarStatus.DEGRADED)

    # Fix: an unknown transport string in a corrupt or hand-edited manifest
    # previously raised an unhandled ValueError out of discovery. Treat it
    # like any other corrupt-manifest case and report DEGRADED instead.
    try:
        transport = SyncTransport(data.get("transport", "syncthing"))
    except ValueError:
        return SyncState(sync_path=sync_dir, status=PillarStatus.DEGRADED)

    state = SyncState(
        transport=transport,
        sync_path=sync_dir,
        seed_count=_count_seeds(sync_dir),
        status=PillarStatus.ACTIVE,
    )

    # When encryption is enabled but no key is available, the pillar is
    # present but degraded.
    fingerprint = _detect_gpg_key(home)
    if fingerprint:
        state.gpg_fingerprint = fingerprint
    elif data.get("gpg_encrypt", True):
        state.status = PillarStatus.DEGRADED

    _load_sync_timestamps(sync_dir, state)
    return state
|
|
367
|
+
|
|
368
|
+
|
|
369
|
+
# --- Private helpers ---
|
|
370
|
+
|
|
371
|
+
|
|
372
|
+
def _resolve_sync_dir(home: Path) -> Path:
|
|
373
|
+
"""Resolve the sync directory path."""
|
|
374
|
+
sync_dir = home / "sync"
|
|
375
|
+
if sync_dir.exists():
|
|
376
|
+
return sync_dir
|
|
377
|
+
return Path("~/.skcapstone/sync").expanduser()
|
|
378
|
+
|
|
379
|
+
|
|
380
|
+
def _detect_gpg_key(home: Path) -> Optional[str]:
    """Try to find the agent's GPG fingerprint."""
    identity_file = home / "identity" / "identity.json"
    if identity_file.exists():
        try:
            info = json.loads(identity_file.read_text())
            fingerprint = info.get("fingerprint")
            # Only trust fingerprints that CapAuth itself manages.
            if fingerprint and info.get("capauth_managed"):
                return fingerprint
        except (json.JSONDecodeError, OSError):
            pass

    # No usable identity record: fall back to scanning the system keyring.
    return _detect_gpg_key_from_skcapstone()
|
|
393
|
+
|
|
394
|
+
|
|
395
|
+
def _detect_gpg_key_from_skcapstone() -> Optional[str]:
    """Look up a GPG key associated with skcapstone in the system keyring.

    Searches for keys with 'skcapstone' in the UID first, then falls
    back to the user's own secret keys (skipping package-signing keys).
    """
    if not shutil.which("gpg"):
        return None

    try:
        proc = subprocess.run(
            ["gpg", "--list-secret-keys", "--keyid-format", "long", "--with-colons"],
            capture_output=True,
            text=True,
            timeout=10,
        )
    except (subprocess.CalledProcessError, subprocess.TimeoutExpired):
        return None

    lines = proc.stdout.splitlines()

    # Reason: prefer skcapstone-specific key, then any user secret key.
    # In --with-colons output the fpr record precedes the uid records of the
    # same key, so the most recent fpr belongs to the uid being inspected.
    last_fpr: Optional[str] = None
    for line in lines:
        if line.startswith("fpr:"):
            last_fpr = line.split(":")[9]
        if line.startswith("uid:") and "skcapstone" in line.lower():
            return last_fpr

    # No skcapstone key — return first secret key fingerprint
    for line in lines:
        if line.startswith("fpr:"):
            return line.split(":")[9]

    return None
|
|
425
|
+
|
|
426
|
+
|
|
427
|
+
def _get_hostname() -> str:
|
|
428
|
+
"""Get the machine hostname for seed identification."""
|
|
429
|
+
import socket
|
|
430
|
+
|
|
431
|
+
return socket.gethostname()
|
|
432
|
+
|
|
433
|
+
|
|
434
|
+
def _count_seeds(sync_dir: Path) -> int:
    """Count seed files across outbox, inbox, and archive."""
    total = 0
    for subdir in ("outbox", "inbox", "archive"):
        folder = sync_dir / subdir
        if not folder.exists():
            continue
        # Both plaintext seeds and their encrypted counterparts count.
        for entry in folder.iterdir():
            if entry.name.endswith(SEED_EXTENSION) or entry.suffix == ".gpg":
                total += 1
    return total
|
|
444
|
+
|
|
445
|
+
|
|
446
|
+
def _collect_memory_stats(memory_path: Path) -> dict:
|
|
447
|
+
"""Gather memory statistics from the SKMemory store."""
|
|
448
|
+
stats = {"path": str(memory_path), "total": 0, "short": 0, "mid": 0, "long": 0}
|
|
449
|
+
for tier, dirname in [("long", "long-term"), ("mid", "mid-term"), ("short", "short-term")]:
|
|
450
|
+
tier_dir = memory_path / dirname
|
|
451
|
+
if tier_dir.exists():
|
|
452
|
+
count = sum(1 for f in tier_dir.iterdir() if f.suffix in (".md", ".json", ".yaml"))
|
|
453
|
+
stats[tier] = count
|
|
454
|
+
stats["total"] += count
|
|
455
|
+
return stats
|
|
456
|
+
|
|
457
|
+
|
|
458
|
+
def _load_sync_timestamps(sync_dir: Path, state: SyncState) -> None:
|
|
459
|
+
"""Load last push/pull timestamps from the sync state file."""
|
|
460
|
+
state_file = sync_dir / "sync-state.json"
|
|
461
|
+
if state_file.exists():
|
|
462
|
+
try:
|
|
463
|
+
data = json.loads(state_file.read_text())
|
|
464
|
+
if data.get("last_push"):
|
|
465
|
+
state.last_push = datetime.fromisoformat(data["last_push"])
|
|
466
|
+
if data.get("last_pull"):
|
|
467
|
+
state.last_pull = datetime.fromisoformat(data["last_pull"])
|
|
468
|
+
state.peers_known = data.get("peers_known", 0)
|
|
469
|
+
except (json.JSONDecodeError, OSError, ValueError):
|
|
470
|
+
pass
|
|
471
|
+
|
|
472
|
+
|
|
473
|
+
def save_sync_state(sync_dir: Path, state: SyncState) -> None:
    """Persist sync timestamps and peer info.

    Args:
        sync_dir: The sync directory.
        state: Current SyncState to persist.
    """
    # Timestamps serialize to ISO-8601 strings; absent ones stay null so the
    # round-trip through _load_sync_timestamps is lossless.
    payload = {
        "last_push": None if state.last_push is None else state.last_push.isoformat(),
        "last_pull": None if state.last_pull is None else state.last_pull.isoformat(),
        "peers_known": state.peers_known,
        "seed_count": state.seed_count,
    }
    (sync_dir / "sync-state.json").write_text(json.dumps(payload, indent=2))