@wipcomputer/wip-ldm-os 0.4.70 → 0.4.72

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/SKILL.md CHANGED
@@ -9,7 +9,7 @@ license: MIT
9
9
  compatibility: Requires git, npm, node. Node.js 18+.
10
10
  metadata:
11
11
  display-name: "LDM OS"
12
- version: "0.4.70"
12
+ version: "0.4.72"
13
13
  homepage: "https://github.com/wipcomputer/wip-ldm-os"
14
14
  author: "Parker Todd Brooks"
15
15
  category: infrastructure
package/bin/ldm.js CHANGED
@@ -20,7 +20,7 @@
20
20
  * ldm --version Show version
21
21
  */
22
22
 
23
- import { existsSync, readFileSync, writeFileSync, mkdirSync, readdirSync, cpSync, chmodSync, unlinkSync, readlinkSync, renameSync } from 'node:fs';
23
+ import { existsSync, readFileSync, writeFileSync, mkdirSync, readdirSync, cpSync, chmodSync, unlinkSync, readlinkSync, renameSync, statSync } from 'node:fs';
24
24
  import { join, basename, resolve, dirname } from 'node:path';
25
25
  import { execSync } from 'node:child_process';
26
26
  import { fileURLToPath } from 'node:url';
@@ -53,11 +53,16 @@ try {
53
53
  PKG_VERSION = JSON.parse(readFileSync(pkgPath, 'utf8')).version;
54
54
  } catch {}
55
55
 
56
- // Read catalog
57
- const catalogPath = join(__dirname, '..', 'catalog.json');
56
+ // Read catalog: prefer ~/.ldm/catalog.json (user-editable), fall back to npm package (#262)
57
+ const localCatalogPath = join(LDM_ROOT, 'catalog.json');
58
+ const packageCatalogPath = join(__dirname, '..', 'catalog.json');
58
59
  let CATALOG = { components: [] };
59
60
  try {
60
- CATALOG = JSON.parse(readFileSync(catalogPath, 'utf8'));
61
+ if (existsSync(localCatalogPath)) {
62
+ CATALOG = JSON.parse(readFileSync(localCatalogPath, 'utf8'));
63
+ } else {
64
+ CATALOG = JSON.parse(readFileSync(packageCatalogPath, 'utf8'));
65
+ }
61
66
  } catch {}
62
67
 
63
68
  // Auto-sync version.json when CLI version drifts (#33)
@@ -320,6 +325,19 @@ function loadCatalog() {
320
325
  return CATALOG.components || [];
321
326
  }
322
327
 
328
+ // Seed ~/.ldm/catalog.json from the npm package if it doesn't exist (#262)
329
+ function seedLocalCatalog() {
330
+ if (existsSync(localCatalogPath)) return false;
331
+ try {
332
+ const pkgCatalog = readFileSync(packageCatalogPath, 'utf8');
333
+ mkdirSync(LDM_ROOT, { recursive: true });
334
+ writeFileSync(localCatalogPath, pkgCatalog);
335
+ return true;
336
+ } catch {
337
+ return false;
338
+ }
339
+ }
340
+
323
341
  function findInCatalog(id) {
324
342
  const q = id.toLowerCase();
325
343
  // Strip org/ prefix for matching (e.g. "wipcomputer/openclaw-tavily" -> "openclaw-tavily")
@@ -357,7 +375,8 @@ function findInCatalog(id) {
357
375
  // Replaces the old execSync('ldm install ${c.repo}') which spawned
358
376
  // a full installer process for each component.
359
377
  async function installCatalogComponent(c) {
360
- const { installFromPath } = await import('../lib/deploy.mjs');
378
+ const { installFromPath, setFlags: setDeployFlags } = await import('../lib/deploy.mjs');
379
+ setDeployFlags({ dryRun: DRY_RUN, jsonOutput: JSON_OUTPUT, origin: 'catalog' }); // #262
361
380
  const repoTarget = c.repo;
362
381
  const repoName = basename(repoTarget);
363
382
  const repoPath = join(LDM_TMP, repoName);
@@ -392,6 +411,174 @@ async function installCatalogComponent(c) {
392
411
  }
393
412
 
394
413
  // ── Bridge deploy (#245) ──
414
+ // Deploy all scripts from scripts/ to ~/.ldm/bin/
415
+ // Called from both cmdInit() and cmdInstallCatalog() so script fixes land on every update.
416
+ function deployScripts() {
417
+ const scriptsSrc = join(__dirname, '..', 'scripts');
418
+ if (!existsSync(scriptsSrc)) return 0;
419
+ mkdirSync(join(LDM_ROOT, 'bin'), { recursive: true });
420
+ let count = 0;
421
+ for (const file of readdirSync(scriptsSrc)) {
422
+ if (!file.endsWith('.sh')) continue;
423
+ const src = join(scriptsSrc, file);
424
+ const dest = join(LDM_ROOT, 'bin', file);
425
+ cpSync(src, dest);
426
+ chmodSync(dest, 0o755);
427
+ count++;
428
+ }
429
+ if (count > 0) {
430
+ console.log(` + ${count} script(s) deployed to ~/.ldm/bin/`);
431
+ }
432
+ return count;
433
+ }
434
+
435
+ // Deploy personalized docs to both settings/docs/ and library/documentation/
436
+ // Called from both cmdInit() and cmdInstallCatalog() so doc fixes land on every update.
437
+ function deployDocs() {
438
+ const docsSrc = join(__dirname, '..', 'shared', 'docs');
439
+ if (!existsSync(docsSrc)) return 0;
440
+
441
+ let workspacePath = '';
442
+ try {
443
+ const ldmConfig = JSON.parse(readFileSync(join(LDM_ROOT, 'config.json'), 'utf8'));
444
+ workspacePath = (ldmConfig.workspace || '').replace('~', HOME);
445
+ } catch { return 0; }
446
+ if (!workspacePath || !existsSync(workspacePath)) return 0;
447
+
448
+ // Read config for template vars
449
+ let ldmConfig;
450
+ try {
451
+ ldmConfig = JSON.parse(readFileSync(join(LDM_ROOT, 'config.json'), 'utf8'));
452
+ } catch { return 0; }
453
+
454
+ const sc = ldmConfig;
455
+
456
+ // Agents from config (rich objects with harness/machine/prefix)
457
+ const agentsObj = sc.agents || {};
458
+ const agentsList = Object.entries(agentsObj).map(([id, a]) => `${id} (${a.harness} on ${a.machine})`).join(', ');
459
+ const agentsDetail = Object.entries(agentsObj).map(([id, a]) => `- **${id}**: ${a.harness} on ${a.machine}, branch prefix \`${a.prefix}/\``).join('\n');
460
+
461
+ // Harnesses from config
462
+ const harnessConfig = sc.harnesses || {};
463
+ const harnessesDetected = Object.entries(harnessConfig).filter(([,h]) => h.detected).map(([name]) => name);
464
+ const harnessesList = harnessesDetected.length > 0 ? harnessesDetected.join(', ') : 'run ldm install to detect';
465
+
466
+ const templateVars = {
467
+ 'name': sc.name || '',
468
+ 'org': sc.org || '',
469
+ 'timezone': sc.timezone || '',
470
+ 'paths.workspace': (sc.paths?.workspace || '').replace('~', HOME),
471
+ 'paths.ldm': (sc.paths?.ldm || '').replace('~', HOME),
472
+ 'paths.openclaw': (sc.paths?.openclaw || '').replace('~', HOME),
473
+ 'paths.icloud': (sc.paths?.icloud || '').replace('~', HOME),
474
+ 'memory.local': (sc.memory?.local || '').replace('~', HOME),
475
+ 'deploy.website': sc.deploy?.website || '',
476
+ 'backup.keep': String(sc.backup?.keep || 7),
477
+ 'agents_list': agentsList,
478
+ 'agents_detail': agentsDetail,
479
+ 'harnesses_list': harnessesList,
480
+ };
481
+
482
+ function renderTemplates(destDir) {
483
+ mkdirSync(destDir, { recursive: true });
484
+ let count = 0;
485
+ for (const file of readdirSync(docsSrc)) {
486
+ if (!file.endsWith('.tmpl')) continue;
487
+ let content = readFileSync(join(docsSrc, file), 'utf8');
488
+ content = content.replace(/\{\{([^}]+)\}\}/g, (match, key) => {
489
+ return templateVars[key.trim()] || match;
490
+ });
491
+ const outName = file.replace('.tmpl', '');
492
+ writeFileSync(join(destDir, outName), content);
493
+ count++;
494
+ }
495
+ return count;
496
+ }
497
+
498
+ // Deploy to settings/docs/ (agent reference)
499
+ const docsDest = join(workspacePath, 'settings', 'docs');
500
+ const docsCount = renderTemplates(docsDest);
501
+ if (docsCount > 0) {
502
+ console.log(` + ${docsCount} personalized doc(s) deployed to ${docsDest.replace(HOME, '~')}/`);
503
+ }
504
+
505
+ // Deploy to library/documentation/ (human-readable library copy)
506
+ const libraryDest = join(workspacePath, 'library', 'documentation');
507
+ if (existsSync(join(workspacePath, 'library'))) {
508
+ const libCount = renderTemplates(libraryDest);
509
+ if (libCount > 0) {
510
+ console.log(` + ${libCount} doc(s) deployed to ${libraryDest.replace(HOME, '~')}/`);
511
+ }
512
+ }
513
+
514
+ return docsCount;
515
+ }
516
+
517
+ // Check backup health: is a trigger configured, did it run recently, is iCloud set up?
518
+ // Called from cmdInstallCatalog() on every install.
519
+ function checkBackupHealth() {
520
+ const config = readJSON(join(LDM_ROOT, 'config.json'));
521
+ if (!config) return;
522
+
523
+ const backup = config.backup || {};
524
+ const issues = [];
525
+
526
+ // Check iCloud offsite
527
+ const icloudPath = config.paths?.icloudBackup || backup.icloudPath;
528
+ if (!icloudPath) {
529
+ issues.push('iCloud offsite not configured. Add paths.icloudBackup to ~/.ldm/config.json');
530
+ } else {
531
+ const expandedPath = icloudPath.replace(/^~/, HOME);
532
+ if (!existsSync(expandedPath)) {
533
+ try { mkdirSync(expandedPath, { recursive: true }); } catch {}
534
+ if (!existsSync(expandedPath)) {
535
+ issues.push(`iCloud path does not exist: ${icloudPath}`);
536
+ }
537
+ }
538
+ }
539
+
540
+ // Check LaunchAgent
541
+ try {
542
+ const label = backup.triggerLabel || 'ai.openclaw.ldm-backup';
543
+ const result = execSync(`launchctl list ${label} 2>/dev/null`, { encoding: 'utf8', timeout: 3000 });
544
+ if (!result) issues.push(`LaunchAgent ${label} not loaded`);
545
+ } catch {
546
+ issues.push('Backup LaunchAgent not loaded. Backups may not run automatically.');
547
+ }
548
+
549
+ // Check last backup age
550
+ const backupRoot = join(LDM_ROOT, 'backups');
551
+ if (existsSync(backupRoot)) {
552
+ const dirs = readdirSync(backupRoot)
553
+ .filter(d => d.match(/^20\d{2}-\d{2}-\d{2}--/) && statSync(join(backupRoot, d)).isDirectory())
554
+ .sort()
555
+ .reverse();
556
+ if (dirs.length > 0) {
557
+ const latest = dirs[0];
558
+ const latestDate = latest.replace(/--.*/, '').replace(/-/g, '/');
559
+ const age = Date.now() - new Date(latestDate).getTime();
560
+ const hours = Math.round(age / (1000 * 60 * 60));
561
+ if (hours > 36) {
562
+ issues.push(`Last backup is ${hours} hours old (${latest}). Expected within 24 hours.`);
563
+ }
564
+ } else {
565
+ issues.push('No backups found. Run: ldm backup');
566
+ }
567
+ }
568
+
569
+ // Check backup script exists
570
+ const scriptPath = join(LDM_ROOT, 'bin', 'ldm-backup.sh');
571
+ if (!existsSync(scriptPath)) {
572
+ issues.push('Backup script missing at ~/.ldm/bin/ldm-backup.sh. Run: ldm init');
573
+ }
574
+
575
+ if (issues.length > 0) {
576
+ for (const issue of issues) {
577
+ console.log(` ! Backup: ${issue}`);
578
+ }
579
+ }
580
+ }
581
+
395
582
  // The bridge (src/bridge/) builds to dist/bridge/ and ships in the npm package.
396
583
  // After `npm install -g`, the updated files live at the npm package location but
397
584
  // never get copied to ~/.ldm/extensions/lesa-bridge/dist/. This function fixes that.
@@ -653,10 +840,15 @@ async function cmdInit() {
653
840
 
654
841
  // Seed registry if missing
655
842
  if (!existsSync(REGISTRY_PATH)) {
656
- writeJSON(REGISTRY_PATH, { _format: 'v1', extensions: {} });
843
+ writeJSON(REGISTRY_PATH, { _format: 'v2', extensions: {} });
657
844
  console.log(` + registry.json created`);
658
845
  }
659
846
 
847
+ // Seed local catalog from npm package (#262)
848
+ if (seedLocalCatalog()) {
849
+ console.log(` + catalog.json seeded to ~/.ldm/catalog.json`);
850
+ }
851
+
660
852
  // Install global git pre-commit hook (blocks commits on main)
661
853
  const hooksDir = join(LDM_ROOT, 'hooks');
662
854
  const preCommitDest = join(hooksDir, 'pre-commit');
@@ -700,29 +892,8 @@ async function cmdInit() {
700
892
  }
701
893
  }
702
894
 
703
- // Deploy backup + restore scripts (#119)
704
- const backupSrc = join(__dirname, '..', 'scripts', 'ldm-backup.sh');
705
- const backupDest = join(LDM_ROOT, 'bin', 'ldm-backup.sh');
706
- if (existsSync(backupSrc)) {
707
- mkdirSync(join(LDM_ROOT, 'bin'), { recursive: true });
708
- cpSync(backupSrc, backupDest);
709
- chmodSync(backupDest, 0o755);
710
- console.log(` + ldm-backup.sh deployed to ~/.ldm/bin/`);
711
- }
712
- const restoreSrc = join(__dirname, '..', 'scripts', 'ldm-restore.sh');
713
- const restoreDest = join(LDM_ROOT, 'bin', 'ldm-restore.sh');
714
- if (existsSync(restoreSrc)) {
715
- cpSync(restoreSrc, restoreDest);
716
- chmodSync(restoreDest, 0o755);
717
- console.log(` + ldm-restore.sh deployed to ~/.ldm/bin/`);
718
- }
719
- const summarySrc = join(__dirname, '..', 'scripts', 'ldm-summary.sh');
720
- const summaryDest = join(LDM_ROOT, 'bin', 'ldm-summary.sh');
721
- if (existsSync(summarySrc)) {
722
- cpSync(summarySrc, summaryDest);
723
- chmodSync(summaryDest, 0o755);
724
- console.log(` + ldm-summary.sh deployed to ~/.ldm/bin/`);
725
- }
895
+ // Deploy all scripts from scripts/ to ~/.ldm/bin/ (#119)
896
+ deployScripts();
726
897
 
727
898
  // Deploy shared rules to ~/.ldm/shared/rules/ and to harnesses
728
899
  const rulesSrc = join(__dirname, '..', 'shared', 'rules');
@@ -835,67 +1006,8 @@ async function cmdInit() {
835
1006
  }
836
1007
  } catch {}
837
1008
 
838
- // Deploy personalized docs to settings/docs/ (from templates + config.json)
839
- const docsSrc = join(__dirname, '..', 'shared', 'docs');
840
- if (existsSync(docsSrc)) {
841
- let workspacePath = '';
842
- try {
843
- const ldmConfig = JSON.parse(readFileSync(join(LDM_ROOT, 'config.json'), 'utf8'));
844
- workspacePath = (ldmConfig.workspace || '').replace('~', HOME);
845
-
846
- if (workspacePath && existsSync(workspacePath)) {
847
- const docsDest = join(workspacePath, 'settings', 'docs');
848
- mkdirSync(docsDest, { recursive: true });
849
- let docsCount = 0;
850
-
851
- // Build template values from ~/.ldm/config.json (unified config)
852
- // Legacy: settings/config.json was a separate file, now merged into config.json
853
- const sc = ldmConfig;
854
- const lc = ldmConfig;
855
-
856
- // Agents from settings config (rich objects with harness/machine/prefix)
857
- const agentsObj = sc.agents || {};
858
- const agentsList = Object.entries(agentsObj).map(([id, a]) => `${id} (${a.harness} on ${a.machine})`).join(', ');
859
- const agentsDetail = Object.entries(agentsObj).map(([id, a]) => `- **${id}**: ${a.harness} on ${a.machine}, branch prefix \`${a.prefix}/\``).join('\n');
860
-
861
- // Harnesses from ldm config
862
- const harnessConfig = lc.harnesses || {};
863
- const harnessesDetected = Object.entries(harnessConfig).filter(([,h]) => h.detected).map(([name]) => name);
864
- const harnessesList = harnessesDetected.length > 0 ? harnessesDetected.join(', ') : 'run ldm install to detect';
865
-
866
- const templateVars = {
867
- 'name': sc.name || '',
868
- 'org': sc.org || '',
869
- 'timezone': sc.timezone || '',
870
- 'paths.workspace': (sc.paths?.workspace || '').replace('~', HOME),
871
- 'paths.ldm': (sc.paths?.ldm || '').replace('~', HOME),
872
- 'paths.openclaw': (sc.paths?.openclaw || '').replace('~', HOME),
873
- 'paths.icloud': (sc.paths?.icloud || '').replace('~', HOME),
874
- 'memory.local': (sc.memory?.local || '').replace('~', HOME),
875
- 'deploy.website': sc.deploy?.website || '',
876
- 'backup.keep': String(sc.backup?.keep || 7),
877
- 'agents_list': agentsList,
878
- 'agents_detail': agentsDetail,
879
- 'harnesses_list': harnessesList,
880
- };
881
-
882
- for (const file of readdirSync(docsSrc)) {
883
- if (!file.endsWith('.tmpl')) continue;
884
- let content = readFileSync(join(docsSrc, file), 'utf8');
885
- // Replace template vars
886
- content = content.replace(/\{\{([^}]+)\}\}/g, (match, key) => {
887
- return templateVars[key.trim()] || match;
888
- });
889
- const outName = file.replace('.tmpl', '');
890
- writeFileSync(join(docsDest, outName), content);
891
- docsCount++;
892
- }
893
- if (docsCount > 0) {
894
- console.log(` + ${docsCount} personalized doc(s) deployed to ${docsDest.replace(HOME, '~')}/`);
895
- }
896
- }
897
- } catch {}
898
- }
1009
+ // Deploy personalized docs to settings/docs/ and library/documentation/
1010
+ deployDocs();
899
1011
 
900
1012
  // Deploy LaunchAgents to ~/Library/LaunchAgents/
901
1013
  // Templates use {{HOME}} and {{OPENCLAW_GATEWAY_TOKEN}} placeholders, replaced at deploy time.
@@ -1064,7 +1176,7 @@ async function cmdInstall() {
1064
1176
  // Refresh harness detection (catches newly installed harnesses)
1065
1177
  detectHarnesses();
1066
1178
 
1067
- setFlags({ dryRun: DRY_RUN, jsonOutput: JSON_OUTPUT });
1179
+ setFlags({ dryRun: DRY_RUN, jsonOutput: JSON_OUTPUT, origin: 'manual' });
1068
1180
 
1069
1181
  // --help flag (#81)
1070
1182
  if (args.includes('--help') || args.includes('-h')) {
@@ -1108,6 +1220,7 @@ async function cmdInstall() {
1108
1220
  // Check if target is a catalog ID (e.g. "memory-crystal")
1109
1221
  const catalogEntry = findInCatalog(resolvedTarget);
1110
1222
  if (catalogEntry) {
1223
+ setFlags({ dryRun: DRY_RUN, jsonOutput: JSON_OUTPUT, origin: 'catalog' }); // #262
1111
1224
  console.log('');
1112
1225
  console.log(` Resolved "${target}" via catalog to ${catalogEntry.repo}`);
1113
1226
 
@@ -1234,6 +1347,98 @@ async function cmdInstall() {
1234
1347
  }
1235
1348
  }
1236
1349
 
1350
+ // ── Registry migration (#262) ──
1351
+ // Upgrades old v1 registry entries to v2 format with source info.
1352
+ // Runs once per install. Idempotent: entries that already have source are skipped.
1353
+
1354
+ function migrateRegistry() {
1355
+ const registry = readJSON(REGISTRY_PATH);
1356
+ if (!registry?.extensions) return 0;
1357
+
1358
+ const components = loadCatalog();
1359
+ let migrated = 0;
1360
+
1361
+ for (const [name, entry] of Object.entries(registry.extensions)) {
1362
+ // Skip entries that have already been migrated to v2 format.
1363
+ // An entry is fully migrated if it has: installed (object), paths, origin,
1364
+ // and source is either structured (object with type) or explicitly null.
1365
+ const hasV2Installed = entry.installed && typeof entry.installed === 'object' && entry.installed.version;
1366
+ const sourceIsResolved = entry.source === null || (typeof entry.source === 'object' && entry.source?.type);
1367
+ if (hasV2Installed && entry.paths && entry.origin && sourceIsResolved) continue;
1368
+
1369
+ const newSource = { type: 'github' };
1370
+ let hasSource = false;
1371
+
1372
+ // Try 1: match against catalog for source info
1373
+ const catalogMatch = components.find(c => {
1374
+ const matches = c.registryMatches || [c.id];
1375
+ return matches.includes(name) || c.id === name;
1376
+ });
1377
+ if (catalogMatch) {
1378
+ if (catalogMatch.repo) { newSource.repo = catalogMatch.repo; hasSource = true; }
1379
+ if (catalogMatch.npm) { newSource.npm = catalogMatch.npm; hasSource = true; }
1380
+ if (!entry.origin) entry.origin = 'catalog';
1381
+ }
1382
+
1383
+ // Try 2: read from the installed extension's package.json repository field
1384
+ if (!hasSource || !newSource.repo) {
1385
+ const extPkgPath = join(LDM_EXTENSIONS, name, 'package.json');
1386
+ const extPkg = readJSON(extPkgPath);
1387
+ if (extPkg?.name && !newSource.npm) {
1388
+ newSource.npm = extPkg.name;
1389
+ hasSource = true;
1390
+ }
1391
+ if (extPkg?.repository) {
1392
+ const raw = typeof extPkg.repository === 'string'
1393
+ ? extPkg.repository
1394
+ : extPkg.repository.url || '';
1395
+ const ghMatch = raw.match(/github\.com[:/]([^/]+\/[^/.]+)/);
1396
+ if (ghMatch) {
1397
+ newSource.repo = ghMatch[1].replace(/\.git$/, '');
1398
+ hasSource = true;
1399
+ }
1400
+ }
1401
+ }
1402
+
1403
+ if (hasSource) {
1404
+ entry.source = newSource;
1405
+ } else if (typeof entry.source === 'string') {
1406
+ // Legacy string source (path or URL). Clear it since we couldn't build structured source.
1407
+ entry.source = null;
1408
+ }
1409
+
1410
+ // Migrate flat version to installed block
1411
+ if (!entry.installed || typeof entry.installed !== 'object') {
1412
+ entry.installed = {
1413
+ version: entry.version || 'unknown',
1414
+ installedAt: entry.updatedAt || new Date().toISOString(),
1415
+ updatedAt: entry.updatedAt || new Date().toISOString(),
1416
+ };
1417
+ }
1418
+
1419
+ // Migrate flat paths to paths block
1420
+ if (!entry.paths) {
1421
+ entry.paths = {};
1422
+ if (entry.ldmPath) entry.paths.ldm = entry.ldmPath;
1423
+ if (entry.ocPath) entry.paths.openclaw = entry.ocPath;
1424
+ }
1425
+
1426
+ // Set origin if missing
1427
+ if (!entry.origin) {
1428
+ entry.origin = 'manual';
1429
+ }
1430
+
1431
+ migrated++;
1432
+ }
1433
+
1434
+ if (migrated > 0) {
1435
+ registry._format = 'v2';
1436
+ writeJSON(REGISTRY_PATH, registry);
1437
+ }
1438
+
1439
+ return migrated;
1440
+ }
1441
+
1237
1442
  // ── Auto-detect unregistered extensions ──
1238
1443
 
1239
1444
  function autoDetectExtensions() {
@@ -1318,11 +1523,29 @@ async function cmdInstallCatalog() {
1318
1523
 
1319
1524
  autoDetectExtensions();
1320
1525
 
1526
+ // Migrate old registry entries to v2 format (#262)
1527
+ const migrated = migrateRegistry();
1528
+ if (migrated > 0) {
1529
+ console.log(` + Migrated ${migrated} registry entries to v2 format (source info added)`);
1530
+ }
1531
+
1532
+ // Seed local catalog if missing (#262)
1533
+ if (seedLocalCatalog()) {
1534
+ console.log(` + catalog.json seeded to ~/.ldm/catalog.json`);
1535
+ }
1536
+
1321
1537
  // Deploy bridge files after self-update or on every catalog install (#245, #251)
1322
1538
  // After npm install -g, the new bridge files are in the npm package but not
1323
1539
  // in the extension directories. This copies them to both LDM and OpenClaw targets.
1324
1540
  deployBridge();
1325
1541
 
1542
+ // Deploy scripts and docs on every install so fixes land without re-init
1543
+ deployScripts();
1544
+ deployDocs();
1545
+
1546
+ // Check backup configuration
1547
+ checkBackupHealth();
1548
+
1326
1549
  const { detectSystemState, reconcileState, formatReconciliation } = await import('../lib/state.mjs');
1327
1550
  const state = detectSystemState();
1328
1551
  const reconciled = reconcileState(state);
@@ -1528,7 +1751,9 @@ async function cmdInstallCatalog() {
1528
1751
  console.log('');
1529
1752
  }
1530
1753
 
1531
- // Build the update plan: check ALL installed extensions against npm (#55)
1754
+ // Build the update plan from REGISTRY entries (#262)
1755
+ // The registry is the source of truth. Each entry has source info (npm, repo)
1756
+ // that tells us where to check for updates.
1532
1757
  const npmUpdates = [];
1533
1758
 
1534
1759
  // Check CLI self-update (#132)
@@ -1548,59 +1773,104 @@ async function cmdInstallCatalog() {
1548
1773
  }
1549
1774
  } catch {}
1550
1775
 
1551
- // Check every installed extension against npm via catalog
1552
- console.log(' Checking npm for updates...');
1553
- for (const [name, entry] of Object.entries(reconciled)) {
1554
- if (!entry.deployedLdm && !entry.deployedOc) continue; // not installed
1776
+ // Check every registered extension for updates (#262)
1777
+ // Source of truth: registry entry's source.npm and source.repo fields.
1778
+ // Fallback: extension's package.json (for old entries without source info).
1779
+ console.log(' Checking for updates...');
1780
+ const registryEntries = Object.entries(registry?.extensions || {});
1781
+ const checkedNames = new Set(); // track what we've checked
1555
1782
 
1556
- // Get npm package name from the installed extension's own package.json
1557
- const extPkgPath = join(LDM_EXTENSIONS, name, 'package.json');
1558
- const extPkg = readJSON(extPkgPath);
1559
- const npmPkg = extPkg?.name;
1560
- if (!npmPkg) continue; // no package name, skip
1783
+ for (const [name, regEntry] of registryEntries) {
1784
+ // Skip entries with no installed version
1785
+ const currentVersion = regEntry?.installed?.version || regEntry?.version;
1786
+ if (!currentVersion) continue;
1561
1787
 
1562
- // Find catalog entry for the repo URL (used for clone if update needed)
1788
+ // Skip pinned components (e.g. OpenClaw)
1563
1789
  const catalogEntry = components.find(c => {
1564
1790
  const matches = c.registryMatches || [c.id];
1565
1791
  return matches.includes(name) || c.id === name;
1566
1792
  });
1567
-
1568
- // Skip pinned components (e.g. OpenClaw). Upgrades must be explicit.
1569
1793
  if (catalogEntry?.pinned) continue;
1570
1794
 
1571
- // Fallback: use repository.url from extension's package.json (#82)
1572
- let repoUrl = catalogEntry?.repo || null;
1573
- if (!repoUrl && extPkg?.repository) {
1574
- const raw = typeof extPkg.repository === 'string'
1575
- ? extPkg.repository
1576
- : extPkg.repository.url || '';
1577
- const ghMatch = raw.match(/github\.com[:/]([^/]+\/[^/.]+)/);
1578
- if (ghMatch) repoUrl = ghMatch[1];
1795
+ // Get npm package name from registry source (v2) or extension's package.json (legacy)
1796
+ const sourceNpm = regEntry?.source?.npm;
1797
+ const sourceRepo = regEntry?.source?.repo;
1798
+ let npmPkg = sourceNpm || null;
1799
+
1800
+ // Fallback: read from installed extension's package.json
1801
+ if (!npmPkg) {
1802
+ const extPkgPath = join(LDM_EXTENSIONS, name, 'package.json');
1803
+ const extPkg = readJSON(extPkgPath);
1804
+ npmPkg = extPkg?.name || null;
1579
1805
  }
1580
1806
 
1581
- const currentVersion = entry.ldmVersion || entry.ocVersion;
1582
- if (!currentVersion) continue;
1807
+ // Determine repo URL for cloning updates
1808
+ let repoUrl = sourceRepo || catalogEntry?.repo || null;
1809
+ if (!repoUrl) {
1810
+ const extPkgPath = join(LDM_EXTENSIONS, name, 'package.json');
1811
+ const extPkg = readJSON(extPkgPath);
1812
+ if (extPkg?.repository) {
1813
+ const raw = typeof extPkg.repository === 'string'
1814
+ ? extPkg.repository
1815
+ : extPkg.repository.url || '';
1816
+ const ghMatch = raw.match(/github\.com[:/]([^/]+\/[^/.]+)/);
1817
+ if (ghMatch) repoUrl = ghMatch[1];
1818
+ }
1819
+ }
1583
1820
 
1584
- try {
1585
- const latestVersion = execSync(`npm view ${npmPkg} version 2>/dev/null`, {
1586
- encoding: 'utf8', timeout: 10000,
1587
- }).trim();
1821
+ // Check npm for updates (fast, one HTTP call)
1822
+ if (npmPkg) {
1823
+ try {
1824
+ const latestVersion = execSync(`npm view ${npmPkg} version 2>/dev/null`, {
1825
+ encoding: 'utf8', timeout: 10000,
1826
+ }).trim();
1588
1827
 
1589
- if (latestVersion && latestVersion !== currentVersion) {
1590
- npmUpdates.push({
1591
- ...entry,
1592
- catalogRepo: repoUrl,
1593
- catalogNpm: npmPkg,
1594
- currentVersion,
1595
- latestVersion,
1596
- hasUpdate: true,
1828
+ if (latestVersion && latestVersion !== currentVersion) {
1829
+ npmUpdates.push({
1830
+ name,
1831
+ catalogRepo: repoUrl,
1832
+ catalogNpm: npmPkg,
1833
+ currentVersion,
1834
+ latestVersion,
1835
+ hasUpdate: true,
1836
+ });
1837
+ }
1838
+ } catch {}
1839
+ checkedNames.add(name);
1840
+ continue;
1841
+ }
1842
+
1843
+ // No npm package. Check GitHub tags via git ls-remote (#262).
1844
+ // Works for private repos with SSH access.
1845
+ if (repoUrl) {
1846
+ try {
1847
+ const sshUrl = `git@github.com:${repoUrl}.git`;
1848
+ const tags = execSync(`git ls-remote --tags --sort=-v:refname "${sshUrl}" 2>/dev/null`, {
1849
+ encoding: 'utf8', timeout: 15000,
1597
1850
  });
1598
- }
1599
- } catch {}
1851
+ // Parse latest semver tag
1852
+ const tagMatch = tags.match(/refs\/tags\/v?(\d+\.\d+\.\d+)/);
1853
+ if (tagMatch) {
1854
+ const latestVersion = tagMatch[1];
1855
+ if (latestVersion !== currentVersion) {
1856
+ npmUpdates.push({
1857
+ name,
1858
+ catalogRepo: repoUrl,
1859
+ catalogNpm: repoUrl, // display repo URL since no npm package
1860
+ currentVersion,
1861
+ latestVersion,
1862
+ hasUpdate: true,
1863
+ });
1864
+ }
1865
+ }
1866
+ } catch {}
1867
+ checkedNames.add(name);
1868
+ }
1600
1869
  }
1601
1870
 
1602
- // Check global CLIs not tracked by extension loop (#81)
1871
+ // Check global CLIs not tracked by registry (#81)
1603
1872
  for (const [binName, binInfo] of Object.entries(state.cliBinaries || {})) {
1873
+ if (checkedNames.has(binName)) continue;
1604
1874
  const catalogComp = components.find(c =>
1605
1875
  (c.cliMatches || []).includes(binName)
1606
1876
  );
@@ -1635,18 +1905,17 @@ async function cmdInstallCatalog() {
1635
1905
  // Check parent packages for toolbox-style repos (#132)
1636
1906
  // If sub-tools are installed but the parent npm package has a newer version,
1637
1907
  // report the parent as needing an update (not the individual sub-tool).
1638
- // Don't skip packages already found by the extension loop. The parent check
1639
- // REPLACES sub-tool entries with the parent name.
1640
1908
  const checkedParentNpm = new Set();
1641
1909
  for (const comp of components) {
1642
1910
  if (!comp.npm || checkedParentNpm.has(comp.npm)) continue;
1643
1911
  if (!comp.registryMatches || comp.registryMatches.length === 0) continue;
1644
1912
 
1645
1913
  // If any registryMatch is installed, check the parent package
1646
- const installedMatch = comp.registryMatches.find(m => reconciled[m]);
1914
+ const installedMatch = comp.registryMatches.find(m => registry?.extensions?.[m]);
1647
1915
  if (!installedMatch) continue;
1648
1916
 
1649
- const currentVersion = reconciled[installedMatch]?.ldmVersion || reconciled[installedMatch]?.ocVersion || '?';
1917
+ const matchEntry = registry.extensions[installedMatch];
1918
+ const currentVersion = matchEntry?.installed?.version || matchEntry?.version || '?';
1650
1919
 
1651
1920
  try {
1652
1921
  const latest = execSync(`npm view ${comp.npm} version 2>/dev/null`, {
@@ -1654,8 +1923,6 @@ async function cmdInstallCatalog() {
1654
1923
  }).trim();
1655
1924
  if (latest && latest !== currentVersion) {
1656
1925
  // Remove any sub-tool entries that belong to this parent.
1657
- // Match by name in registryMatches (sub-tools have their own npm names,
1658
- // not the parent's, so catalogNpm comparison doesn't work).
1659
1926
  const parentMatches = new Set(comp.registryMatches || []);
1660
1927
  for (let i = npmUpdates.length - 1; i >= 0; i--) {
1661
1928
  if (!npmUpdates[i].isCLI && parentMatches.has(npmUpdates[i].name)) {
@@ -1841,7 +2108,7 @@ async function cmdInstallCatalog() {
1841
2108
  const manifestPath = createRevertManifest(
1842
2109
  `ldm install (update ${totalUpdates} extensions)`,
1843
2110
  npmUpdates.map(e => ({
1844
- action: 'update-from-catalog',
2111
+ action: 'update-from-registry',
1845
2112
  name: e.name,
1846
2113
  currentVersion: e.currentVersion,
1847
2114
  latestVersion: e.latestVersion,
@@ -1852,11 +2119,11 @@ async function cmdInstallCatalog() {
1852
2119
  console.log('');
1853
2120
 
1854
2121
  const { setFlags, installFromPath } = await import('../lib/deploy.mjs');
1855
- setFlags({ dryRun: DRY_RUN, jsonOutput: JSON_OUTPUT });
2122
+ setFlags({ dryRun: DRY_RUN, jsonOutput: JSON_OUTPUT, origin: 'catalog' }); // #262
1856
2123
 
1857
2124
  let updated = 0;
1858
2125
 
1859
- // Update from npm via catalog repos (#55) and CLIs (#81)
2126
+ // Update from registry sources (#262, replaces old catalog-based update loop)
1860
2127
  for (const entry of npmUpdates) {
1861
2128
  // CLI self-update is handled by the self-update block at the top of cmdInstallCatalog()
1862
2129
  if (entry.isCLI) continue;
@@ -1882,14 +2149,20 @@ async function cmdInstallCatalog() {
1882
2149
  execSync(`ldm install ${entry.catalogRepo}`, { stdio: 'inherit' });
1883
2150
  updated++;
1884
2151
 
1885
- // For parent packages, update registry version for all sub-tools (#139)
2152
+ // For parent packages, update registry version for all sub-tools (#139, #262)
1886
2153
  if (entry.isParent && entry.registryMatches) {
1887
2154
  const registry = readJSON(REGISTRY_PATH);
1888
2155
  if (registry?.extensions) {
2156
+ const now = new Date().toISOString();
1889
2157
  for (const subTool of entry.registryMatches) {
1890
2158
  if (registry.extensions[subTool]) {
1891
2159
  registry.extensions[subTool].version = entry.latestVersion;
1892
- registry.extensions[subTool].updatedAt = new Date().toISOString();
2160
+ registry.extensions[subTool].updatedAt = now;
2161
+ // Also update v2 installed block
2162
+ if (registry.extensions[subTool].installed) {
2163
+ registry.extensions[subTool].installed.version = entry.latestVersion;
2164
+ registry.extensions[subTool].installed.updatedAt = now;
2165
+ }
1893
2166
  }
1894
2167
  }
1895
2168
  writeFileSync(REGISTRY_PATH, JSON.stringify(registry, null, 2));
@@ -2293,14 +2566,18 @@ function cmdStatus() {
2293
2566
  if (latest && latest !== PKG_VERSION) cliUpdate = latest;
2294
2567
  } catch {}
2295
2568
 
2296
- // Check extensions against npm
2569
+ // Check extensions against npm using registry source info (#262)
2297
2570
  const updates = [];
2298
2571
  for (const [name, info] of Object.entries(registry?.extensions || {})) {
2299
- const extPkgPath = join(LDM_EXTENSIONS, name, 'package.json');
2300
- const extPkg = readJSON(extPkgPath);
2301
- const npmPkg = extPkg?.name;
2572
+ // Use registry source.npm (v2) or fall back to extension's package.json
2573
+ let npmPkg = info?.source?.npm || null;
2574
+ if (!npmPkg) {
2575
+ const extPkgPath = join(LDM_EXTENSIONS, name, 'package.json');
2576
+ const extPkg = readJSON(extPkgPath);
2577
+ npmPkg = extPkg?.name;
2578
+ }
2302
2579
  if (!npmPkg) continue;
2303
- const currentVersion = extPkg.version || info.version;
2580
+ const currentVersion = info?.installed?.version || info.version;
2304
2581
  if (!currentVersion) continue;
2305
2582
  try {
2306
2583
  const latest = execSync(`npm view ${npmPkg} version 2>/dev/null`, {
package/catalog.json CHANGED
@@ -301,6 +301,24 @@
301
301
  "installs": {
302
302
  "ocPlugin": "Web search and content extraction"
303
303
  }
304
+ },
305
+ {
306
+ "id": "private-mode",
307
+ "name": "Private Mode",
308
+ "description": "Privacy controls for AI agents. Pause memory capture, scan storage, wipe history.",
309
+ "npm": "private-mode",
310
+ "repo": "wipcomputer/wip-private-mode-private",
311
+ "registryMatches": [
312
+ "private-mode",
313
+ "wip-private-mode"
314
+ ],
315
+ "cliMatches": [],
316
+ "recommended": false,
317
+ "status": "stable",
318
+ "postInstall": null,
319
+ "installs": {
320
+ "ocPlugin": "Private mode toggle, memory status indicator, wipe scan/search/execute"
321
+ }
304
322
  }
305
323
  ]
306
324
  }
@@ -0,0 +1,108 @@
1
+ # Backup
2
+
3
+ ## One Script, One Place
4
+
5
+ `~/.ldm/bin/ldm-backup.sh` runs daily at 3:00 AM via LaunchAgent `ai.openclaw.ldm-backup`. It backs up everything to `~/.ldm/backups/`, then tars it to iCloud for offsite.
6
+
7
+ ## What Gets Backed Up
8
+
9
+ | Source | Method | What's in it |
10
+ |--------|--------|-------------|
11
+ | `~/.ldm/memory/crystal.db` | sqlite3 .backup | Irreplaceable memory (all agents) |
12
+ | `~/.ldm/agents/` | cp -a | Identity files, journals, daily logs |
13
+ | `~/.ldm/state/` | cp -a | Config, version, registry |
14
+ | `~/.ldm/config.json` | cp | Workspace pointer, org |
15
+ | `~/.openclaw/memory/main.sqlite` | sqlite3 .backup | OC conversations |
16
+ | `~/.openclaw/memory/context-embeddings.sqlite` | sqlite3 .backup | Embeddings |
17
+ | `~/.openclaw/workspace/` | tar | Shared context, daily logs |
18
+ | `~/.openclaw/agents/main/sessions/` | tar | OC session JSONL |
19
+ | `~/.openclaw/openclaw.json` | cp | OC config |
20
+ | `~/.claude/CLAUDE.md` | cp | CC instructions |
21
+ | `~/.claude/settings.json` | cp | CC settings |
22
+ | `~/.claude/projects/` | tar | CC auto-memory + transcripts |
23
+ | Workspace directory | tar (excludes node_modules, .git/objects, old backups, _trash) | Entire workspace |
24
+
25
+ **NOT backed up:** node_modules/, .git/objects/ (reconstructable), extensions (reinstallable), ~/.claude/cache.
26
+
27
+ ## Backup Structure
28
+
29
+ ```
30
+ ~/.ldm/backups/2026-03-24--09-50-22/
31
+ ldm/
32
+ memory/crystal.db
33
+ agents/
34
+ state/
35
+ config.json
36
+ openclaw/
37
+ memory/main.sqlite
38
+ memory/context-embeddings.sqlite
39
+ workspace.tar
40
+ sessions.tar
41
+ openclaw.json
42
+ claude/
43
+ CLAUDE.md
44
+ settings.json
45
+ projects.tar
46
+ <workspace>.tar
47
+ ```
48
+
49
+ ## iCloud Offsite
50
+
51
+ After local backup, the entire dated folder is compressed and copied to iCloud. The destination path is read from `~/.ldm/config.json` at `paths.icloudBackup`.
52
+
53
+ One file per backup. iCloud syncs it across devices. Rotation matches the local retention setting.
54
+
55
+ ## How to Run
56
+
57
+ ```bash
58
+ ~/.ldm/bin/ldm-backup.sh # run backup now
59
+ ~/.ldm/bin/ldm-backup.sh --dry-run # preview what would be backed up
60
+ ~/.ldm/bin/ldm-backup.sh --keep 14 # keep 14 days instead of 7
61
+ ~/.ldm/bin/ldm-backup.sh --include-secrets # include ~/.ldm/secrets/
62
+ ```
63
+
64
+ You can also run via the CLI:
65
+
66
+ ```bash
67
+ ldm backup # run backup now
68
+ ldm backup --dry-run # preview with sizes
69
+ ldm backup --pin "before upgrade" # pin latest backup so rotation skips it
70
+ ```
71
+
72
+ ## How to Restore
73
+
74
+ ```bash
75
+ ~/.ldm/bin/ldm-restore.sh # list available backups
76
+ ~/.ldm/bin/ldm-restore.sh 2026-03-24--09-50-22 # restore everything
77
+ ~/.ldm/bin/ldm-restore.sh --only ldm <backup> # restore only crystal.db + agents
78
+ ~/.ldm/bin/ldm-restore.sh --only openclaw <backup> # restore only OC data
79
+ ~/.ldm/bin/ldm-restore.sh --from-icloud <file> # restore from iCloud tar
80
+ ~/.ldm/bin/ldm-restore.sh --dry-run <backup> # preview
81
+ ```
82
+
83
+ After restore: `openclaw gateway restart` then `crystal status` to verify.
84
+
85
+ ## Schedule
86
+
87
+ | What | When | How |
88
+ |------|------|-----|
89
+ | Backup | 3:00 AM | LaunchAgent `ai.openclaw.ldm-backup` |
90
+
91
+ One LaunchAgent. One script. No Full Disk Access currently (target: midnight via LDMDevTools.app once PID error is fixed). Verification is built into the script (exit code + log).
92
+
93
+ ## Config
94
+
95
+ All backup settings live in `~/.ldm/config.json`:
96
+ - `paths.workspace` ... workspace path
97
+ - `paths.icloudBackup` ... iCloud offsite destination
98
+ - `backup.keep` ... retention days (default: 7)
99
+ - `backup.includeSecrets` ... whether to include `~/.ldm/secrets/`
100
+ - `org` ... used for tar filename prefix
101
+
102
+ ## Logs
103
+
104
+ `~/.ldm/logs/backup.log` (LaunchAgent stdout/stderr)
105
+
106
+ ## Technical Details
107
+
108
+ See [TECHNICAL.md](./TECHNICAL.md) for config schema, LaunchAgent plist, rotation logic, and script internals.
@@ -0,0 +1,112 @@
1
+ # Backup: Technical Details
2
+
3
+ ## Config Schema
4
+
5
+ All backup settings are in `~/.ldm/config.json`. The backup script reads these at runtime.
6
+
7
+ ```json
8
+ {
9
+ "org": "wipcomputerinc",
10
+ "paths": {
11
+ "workspace": "~/wipcomputerinc",
12
+ "ldm": "~/.ldm",
13
+ "claude": "~/.claude",
14
+ "openclaw": "~/.openclaw",
15
+ "icloudBackup": "~/Library/Mobile Documents/com~apple~CloudDocs/wipcomputerinc-icloud/backups"
16
+ },
17
+ "backup": {
18
+ "keep": 7,
19
+ "includeSecrets": false
20
+ }
21
+ }
22
+ ```
23
+
24
+ | Key | Type | Default | Description |
25
+ |-----|------|---------|-------------|
26
+ | `paths.workspace` | string | required | Root workspace directory to back up |
27
+ | `paths.icloudBackup` | string | optional | iCloud destination for offsite copies |
28
+ | `backup.keep` | number | 7 | Days of backups to keep before rotation |
29
+ | `backup.includeSecrets` | boolean | false | Whether to include `~/.ldm/secrets/` |
30
+ | `org` | string | required | Used as prefix in iCloud tar filenames |
31
+
32
+ ## Script Location
33
+
34
+ - **Source:** `scripts/ldm-backup.sh` in the wip-ldm-os-private repo
35
+ - **Deployed to:** `~/.ldm/bin/ldm-backup.sh`
36
+ - **Deployed by:** `deployScripts()` in `bin/ldm.js`, called during both `ldm init` and `ldm install`
37
+ - **Restore script:** `scripts/ldm-restore.sh` deployed to `~/.ldm/bin/ldm-restore.sh`
38
+
39
+ All `.sh` files in the repo's `scripts/` directory are deployed to `~/.ldm/bin/` on every `ldm install`. This means script fixes land automatically on the next update without requiring a full `ldm init`.
40
+
41
+ ## LaunchAgent
42
+
43
+ **Label:** `ai.openclaw.ldm-backup`
44
+ **Plist source:** `shared/launchagents/ai.openclaw.ldm-backup.plist`
45
+ **Deployed to:** `~/Library/LaunchAgents/ai.openclaw.ldm-backup.plist`
46
+
47
+ ```xml
48
+ <key>StartCalendarInterval</key>
49
+ <dict>
50
+ <key>Hour</key>
51
+ <integer>3</integer>
52
+ <key>Minute</key>
53
+ <integer>0</integer>
54
+ </dict>
55
+ ```
56
+
57
+ The plist uses `{{HOME}}` placeholders that are replaced at deploy time by `ldm init`.
58
+
59
+ **Logs:** stdout and stderr both go to `~/.ldm/logs/backup.log`.
60
+
61
+ **No Full Disk Access (FDA):** The LaunchAgent runs at 3:00 AM without FDA. Some paths (like `~/Library/Messages/`) are inaccessible without FDA. The target is to move the trigger to midnight via LDMDevTools.app (which has FDA) once the PID error is resolved.
62
+
63
+ ### Dead Triggers (Cleaned Automatically)
64
+
65
+ The `cleanDeadBackupTriggers()` function in `ldm.js` removes old competing triggers on every `ldm init`:
66
+ - Old cron entries referencing `LDMDevTools.app`
67
+ - `com.wipcomputer.daily-backup` LaunchAgent
68
+ - OpenClaw `backup-verify` cron entries
69
+
70
+ Only `ai.openclaw.ldm-backup` should exist.
71
+
72
+ ## Rotation Logic
73
+
74
+ The backup script handles rotation after a successful backup:
75
+
76
+ 1. List all dated directories in `~/.ldm/backups/` (format: `YYYY-MM-DD--HH-MM-SS`)
77
+ 2. Sort by name (which sorts chronologically)
78
+ 3. Skip any directory containing a `.pinned` marker file
79
+ 4. Delete directories beyond the `keep` count (oldest first)
80
+ 5. Same rotation logic applies to iCloud tars at `paths.icloudBackup`
81
+
82
+ **Pinning:** `ldm backup --pin "reason"` creates a `.pinned` file in the latest backup directory. Pinned backups are never rotated.
83
+
84
+ ## iCloud Offsite Details
85
+
86
+ After the local backup completes:
87
+
88
+ 1. Tar + gzip the entire dated backup directory
89
+ 2. Filename format: `<org>-<machine>-<timestamp>.tar.gz`
90
+ 3. Copy to `paths.icloudBackup` (from config.json)
91
+ 4. Apply the same rotation (keep N, skip pinned)
92
+ 5. iCloud syncs the file to all devices automatically
93
+
94
+ The iCloud destination directory must already exist — neither the backup script nor `ldm init` creates it. Create it manually before the first run.
95
+
96
+ ## SQLite Safety
97
+
98
+ SQLite files are backed up using `sqlite3 .backup`, not `cp`. This ensures a consistent snapshot even if the database is being written to. The script checks for the `sqlite3` binary and skips database backup with a warning if it is not found.
99
+
100
+ Files backed up this way:
101
+ - `~/.ldm/memory/crystal.db`
102
+ - `~/.openclaw/memory/main.sqlite`
103
+ - `~/.openclaw/memory/context-embeddings.sqlite`
104
+
105
+ ## Excludes
106
+
107
+ The workspace tar excludes:
108
+ - `node_modules/` ... reconstructable via npm install
109
+ - `.git/objects/` ... reconstructable via git fetch
110
+ - `backups/` ... avoids recursive backup
111
+ - `_trash/` ... already deleted content
112
+ - `*.tar.gz` ... avoids backing up old backup archives
package/lib/deploy.mjs CHANGED
@@ -30,10 +30,12 @@ const REGISTRY_PATH = join(LDM_EXTENSIONS, 'registry.json');
30
30
 
31
31
  let DRY_RUN = false;
32
32
  let JSON_OUTPUT = false;
33
+ let INSTALL_ORIGIN = 'manual'; // #262: tracks how an extension was installed
33
34
 
34
35
  export function setFlags(opts = {}) {
35
36
  DRY_RUN = opts.dryRun || false;
36
37
  JSON_OUTPUT = opts.jsonOutput || false;
38
+ if (opts.origin) INSTALL_ORIGIN = opts.origin;
37
39
  }
38
40
 
39
41
  function log(msg) { if (!JSON_OUTPUT) console.log(` ${msg}`); }
@@ -164,16 +166,104 @@ function getHarnesses() {
164
166
  function updateRegistry(name, info) {
165
167
  const registry = loadRegistry();
166
168
  const existing = registry.extensions[name];
167
- const isCore = CORE_EXTENSIONS.has(name);
169
+ const now = new Date().toISOString();
170
+
171
+ // Build the v2 registry entry (#262)
172
+ // Merge source info: keep existing source unless new info provides it
173
+ const existingSource = existing?.source;
174
+ let newSource = info._source || existingSource || null;
175
+ // Legacy: info.source was a string (path or URL). Convert to object.
176
+ if (typeof existingSource === 'string' && !newSource) {
177
+ newSource = null; // Drop legacy string source, migration will fix it
178
+ }
179
+ if (typeof info.source === 'string') {
180
+ // Legacy caller passing a string. Don't overwrite structured source.
181
+ if (!newSource || typeof newSource === 'string') newSource = null;
182
+ }
183
+
184
+ // Build paths object from ldmPath/ocPath
185
+ const paths = existing?.paths || {};
186
+ if (info.ldmPath) paths.ldm = info.ldmPath;
187
+ if (info.ocPath) paths.openclaw = info.ocPath;
188
+ // Backwards compat: also keep flat ldmPath/ocPath
189
+ const ldmPath = info.ldmPath || existing?.ldmPath || paths.ldm;
190
+ const ocPath = info.ocPath || existing?.ocPath || paths.openclaw;
191
+
192
+ // Build installed block
193
+ const installed = existing?.installed || {};
194
+ if (typeof installed === 'object' && installed !== null) {
195
+ installed.version = info.version || installed.version || existing?.version;
196
+ if (!installed.installedAt) installed.installedAt = now;
197
+ installed.updatedAt = now;
198
+ }
199
+
200
+ // Origin: keep existing, or use from info, or default to "manual"
201
+ const origin = info._origin || existing?.origin || 'manual';
202
+
168
203
  registry.extensions[name] = {
169
- ...existing,
170
- ...info,
171
- enabled: existing?.enabled ?? true, // New installs are enabled by default. User runs ldm disable to turn off.
172
- updatedAt: new Date().toISOString(),
204
+ // v2 structured fields (#262)
205
+ source: newSource,
206
+ installed,
207
+ paths,
208
+ interfaces: info.interfaces || existing?.interfaces || [],
209
+ origin,
210
+ // Backwards-compatible flat fields (read by existing code)
211
+ name: info.name || existing?.name || name,
212
+ version: info.version || existing?.version || 'unknown',
213
+ ldmPath,
214
+ ocPath,
215
+ enabled: existing?.enabled ?? true,
216
+ updatedAt: now,
173
217
  };
174
218
  saveRegistry(registry);
175
219
  }
176
220
 
221
+ /**
222
+ * Build structured source info from a repo path and package.json (#262).
223
+ * Returns { type, repo, npm } or null if we can't determine the source.
224
+ */
225
+ function buildSourceInfo(repoPath, pkg) {
226
+ const source = { type: 'github' };
227
+ let hasInfo = false;
228
+
229
+ // Extract GitHub repo from package.json repository field
230
+ if (pkg?.repository) {
231
+ const raw = typeof pkg.repository === 'string'
232
+ ? pkg.repository
233
+ : pkg.repository.url || '';
234
+ const ghMatch = raw.match(/github\.com[:/]([^/]+\/[^/.]+)/);
235
+ if (ghMatch) {
236
+ source.repo = ghMatch[1].replace(/\.git$/, '');
237
+ hasInfo = true;
238
+ }
239
+ }
240
+
241
+ // Extract npm package name
242
+ if (pkg?.name) {
243
+ source.npm = pkg.name;
244
+ hasInfo = true;
245
+ }
246
+
247
+ // If the repo path is inside ~/.ldm/tmp/, it was cloned from somewhere.
248
+ // Try to get the remote URL from git.
249
+ if (!source.repo) {
250
+ try {
251
+ const remote = execSync('git remote get-url origin 2>/dev/null', {
252
+ cwd: repoPath,
253
+ encoding: 'utf8',
254
+ timeout: 5000,
255
+ }).trim();
256
+ const ghMatch = remote.match(/github\.com[:/]([^/]+\/[^/.]+)/);
257
+ if (ghMatch) {
258
+ source.repo = ghMatch[1].replace(/\.git$/, '');
259
+ hasInfo = true;
260
+ }
261
+ } catch {}
262
+ }
263
+
264
+ return hasInfo ? source : null;
265
+ }
266
+
177
267
  // ── Migration detection ──
178
268
 
179
269
  function findExistingInstalls(toolName, pkg, ocPluginConfig) {
@@ -905,18 +995,15 @@ export function installSingleTool(toolPath) {
905
995
  }
906
996
 
907
997
  let installed = 0;
908
- // Don't store /tmp/ clone paths as source (#54). Use the repo URL from package.json if available.
909
- let source = toolPath;
910
- const isTmpPath = toolPath.startsWith('/tmp/') || toolPath.startsWith('/private/tmp/');
911
- if (isTmpPath && pkg?.repository?.url) {
912
- source = pkg.repository.url.replace(/^git\+/, '').replace(/\.git$/, '');
913
- } else if (isTmpPath) {
914
- source = null; // better than a /tmp/ path
915
- }
998
+
999
+ // Build structured source info for registry (#262)
1000
+ const sourceInfo = buildSourceInfo(toolPath, pkg);
916
1001
  const registryInfo = {
917
1002
  name: toolName,
918
1003
  version: pkg?.version || 'unknown',
919
- source,
1004
+ source: null, // legacy field, kept for backwards compat
1005
+ _source: sourceInfo, // v2 structured source, consumed by updateRegistry
1006
+ _origin: INSTALL_ORIGIN, // #262: "catalog", "manual", or "dependency"
920
1007
  interfaces: ifaceNames,
921
1008
  };
922
1009
 
@@ -1140,4 +1227,4 @@ export function disableExtension(name) {
1140
1227
 
1141
1228
  // ── Exports for ldm CLI ──
1142
1229
 
1143
- export { loadRegistry, saveRegistry, updateRegistry, readJSON, writeJSON, runBuildIfNeeded, resolveLocalDeps, CORE_EXTENSIONS };
1230
+ export { loadRegistry, saveRegistry, updateRegistry, readJSON, writeJSON, runBuildIfNeeded, resolveLocalDeps, buildSourceInfo, CORE_EXTENSIONS };
package/lib/state.mjs CHANGED
@@ -160,7 +160,7 @@ export function reconcileState(systemState) {
160
160
  inRegistry: !!reg,
161
161
  registryVersion: reg?.version || null,
162
162
  registrySource: reg?.source || null,
163
- registryHasSource: !!(reg?.source && existsSync(reg.source)),
163
+ registryHasSource: !!(reg?.source && (typeof reg.source === 'string' ? existsSync(reg.source) : !!reg.source.repo)),
164
164
  registryInterfaces: reg?.interfaces || [],
165
165
  // Deployed
166
166
  deployedLdm: !!ldm,
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@wipcomputer/wip-ldm-os",
3
- "version": "0.4.70",
3
+ "version": "0.4.72",
4
4
  "type": "module",
5
5
  "description": "LDM OS: identity, memory, and sovereignty infrastructure for AI agents",
6
6
  "engines": {
@@ -2,7 +2,7 @@
2
2
 
3
3
  ## One Script, One Place
4
4
 
5
- `~/.ldm/bin/ldm-backup.sh` runs daily at midnight via LDM Dev Tools.app. It backs up everything to `~/.ldm/backups/`, then tars it to iCloud for offsite.
5
+ `~/.ldm/bin/ldm-backup.sh` runs daily at 3:00 AM via LaunchAgent `ai.openclaw.ldm-backup`. It backs up everything to `~/.ldm/backups/`, then tars it to iCloud for offsite.
6
6
 
7
7
  ## What Gets Backed Up
8
8
 
@@ -48,14 +48,14 @@
48
48
 
49
49
  ## iCloud Offsite
50
50
 
51
- After local backup, the entire dated folder is compressed and copied to iCloud:
51
+ After local backup, the entire dated folder is compressed and copied to iCloud. The iCloud path is read from `~/.ldm/config.json` at `paths.icloudBackup`.
52
52
 
53
53
  ```
54
54
  ~/Library/Mobile Documents/com~apple~CloudDocs/wipcomputerinc-icloud/backups/
55
55
  wipcomputerinc-lesa-2026-03-24--09-50-22.tar.gz
56
56
  ```
57
57
 
58
- One file per backup. iCloud syncs it across devices. Rotates to 7 days.
58
+ One file per backup. iCloud syncs it across devices. Rotates to {{backup.keep}} days.
59
59
 
60
60
  ## How to Run
61
61
 
@@ -83,19 +83,22 @@ After restore: `openclaw gateway restart` then `crystal status` to verify.
83
83
 
84
84
  | What | When | How |
85
85
  |------|------|-----|
86
- | Backup | Midnight | cron -> LDM Dev Tools.app -> ~/.ldm/bin/ldm-backup.sh |
86
+ | Backup | 3:00 AM | LaunchAgent `ai.openclaw.ldm-backup` runs `~/.ldm/bin/ldm-backup.sh` |
87
87
 
88
- One cron entry. One script. One app. Verify is built into the script (exit code + log).
88
+ One LaunchAgent. One script. No Full Disk Access currently (target: midnight via LDMDevTools.app once PID error is fixed). Verification is built into the script (exit code + log).
89
89
 
90
90
  ## Config
91
91
 
92
- Backup reads from two config files:
93
- - `~/.ldm/config.json` ... workspace path, org name
94
- - `~/wipcomputerinc/settings/config.json` ... backup.keep (retention days), paths.icloudBackup
92
+ All backup settings live in `~/.ldm/config.json`:
93
+ - `paths.workspace` ... workspace path
94
+ - `paths.icloudBackup` ... iCloud offsite destination
95
+ - `backup.keep` ... retention days (default: 7)
96
+ - `backup.includeSecrets` ... whether to include `~/.ldm/secrets/`
97
+ - `org` ... used for tar filename prefix
95
98
 
96
99
  ## Logs
97
100
 
98
- `~/.ldm/logs/cron.log` (via LDM Dev Tools.app stdout)
101
+ `~/.ldm/logs/backup.log` (LaunchAgent stdout/stderr)
99
102
 
100
103
  ---
101
104
 
@@ -103,6 +106,6 @@ Backup reads from two config files:
103
106
 
104
107
  **Local backups:** `~/.ldm/backups/`
105
108
  **iCloud offsite:** `~/Library/Mobile Documents/com~apple~CloudDocs/wipcomputerinc-icloud/backups/`
106
- **Schedule:** Midnight via LDM Dev Tools.app
107
- **Retention:** 7 days local, 7 days iCloud
109
+ **Schedule:** 3:00 AM via LaunchAgent `ai.openclaw.ldm-backup`
110
+ **Retention:** {{backup.keep}} days local, {{backup.keep}} days iCloud
108
111
  **Script:** `~/.ldm/bin/ldm-backup.sh` (deployed by `ldm install`)