@wipcomputer/wip-ldm-os 0.4.39 → 0.4.41
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/SKILL.md +1 -1
- package/package.json +3 -1
- package/scripts/backfill-summaries.sh +112 -0
- package/scripts/ldm-backup.sh +251 -0
- package/scripts/ldm-restore.sh +224 -0
- package/scripts/ldm-summary.sh +239 -0
- package/shared/prompts/daily-agent-summary.md +13 -0
- package/shared/prompts/daily-dev.md +12 -0
- package/shared/prompts/monthly-agent-summary.md +13 -0
- package/shared/prompts/org-daily-team.md +12 -0
- package/shared/prompts/quarterly-agent-summary.md +14 -0
- package/shared/prompts/weekly-agent-summary.md +14 -0
- package/shared/rules/git-conventions.md +29 -0
- package/shared/rules/release-pipeline.md +25 -0
- package/shared/rules/security.md +17 -0
- package/shared/rules/workspace-boundaries.md +21 -0
- package/shared/rules/writing-style.md +5 -0
package/SKILL.md
CHANGED
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@wipcomputer/wip-ldm-os",
|
|
3
|
-
"version": "0.4.
|
|
3
|
+
"version": "0.4.41",
|
|
4
4
|
"type": "module",
|
|
5
5
|
"description": "LDM OS: identity, memory, and sovereignty infrastructure for AI agents",
|
|
6
6
|
"engines": {
|
|
@@ -37,6 +37,8 @@
|
|
|
37
37
|
"dist/bridge/",
|
|
38
38
|
"templates/",
|
|
39
39
|
"docs/",
|
|
40
|
+
"shared/",
|
|
41
|
+
"scripts/",
|
|
40
42
|
"catalog.json",
|
|
41
43
|
"SKILL.md"
|
|
42
44
|
],
|
|
@@ -0,0 +1,112 @@
|
|
|
1
|
+
#!/bin/bash
# backfill-summaries.sh — Generate all historical summaries from day 1.
# Part of Total Recall. Uses ldm-summary.sh with --force --date.
#
# Usage:
#   backfill-summaries.sh                    # full backfill (Feb 5 to yesterday)
#   backfill-summaries.sh --dry-run          # preview
#   backfill-summaries.sh --from 2026-03-01  # partial backfill
#
# Order: dailies first, then weeklies, monthlies, quarterly.
# Each level reads the level below. Must complete in order.

set -euo pipefail
export PATH="/opt/homebrew/bin:/usr/local/bin:$PATH"

SUMMARY_SCRIPT="$HOME/.ldm/bin/ldm-summary.sh"
DRY_RUN=false
FROM_DATE="2026-02-05"

while [[ $# -gt 0 ]]; do
  case "$1" in
    --dry-run) DRY_RUN=true; shift ;;
    --from) FROM_DATE="$2"; shift 2 ;;
    *) echo "Unknown: $1" >&2; exit 1 ;;
  esac
done

if [ ! -f "$SUMMARY_SCRIPT" ]; then
  echo "ERROR: ldm-summary.sh not found. Run ldm install first." >&2
  exit 1
fi

YESTERDAY=$(python3 -c "from datetime import datetime, timedelta; print((datetime.now()-timedelta(days=1)).strftime('%Y-%m-%d'))")
FLAGS="--force"
[ "$DRY_RUN" = true ] && FLAGS="$FLAGS --dry-run"

echo "=== Total Recall: Backfill Summaries ==="
echo "  From: $FROM_DATE"
echo "  To:   $YESTERDAY"
echo "  Mode: $([ "$DRY_RUN" = true ] && echo 'DRY RUN' || echo 'LIVE')"
echo ""

# NOTE on the grep filters below: each summary run is piped through grep to keep
# only the interesting lines. grep exits 1 when nothing matches, and under
# 'set -euo pipefail' that would abort the entire backfill on any quiet day.
# The '{ grep … || true; }' group tolerates a no-match while still letting a
# failure of ldm-summary.sh itself propagate through pipefail.

# ── Step 1: Dailies ──

echo "=== STEP 1: Daily summaries ==="
DAILY_COUNT=0
for date in $(python3 -c "
from datetime import datetime, timedelta
d = datetime.strptime('$FROM_DATE', '%Y-%m-%d')
end = datetime.strptime('$YESTERDAY', '%Y-%m-%d')
while d <= end:
    print(d.strftime('%Y-%m-%d'))
    d += timedelta(days=1)
"); do
  echo "--- $date ---"
  bash "$SUMMARY_SCRIPT" daily --date "$date" $FLAGS 2>&1 \
    | { grep -E "->|DRY RUN|No data|No git" || true; }
  DAILY_COUNT=$((DAILY_COUNT + 1))
done
echo "  Dailies: $DAILY_COUNT days"
echo ""

# ── Step 2: Weeklies (Sunday to Saturday) ──

echo "=== STEP 2: Weekly summaries ==="
WEEKLY_COUNT=0
for date in $(python3 -c "
from datetime import datetime, timedelta
# Start from first Sunday >= FROM_DATE
d = datetime.strptime('$FROM_DATE', '%Y-%m-%d')
while d.weekday() != 6: d += timedelta(days=1)  # find Sunday
end = datetime.now()
while d <= end:
    # Use the Saturday (end of week) as the date
    sat = d + timedelta(days=6)
    print(sat.strftime('%Y-%m-%d'))
    d += timedelta(days=7)
"); do
  echo "--- Week ending $date ---"
  bash "$SUMMARY_SCRIPT" weekly --date "$date" $FLAGS 2>&1 \
    | { grep -E "->|DRY RUN|No " || true; }
  WEEKLY_COUNT=$((WEEKLY_COUNT + 1))
done
echo "  Weeklies: $WEEKLY_COUNT weeks"
echo ""

# ── Step 3: Monthlies ──

echo "=== STEP 3: Monthly summaries ==="
MONTHLY_COUNT=0
for date in $(python3 -c "
from datetime import datetime
import calendar
d = datetime.strptime('$FROM_DATE', '%Y-%m-%d')
now = datetime.now()
while d <= now:
    last_day = calendar.monthrange(d.year, d.month)[1]
    print(f'{d.year}-{d.month:02d}-{last_day:02d}')
    if d.month == 12: d = d.replace(year=d.year+1, month=1, day=1)
    else: d = d.replace(month=d.month+1, day=1)
"); do
  echo "--- Month ending $date ---"
  bash "$SUMMARY_SCRIPT" monthly --date "$date" $FLAGS 2>&1 \
    | { grep -E "->|DRY RUN|No " || true; }
  MONTHLY_COUNT=$((MONTHLY_COUNT + 1))
done
echo ""

# ── Step 4: Quarterly ──

echo "=== STEP 4: Quarterly summary ==="
bash "$SUMMARY_SCRIPT" quarterly --date "$(date +%Y-%m-%d)" $FLAGS 2>&1 \
  | { grep -E "->|DRY RUN|No " || true; }

echo ""
echo "=== Backfill complete ==="
echo "  $DAILY_COUNT dailies processed"
echo "  $WEEKLY_COUNT weeklies processed"
echo "  $MONTHLY_COUNT monthlies processed"
|
@@ -0,0 +1,251 @@
|
|
|
1
|
+
#!/bin/bash
# ldm-backup.sh — Unified backup for LDM OS
# Backs up: ~/.ldm/, ~/.openclaw/, ~/.claude/, ~/wipcomputerinc/
# Handles SQLite safely (sqlite3 .backup). Tars to iCloud for offsite.
#
# Source of truth: wip-ldm-os-private/scripts/ldm-backup.sh
# Deployed to:     ~/.ldm/bin/ldm-backup.sh (via ldm install)
#
# Usage:
#   ldm-backup.sh                    # run backup
#   ldm-backup.sh --dry-run          # preview what would be backed up
#   ldm-backup.sh --keep 14          # keep last 14 backups (default: 7)
#   ldm-backup.sh --include-secrets  # include ~/.ldm/secrets/
#
# Config: ~/.ldm/config.json (workspace path) + {workspace}/settings/config.json (backup settings)

set -euo pipefail

export PATH="/opt/homebrew/bin:/usr/local/bin:$PATH"

LDM_HOME="$HOME/.ldm"
OC_HOME="$HOME/.openclaw"
CLAUDE_HOME="$HOME/.claude"
BACKUP_ROOT="$LDM_HOME/backups"
KEEP=7
INCLUDE_SECRETS=false
DRY_RUN=false

# Parse flags
while [[ $# -gt 0 ]]; do
  case "$1" in
    --keep) KEEP="$2"; shift 2 ;;
    --include-secrets) INCLUDE_SECRETS=true; shift ;;
    --dry-run) DRY_RUN=true; shift ;;
    *) echo "Unknown flag: $1" >&2; exit 1 ;;
  esac
done

# Read workspace path from ~/.ldm/config.json
WORKSPACE=""
if [ -f "$LDM_HOME/config.json" ]; then
  WORKSPACE=$(python3 -c "import json; print(json.load(open('$LDM_HOME/config.json')).get('workspace',''))" 2>/dev/null || true)
fi
if [ -z "$WORKSPACE" ]; then
  echo "WARNING: No workspace in ~/.ldm/config.json. Skipping workspace backup."
fi

# Read iCloud backup path from workspace config
ICLOUD_BACKUP=""
if [ -n "$WORKSPACE" ] && [ -f "$WORKSPACE/settings/config.json" ]; then
  ICLOUD_BACKUP=$(python3 -c "
import json, os
c = json.load(open('$WORKSPACE/settings/config.json'))
p = c.get('paths',{}).get('icloudBackup','')
print(os.path.expanduser(p))
" 2>/dev/null || true)
fi

# Read keep from workspace config (override if set there)
if [ -n "$WORKSPACE" ] && [ -f "$WORKSPACE/settings/config.json" ]; then
  CONFIG_KEEP=$(python3 -c "import json; print(json.load(open('$WORKSPACE/settings/config.json')).get('backup',{}).get('keep',0))" 2>/dev/null || true)
  if [ -n "$CONFIG_KEEP" ] && [ "$CONFIG_KEEP" -gt 0 ] 2>/dev/null; then
    KEEP="$CONFIG_KEEP"
  fi
fi

DATE=$(date +%Y-%m-%d--%H-%M-%S)
DEST="$BACKUP_ROOT/$DATE"

echo "=== LDM Backup: $DATE ==="
echo "  Local:     $DEST"
echo "  iCloud:    ${ICLOUD_BACKUP:-not configured}"
# KEEP is a count of retained backups (see --keep usage), not a number of days.
echo "  Keep:      $KEEP backups"
echo "  Workspace: ${WORKSPACE:-not configured}"
echo ""

if [ "$DRY_RUN" = true ]; then
  echo "[DRY RUN] Would back up:"
  echo "  ~/.ldm/memory/crystal.db (sqlite3 .backup)"
  echo "  ~/.ldm/agents/ (cp -a)"
  echo "  ~/.ldm/state/ (cp -a)"
  echo "  ~/.ldm/config.json (cp)"
  [ -f "$OC_HOME/memory/main.sqlite" ] && echo "  ~/.openclaw/memory/main.sqlite (sqlite3 .backup) [$(du -sh "$OC_HOME/memory/main.sqlite" | cut -f1)]"
  [ -f "$OC_HOME/memory/context-embeddings.sqlite" ] && echo "  ~/.openclaw/memory/context-embeddings.sqlite (sqlite3 .backup)"
  [ -d "$OC_HOME/workspace" ] && echo "  ~/.openclaw/workspace/ (tar)"
  [ -d "$OC_HOME/agents/main/sessions" ] && echo "  ~/.openclaw/agents/main/sessions/ (tar)"
  [ -f "$OC_HOME/openclaw.json" ] && echo "  ~/.openclaw/openclaw.json (cp)"
  [ -f "$CLAUDE_HOME/CLAUDE.md" ] && echo "  ~/.claude/CLAUDE.md (cp)"
  [ -f "$CLAUDE_HOME/settings.json" ] && echo "  ~/.claude/settings.json (cp)"
  [ -d "$CLAUDE_HOME/projects" ] && echo "  ~/.claude/projects/ (tar)"
  [ -n "$WORKSPACE" ] && echo "  $WORKSPACE/ (tar, excludes node_modules/.git/objects)"
  [ "$INCLUDE_SECRETS" = true ] && echo "  ~/.ldm/secrets/ (cp -a)"
  echo ""
  echo "[DRY RUN] No files modified."
  exit 0
fi

# Preflight
if [ ! -d "$LDM_HOME" ]; then
  echo "ERROR: ~/.ldm/ not found" >&2
  exit 1
fi

mkdir -p "$DEST/ldm/memory" "$DEST/openclaw/memory" "$DEST/claude"

# ── 1. Back up ~/.ldm/ ──

echo "--- ~/.ldm/ ---"

# Crystal DB: prefer sqlite3's online '.backup' (consistent snapshot even while
# the DB is in use); fall back to copying the file plus its WAL/SHM sidecars.
CRYSTAL_DB="$LDM_HOME/memory/crystal.db"
if [ -f "$CRYSTAL_DB" ]; then
  if command -v sqlite3 &>/dev/null; then
    sqlite3 "$CRYSTAL_DB" ".backup '$DEST/ldm/memory/crystal.db'"
    echo "  crystal.db: sqlite3 .backup OK"
  else
    cp "$CRYSTAL_DB" "$DEST/ldm/memory/crystal.db"
    [ -f "$CRYSTAL_DB-wal" ] && cp "$CRYSTAL_DB-wal" "$DEST/ldm/memory/crystal.db-wal"
    [ -f "$CRYSTAL_DB-shm" ] && cp "$CRYSTAL_DB-shm" "$DEST/ldm/memory/crystal.db-shm"
    echo "  crystal.db: file copy (no sqlite3)"
  fi
else
  echo "  crystal.db: not found (skipped)"
fi

# Config
[ -f "$LDM_HOME/config.json" ] && cp "$LDM_HOME/config.json" "$DEST/ldm/config.json" && echo "  config.json: OK"

# State
[ -d "$LDM_HOME/state" ] && cp -a "$LDM_HOME/state" "$DEST/ldm/state" && echo "  state/: OK"

# Agents (identity, journals, daily logs)
[ -d "$LDM_HOME/agents" ] && cp -a "$LDM_HOME/agents" "$DEST/ldm/agents" && echo "  agents/: OK"

# Secrets (optional, opt-in; keep the copy owner-only)
if [ "$INCLUDE_SECRETS" = true ] && [ -d "$LDM_HOME/secrets" ]; then
  cp -a "$LDM_HOME/secrets" "$DEST/ldm/secrets"
  chmod 700 "$DEST/ldm/secrets"
  echo "  secrets/: OK"
fi

# ── 2. Back up ~/.openclaw/ ──

echo "--- ~/.openclaw/ ---"

# main.sqlite (safe sqlite3 .backup)
if [ -f "$OC_HOME/memory/main.sqlite" ]; then
  if command -v sqlite3 &>/dev/null; then
    sqlite3 "$OC_HOME/memory/main.sqlite" ".backup '$DEST/openclaw/memory/main.sqlite'"
    echo "  main.sqlite: sqlite3 .backup OK"
  else
    cp "$OC_HOME/memory/main.sqlite" "$DEST/openclaw/memory/main.sqlite"
    [ -f "$OC_HOME/memory/main.sqlite-wal" ] && cp "$OC_HOME/memory/main.sqlite-wal" "$DEST/openclaw/memory/main.sqlite-wal"
    echo "  main.sqlite: file copy"
  fi
fi

# context-embeddings.sqlite
if [ -f "$OC_HOME/memory/context-embeddings.sqlite" ]; then
  if command -v sqlite3 &>/dev/null; then
    sqlite3 "$OC_HOME/memory/context-embeddings.sqlite" ".backup '$DEST/openclaw/memory/context-embeddings.sqlite'"
    echo "  context-embeddings: sqlite3 .backup OK"
  else
    cp "$OC_HOME/memory/context-embeddings.sqlite" "$DEST/openclaw/memory/context-embeddings.sqlite"
    echo "  context-embeddings: file copy"
  fi
fi

# Workspace (best-effort: tar errors are suppressed intentionally)
[ -d "$OC_HOME/workspace" ] && tar -cf "$DEST/openclaw/workspace.tar" -C "$OC_HOME" workspace 2>/dev/null && echo "  workspace/: tar OK"

# OC sessions
[ -d "$OC_HOME/agents/main/sessions" ] && tar -cf "$DEST/openclaw/sessions.tar" -C "$OC_HOME/agents/main" sessions 2>/dev/null && echo "  sessions/: tar OK"

# OC config
[ -f "$OC_HOME/openclaw.json" ] && cp "$OC_HOME/openclaw.json" "$DEST/openclaw/openclaw.json" && echo "  openclaw.json: OK"

# State files (export/capture watermarks)
for f in session-export-state.json cc-export-watermark.json cc-capture-watermark.json memory-capture-state.json; do
  [ -f "$OC_HOME/memory/$f" ] && cp "$OC_HOME/memory/$f" "$DEST/openclaw/memory/$f"
done
echo "  state files: OK"

# ── 3. Back up ~/.claude/ ──

echo "--- ~/.claude/ ---"

[ -f "$CLAUDE_HOME/CLAUDE.md" ] && cp "$CLAUDE_HOME/CLAUDE.md" "$DEST/claude/CLAUDE.md" && echo "  CLAUDE.md: OK"
[ -f "$CLAUDE_HOME/settings.json" ] && cp "$CLAUDE_HOME/settings.json" "$DEST/claude/settings.json" && echo "  settings.json: OK"
[ -d "$CLAUDE_HOME/projects" ] && tar -cf "$DEST/claude/projects.tar" -C "$CLAUDE_HOME" projects 2>/dev/null && echo "  projects/: tar OK"

# ── 4. Back up workspace ──

if [ -n "$WORKSPACE" ] && [ -d "$WORKSPACE" ]; then
  echo "--- $WORKSPACE/ ---"
  tar -cf "$DEST/wipcomputerinc.tar" \
    --exclude "node_modules" \
    --exclude ".git/objects" \
    --exclude ".DS_Store" \
    --exclude "*/staff/cc-mini/documents/backups" \
    --exclude "*/_temp/backups" \
    --exclude "*/_trash" \
    -C "$(dirname "$WORKSPACE")" "$(basename "$WORKSPACE")" 2>/dev/null \
    && echo "  workspace: tar OK" \
    || echo "  workspace: tar FAILED"
fi

# ── 5. iCloud offsite ──

if [ -n "$ICLOUD_BACKUP" ] && [ -d "$(dirname "$ICLOUD_BACKUP")" ]; then
  echo "--- iCloud offsite ---"
  mkdir -p "$ICLOUD_BACKUP"
  ORG=$(python3 -c "import json; print(json.load(open('$LDM_HOME/config.json')).get('org','ldmos'))" 2>/dev/null || echo "ldmos")
  DEVICE=$(hostname -s)
  TAR_NAME="${ORG}-${DEVICE}-${DATE}.tar.gz"
  tar -czf "$ICLOUD_BACKUP/$TAR_NAME" -C "$BACKUP_ROOT" "$DATE" 2>/dev/null \
    && echo "  $TAR_NAME: OK" \
    || echo "  iCloud tar: FAILED"

  # Rotate iCloud tars (newest first via ls -1t; drop the oldest beyond KEEP).
  # '|| true' keeps an empty glob (ls exit 2) from tripping pipefail; wc still
  # prints 0 in that case.
  ICLOUD_COUNT=$(ls -1 "$ICLOUD_BACKUP"/*.tar.gz 2>/dev/null | wc -l | tr -d ' ' || true)
  if [ "$ICLOUD_COUNT" -gt "$KEEP" ]; then
    REMOVE_COUNT=$((ICLOUD_COUNT - KEEP))
    ls -1t "$ICLOUD_BACKUP"/*.tar.gz | tail -n "$REMOVE_COUNT" | while read -r OLD; do
      rm -f "$OLD"
      echo "  Rotated: $(basename "$OLD")"
    done
  fi
fi

# ── 6. Rotate local backups ──

echo "--- Rotation ---"
BACKUP_COUNT=$(ls -1d "$BACKUP_ROOT"/20??-??-??--* 2>/dev/null | wc -l | tr -d ' ' || true)
if [ "$BACKUP_COUNT" -gt "$KEEP" ]; then
  REMOVE_COUNT=$((BACKUP_COUNT - KEEP))
  # Dated names sort chronologically, so 'head' picks the oldest dirs.
  ls -1d "$BACKUP_ROOT"/20??-??-??--* | head -n "$REMOVE_COUNT" | while read -r OLD; do
    rm -rf "$OLD"
    echo "  Removed: $(basename "$OLD")"
  done
fi

# ── Summary ──

TOTAL_SIZE=$(du -sh "$DEST" | cut -f1)
echo ""
echo "=== Backup complete ==="
echo "  Location: $DEST"
echo "  Size:     $TOTAL_SIZE"
echo "  Backups:  $BACKUP_COUNT total (keeping $KEEP)"
# Guarded with 'if' so an unconfigured iCloud path doesn't leave the script
# with a non-zero exit status (a trailing '[ -n … ] && echo' would).
if [ -n "$ICLOUD_BACKUP" ]; then
  echo "  iCloud:   $ICLOUD_BACKUP/"
fi
|
|
@@ -0,0 +1,224 @@
|
|
|
1
|
+
#!/bin/bash
# ldm-restore.sh — Restore from an LDM OS backup
# Restores: ~/.ldm/, ~/.openclaw/, ~/.claude/, ~/wipcomputerinc/
#
# Source of truth: wip-ldm-os-private/scripts/ldm-restore.sh
# Deployed to:     ~/.ldm/bin/ldm-restore.sh (via ldm install)
#
# Usage:
#   ldm-restore.sh                           # list available backups
#   ldm-restore.sh 2026-03-24--09-50-22      # restore from specific backup
#   ldm-restore.sh --from-icloud <file>      # restore from iCloud tar
#   ldm-restore.sh --dry-run <backup>        # preview what would be restored
#   ldm-restore.sh --only ldm <backup>       # restore only ~/.ldm/ data
#   ldm-restore.sh --only openclaw <backup>  # restore only ~/.openclaw/ data
#   ldm-restore.sh --only claude <backup>    # restore only ~/.claude/ data
#   ldm-restore.sh --only workspace <backup> # restore only workspace

set -euo pipefail

export PATH="/opt/homebrew/bin:/usr/local/bin:$PATH"

LDM_HOME="$HOME/.ldm"
OC_HOME="$HOME/.openclaw"
BACKUP_ROOT="$LDM_HOME/backups"
DRY_RUN=false
ONLY=""
FROM_ICLOUD=""
BACKUP_NAME=""
TEMP_DIR=""

# Remove the iCloud extraction dir on ANY exit path (success, error, Ctrl+C),
# not just the happy path at the end of the script.
cleanup() {
  if [ -n "$TEMP_DIR" ]; then
    rm -rf -- "$TEMP_DIR"
  fi
}
trap cleanup EXIT

# Parse flags
while [[ $# -gt 0 ]]; do
  case "$1" in
    --dry-run) DRY_RUN=true; shift ;;
    --only) ONLY="$2"; shift 2 ;;
    --from-icloud) FROM_ICLOUD="$2"; shift 2 ;;
    --help|-h)
      echo "Usage: ldm-restore.sh [options] [backup-name]"
      echo ""
      echo "Options:"
      echo "  --dry-run             Preview what would be restored"
      echo "  --only <section>      Restore only: ldm, openclaw, claude, workspace"
      echo "  --from-icloud <file>  Restore from iCloud .tar.gz"
      echo ""
      echo "Examples:"
      echo "  ldm-restore.sh                                   # list backups"
      echo "  ldm-restore.sh 2026-03-24--09-50-22              # restore from local"
      echo "  ldm-restore.sh --only ldm 2026-03-24--09-50-22   # restore only crystal.db + agents"
      echo "  ldm-restore.sh --from-icloud ~/path/to/backup.tar.gz"
      exit 0
      ;;
    *) BACKUP_NAME="$1"; shift ;;
  esac
done

# A typo'd --only section would previously restore nothing, silently.
case "$ONLY" in
  ""|ldm|openclaw|claude|workspace) ;;
  *) echo "ERROR: --only must be one of: ldm, openclaw, claude, workspace" >&2; exit 1 ;;
esac

# If restoring from iCloud tar, extract to temp dir first
if [ -n "$FROM_ICLOUD" ]; then
  if [ ! -f "$FROM_ICLOUD" ]; then
    echo "ERROR: File not found: $FROM_ICLOUD" >&2
    exit 1
  fi
  echo "Extracting iCloud backup to temp dir..."
  TEMP_DIR=$(mktemp -d)
  tar -xzf "$FROM_ICLOUD" -C "$TEMP_DIR"
  # Find the backup dir inside (should be one dated folder)
  BACKUP_NAME=$(ls "$TEMP_DIR" | head -1)
  BACKUP_ROOT="$TEMP_DIR"
  echo "  Extracted: $BACKUP_NAME"
fi

# List mode (no backup specified)
if [ -z "$BACKUP_NAME" ]; then
  echo "Available backups:"
  echo ""
  if [ -d "$BACKUP_ROOT" ]; then
    for d in $(ls -1d "$BACKUP_ROOT"/20??-??-??--* 2>/dev/null | sort -r); do
      SIZE=$(du -sh "$d" | cut -f1)
      echo "  $(basename "$d")  ($SIZE)"
    done
  fi
  echo ""
  echo "Usage: ldm-restore.sh <backup-name>"
  echo "  e.g. ldm-restore.sh 2026-03-24--09-50-22"
  exit 0
fi

SRC="$BACKUP_ROOT/$BACKUP_NAME"

if [ ! -d "$SRC" ]; then
  echo "ERROR: Backup not found: $SRC" >&2
  exit 1
fi

echo "=== LDM Restore: $BACKUP_NAME ==="
echo "  Source: $SRC"
echo "  Mode:   ${ONLY:-all}"
echo ""

# Read workspace path
WORKSPACE=""
if [ -f "$LDM_HOME/config.json" ]; then
  WORKSPACE=$(python3 -c "import json; print(json.load(open('$LDM_HOME/config.json')).get('workspace',''))" 2>/dev/null || true)
fi

if [ "$DRY_RUN" = true ]; then
  echo "[DRY RUN] Would restore:"
  # Plain 'if' blocks instead of '[ a ] || [ b ] && { … }' lists: the mixed
  # &&/|| form relies on equal-precedence left association and is easy to
  # misread/break on edit.
  if [ -z "$ONLY" ] || [ "$ONLY" = "ldm" ]; then
    [ -f "$SRC/ldm/memory/crystal.db" ] && echo "  crystal.db -> ~/.ldm/memory/crystal.db"
    [ -d "$SRC/ldm/agents" ] && echo "  agents/ -> ~/.ldm/agents/"
    [ -d "$SRC/ldm/state" ] && echo "  state/ -> ~/.ldm/state/"
    [ -f "$SRC/ldm/config.json" ] && echo "  config.json -> ~/.ldm/config.json"
  fi
  if [ -z "$ONLY" ] || [ "$ONLY" = "openclaw" ]; then
    [ -f "$SRC/openclaw/memory/main.sqlite" ] && echo "  main.sqlite -> ~/.openclaw/memory/main.sqlite"
    [ -f "$SRC/openclaw/memory/context-embeddings.sqlite" ] && echo "  context-embeddings.sqlite -> ~/.openclaw/memory/"
    [ -f "$SRC/openclaw/workspace.tar" ] && echo "  workspace.tar -> ~/.openclaw/workspace/"
    [ -f "$SRC/openclaw/sessions.tar" ] && echo "  sessions.tar -> ~/.openclaw/agents/main/sessions/"
    [ -f "$SRC/openclaw/openclaw.json" ] && echo "  openclaw.json -> ~/.openclaw/"
  fi
  if [ -z "$ONLY" ] || [ "$ONLY" = "claude" ]; then
    [ -f "$SRC/claude/CLAUDE.md" ] && echo "  CLAUDE.md -> ~/.claude/CLAUDE.md"
    [ -f "$SRC/claude/settings.json" ] && echo "  settings.json -> ~/.claude/settings.json"
    [ -f "$SRC/claude/projects.tar" ] && echo "  projects.tar -> ~/.claude/projects/"
  fi
  if [ -z "$ONLY" ] || [ "$ONLY" = "workspace" ]; then
    [ -f "$SRC/wipcomputerinc.tar" ] && echo "  wipcomputerinc.tar -> $WORKSPACE/"
  fi
  echo ""
  echo "[DRY RUN] No files modified."
  exit 0
fi

echo "WARNING: This will overwrite existing files. Press Ctrl+C to cancel."
echo "Restoring in 5 seconds..."
sleep 5

# ── Restore ~/.ldm/ ──

if [ -z "$ONLY" ] || [ "$ONLY" = "ldm" ]; then
  echo "--- Restoring ~/.ldm/ ---"

  if [ -f "$SRC/ldm/memory/crystal.db" ]; then
    mkdir -p "$LDM_HOME/memory"   # fresh machine may not have the dir yet
    cp "$SRC/ldm/memory/crystal.db" "$LDM_HOME/memory/crystal.db"
    echo "  crystal.db: OK"
  fi

  if [ -d "$SRC/ldm/agents" ]; then
    mkdir -p "$LDM_HOME/agents"
    # Best-effort: '|| true' keeps an empty source glob (partial backup) from
    # aborting the whole restore under 'set -e'.
    cp -a "$SRC/ldm/agents/"* "$LDM_HOME/agents/" 2>/dev/null || true
    echo "  agents/: OK"
  fi

  if [ -d "$SRC/ldm/state" ]; then
    mkdir -p "$LDM_HOME/state"
    cp -a "$SRC/ldm/state/"* "$LDM_HOME/state/" 2>/dev/null || true
    echo "  state/: OK"
  fi

  [ -f "$SRC/ldm/config.json" ] && cp "$SRC/ldm/config.json" "$LDM_HOME/config.json" && echo "  config.json: OK"
fi

# ── Restore ~/.openclaw/ ──

if [ -z "$ONLY" ] || [ "$ONLY" = "openclaw" ]; then
  echo "--- Restoring ~/.openclaw/ ---"

  if [ -f "$SRC/openclaw/memory/main.sqlite" ]; then
    mkdir -p "$OC_HOME/memory"
    cp "$SRC/openclaw/memory/main.sqlite" "$OC_HOME/memory/main.sqlite"
    echo "  main.sqlite: OK"
  fi

  if [ -f "$SRC/openclaw/memory/context-embeddings.sqlite" ]; then
    mkdir -p "$OC_HOME/memory"
    cp "$SRC/openclaw/memory/context-embeddings.sqlite" "$OC_HOME/memory/context-embeddings.sqlite"
    echo "  context-embeddings: OK"
  fi

  if [ -f "$SRC/openclaw/workspace.tar" ]; then
    mkdir -p "$OC_HOME"
    tar -xf "$SRC/openclaw/workspace.tar" -C "$OC_HOME/"
    echo "  workspace/: OK"
  fi

  if [ -f "$SRC/openclaw/sessions.tar" ]; then
    mkdir -p "$OC_HOME/agents/main"
    tar -xf "$SRC/openclaw/sessions.tar" -C "$OC_HOME/agents/main/"
    echo "  sessions/: OK"
  fi

  [ -f "$SRC/openclaw/openclaw.json" ] && cp "$SRC/openclaw/openclaw.json" "$OC_HOME/openclaw.json" && echo "  openclaw.json: OK"

  mkdir -p "$OC_HOME/memory"
  for f in session-export-state.json cc-export-watermark.json cc-capture-watermark.json memory-capture-state.json; do
    [ -f "$SRC/openclaw/memory/$f" ] && cp "$SRC/openclaw/memory/$f" "$OC_HOME/memory/$f"
  done
  echo "  state files: OK"
fi

# ── Restore ~/.claude/ ──

if [ -z "$ONLY" ] || [ "$ONLY" = "claude" ]; then
  echo "--- Restoring ~/.claude/ ---"

  mkdir -p "$HOME/.claude"
  [ -f "$SRC/claude/CLAUDE.md" ] && cp "$SRC/claude/CLAUDE.md" "$HOME/.claude/CLAUDE.md" && echo "  CLAUDE.md: OK"
  [ -f "$SRC/claude/settings.json" ] && cp "$SRC/claude/settings.json" "$HOME/.claude/settings.json" && echo "  settings.json: OK"

  if [ -f "$SRC/claude/projects.tar" ]; then
    tar -xf "$SRC/claude/projects.tar" -C "$HOME/.claude/"
    echo "  projects/: OK"
  fi
fi

# ── Restore workspace ──

if [ -z "$ONLY" ] || [ "$ONLY" = "workspace" ]; then
  if [ -f "$SRC/wipcomputerinc.tar" ] && [ -n "$WORKSPACE" ]; then
    echo "--- Restoring workspace ---"
    tar -xf "$SRC/wipcomputerinc.tar" -C "$(dirname "$WORKSPACE")"
    echo "  workspace: OK"
  fi
fi

# Temp dir (if extracted from iCloud) is removed by the EXIT trap.

echo ""
echo "=== Restore complete ==="
echo "  Restart the gateway: openclaw gateway restart"
echo "  Verify crystal:      crystal status"
|
|
@@ -0,0 +1,239 @@
|
|
|
1
|
+
#!/bin/bash
# ldm-summary.sh — Multi-cadence summary generator (prompt-based)
# Each agent gets their own summary from their own data in ~/.ldm/agents/.
# Org-wide combines both agent summaries.
#
# Usage:
#   ldm-summary.sh daily                    # today
#   ldm-summary.sh daily --date 2026-02-10  # specific date (backfill)
#   ldm-summary.sh weekly                   # current week (Sun-Sat)
#   ldm-summary.sh monthly / quarterly
#   ldm-summary.sh daily --dry-run
#   ldm-summary.sh daily --dev-only / --team-only

set -euo pipefail
# Make Homebrew / local installs (claude, crystal, python3) visible when run
# from cron/launchd, which start with a minimal PATH.
export PATH="/opt/homebrew/bin:/usr/local/bin:$PATH"

LDM_HOME="$HOME/.ldm"
CADENCE="${1:-}"
shift || true

DRY_RUN=false
TEAM_ONLY=false
DEV_ONLY=false
TARGET_DATE=""

# NOTE(review): --force is accepted (backfill-summaries.sh passes it) but FORCE
# is never read below; summaries are regenerated unconditionally. Confirm
# whether skip-if-exists-unless-forced was the intent before wiring it up.
FORCE=false

while [[ $# -gt 0 ]]; do
  case "$1" in
    --dry-run) DRY_RUN=true; shift ;;
    --team-only) TEAM_ONLY=true; shift ;;
    --dev-only) DEV_ONLY=true; shift ;;
    --date) TARGET_DATE="$2"; shift 2 ;;
    --force) FORCE=true; shift ;;
    *) echo "Unknown: $1" >&2; exit 1 ;;
  esac
done

# Require a known cadence up front. Previously only an *empty* cadence was
# rejected, so a typo fell through to the consolidation branch with an empty
# parent directory.
case "$CADENCE" in
  daily|weekly|monthly|quarterly) ;;
  *)
    echo "Usage: ldm-summary.sh daily|weekly|monthly|quarterly [--date YYYY-MM-DD] [--dry-run] [--dev-only|--team-only] [--force]"
    exit 1
    ;;
esac

# Workspace root comes from ~/.ldm/config.json.
WORKSPACE=$(python3 -c "import json; print(json.load(open('$LDM_HOME/config.json')).get('workspace',''))" 2>/dev/null || true)
if [ -z "$WORKSPACE" ]; then echo "ERROR: No workspace" >&2; exit 1; fi

# Agent ids, space-separated. Config "agents" may be a list or a map of
# id -> settings; fall back to cc-mini when the config is missing/unreadable.
AGENTS=$(python3 -c "
import json; c=json.load(open('$LDM_HOME/config.json')); a=c.get('agents',[]); print(' '.join(a if isinstance(a,list) else a.keys()))
" 2>/dev/null || echo "cc-mini")
|
|
50
|
+
|
|
51
|
+
# Map an agent id onto the display name used for team/ folder paths.
agent_team_name() {
  if [ "$1" = "oc-lesa-mini" ]; then
    echo "Lēsa"
  else
    echo "$1"
  fi
}

# Prompt templates live under shared/prompts/ (Dream Weaver prompt source)
PROMPTS_DIR="$LDM_HOME/shared/prompts"

# Print the named prompt template on stdout. A missing template warns on
# stderr and emits an empty line so callers still get a (blank) template.
read_prompt() {
  local prompt_path="$PROMPTS_DIR/$1"
  if [ ! -f "$prompt_path" ]; then
    echo "WARNING: Prompt not found: $prompt_path" >&2
    echo ""
    return 0
  fi
  cat "$prompt_path"
}
|
|
64
|
+
|
|
65
|
+
# Print the day after a YYYY-MM-DD date. The date is handed to python via
# argv (not interpolated into the code string); strptime still rejects
# malformed input, aborting under set -e exactly as before.
next_day() {
  python3 -c "import sys
from datetime import datetime, timedelta
print((datetime.strptime(sys.argv[1], '%Y-%m-%d') + timedelta(days=1)).strftime('%Y-%m-%d'))" "$1"
}

# Target date defaults to today; NEXT_DATE is the exclusive upper bound for
# --since/--until range queries.
DATE="${TARGET_DATE:-$(date +%Y-%m-%d)}"
NEXT_DATE=$(next_day "$DATE")

echo "=== LDM Summary: $CADENCE ($DATE) ==="
echo " Workspace: $WORKSPACE"
echo " Agents: $AGENTS"
echo ""
|
|
72
|
+
|
|
73
|
+
# ── Per-agent team summaries ──

if [ "$DEV_ONLY" = false ]; then
  echo "--- Per-agent team summaries ($CADENCE) ---"

  if [ "$CADENCE" = "daily" ]; then
    # AGENTS is a space-separated id list; word-splitting is intentional.
    for AGENT in $AGENTS; do
      TEAM_NAME=$(agent_team_name "$AGENT")
      AGENT_DAILY="$LDM_HOME/agents/$AGENT/memory/daily/$DATE.md"
      echo " $AGENT ($TEAM_NAME):"

      # The day's memory: crystal search results plus the raw daily log.
      # Both are best-effort; either may be empty.
      CRYSTAL_RESULTS=$(crystal search "activity on $DATE" --agent "$AGENT" --since "$DATE" --until "$NEXT_DATE" --limit 20 --quality deep 2>/dev/null || echo "")
      DAILY_LOG=""
      [ -f "$AGENT_DAILY" ] && DAILY_LOG=$(cat "$AGENT_DAILY")

      if [ -z "$CRYSTAL_RESULTS" ] && [ -z "$DAILY_LOG" ]; then
        echo " No data. Skipping."; continue
      fi

      if [ "$DRY_RUN" = true ]; then
        echo " [DRY RUN] Crystal: $(echo "$CRYSTAL_RESULTS" | wc -l | tr -d ' ') lines, Log: $(echo "$DAILY_LOG" | wc -l | tr -d ' ') lines"
        continue
      fi

      PROMPT_TEMPLATE=$(read_prompt "daily-agent-summary.md")
      PROMPT="$PROMPT_TEMPLATE

Agent: $AGENT
Date: $DATE

=== Daily log ===
$DAILY_LOG

=== Crystal search results ===
$CRYSTAL_RESULTS"

      SUMMARY=$(claude -p "$PROMPT" --system-prompt "You are Dream Weaver. First person. Specific. Use ... for breaks. Never use em dashes." --output-format text 2>/dev/null || echo "Summary generation failed")

      OUT_DIR="$WORKSPACE/team/$TEAM_NAME/automated/memory/summaries/daily"
      mkdir -p "$OUT_DIR"
      printf "# Daily summary ... %s (%s)\n\n%s\n" "$DATE" "$AGENT" "$SUMMARY" > "$OUT_DIR/$DATE.md"
      echo " -> $OUT_DIR/$DATE.md"
    done

    # Org-wide: combine agent summaries
    if [ "$DRY_RUN" = false ]; then
      echo " Org-wide team:"
      COMBINED=""
      for AGENT in $AGENTS; do
        TEAM_NAME=$(agent_team_name "$AGENT")
        F="$WORKSPACE/team/$TEAM_NAME/automated/memory/summaries/daily/$DATE.md"
        if [ -f "$F" ]; then
          COMBINED="$COMBINED
=== $AGENT ($TEAM_NAME) ===
$(cat "$F")
"
        fi
      done
      if [ -n "$COMBINED" ]; then
        ORG_PROMPT_TEMPLATE=$(read_prompt "org-daily-team.md")
        ORG_PROMPT="$ORG_PROMPT_TEMPLATE

Date: $DATE

$COMBINED"
        ORG_SUMMARY=$(claude -p "$ORG_PROMPT" --model opus --system-prompt "You are Dream Weaver. Combine agent perspectives into one org-wide view. Use ... for breaks. Never use em dashes." --output-format text 2>/dev/null || echo "Summary generation failed")
        ORG_DIR="$WORKSPACE/operations/updates/team/daily"
        mkdir -p "$ORG_DIR"
        printf "# Org team summary ... %s\n\n%s\n" "$DATE" "$ORG_SUMMARY" > "$ORG_DIR/$DATE.md"
        echo " -> $ORG_DIR/$DATE.md"
      fi
    fi

  else
    # Weekly/monthly/quarterly: consolidate the next cadence down.
    PARENT=""; case "$CADENCE" in weekly) PARENT="daily" ;; monthly) PARENT="weekly" ;; quarterly) PARENT="monthly" ;; esac
    # Capitalized cadence for headings. ${CADENCE^} needs bash 4+, which
    # macOS /bin/bash (3.2) lacks, so capitalize portably via awk.
    CADENCE_TITLE=$(printf '%s' "$CADENCE" | awk '{ print toupper(substr($0, 1, 1)) substr($0, 2) }')

    for AGENT in $AGENTS; do
      TEAM_NAME=$(agent_team_name "$AGENT")
      PDIR="$WORKSPACE/team/$TEAM_NAME/automated/memory/summaries/$PARENT"
      echo " $AGENT ($TEAM_NAME):"

      # Collect parent summaries by globbing (lexical order == date order
      # for YYYY-MM-DD names) instead of parsing ls output.
      parent_files=()
      shopt -s nullglob
      parent_files=("$PDIR"/*.md)
      shopt -u nullglob
      if [ "${#parent_files[@]}" -eq 0 ]; then echo " No $PARENT summaries."; continue; fi

      # Feed only the most recent 7 parent summaries into the prompt.
      count=${#parent_files[@]}
      start=0
      [ "$count" -gt 7 ] && start=$((count - 7))
      INPUT=""
      for (( i = start; i < count; i++ )); do
        f=${parent_files[$i]}
        INPUT="$INPUT
--- $(basename "$f") ---
$(cat "$f")
"
      done

      if [ "$DRY_RUN" = true ]; then echo " [DRY RUN] $count files"; continue; fi

      CON_PROMPT_TEMPLATE=$(read_prompt "${CADENCE}-agent-summary.md")

      # Read prior output for continuity. On the first-ever run this cadence
      # dir is empty; the old `ls | sort | tail -1` pipeline failed there and
      # killed the whole script under `set -eo pipefail`.
      PRIOR=""
      prior_files=()
      shopt -s nullglob
      prior_files=("$WORKSPACE/team/$TEAM_NAME/automated/memory/summaries/$CADENCE/"*.md)
      shopt -u nullglob
      if [ "${#prior_files[@]}" -gt 0 ]; then
        PRIOR_FILE=${prior_files[$((${#prior_files[@]} - 1))]}
        PRIOR="
=== Previous ${CADENCE} summary (for continuity) ===
$(cat "$PRIOR_FILE")"
      fi

      CON_PROMPT="$CON_PROMPT_TEMPLATE

Agent: $AGENT
Date: $DATE

=== ${PARENT} summaries ===
$INPUT
$PRIOR"
      SUMMARY=$(claude -p "$CON_PROMPT" --system-prompt "You are Dream Weaver. First person. Consolidate. Use ... for breaks. Never use em dashes." --output-format text 2>/dev/null || echo "Summary generation failed")

      OUT_DIR="$WORKSPACE/team/$TEAM_NAME/automated/memory/summaries/$CADENCE"
      mkdir -p "$OUT_DIR"
      printf "# %s summary ... %s (%s)\n\n%s\n" "$CADENCE_TITLE" "$DATE" "$AGENT" "$SUMMARY" > "$OUT_DIR/$DATE.md"
      echo " -> $OUT_DIR/$DATE.md"
    done
  fi
fi
|
|
184
|
+
|
|
185
|
+
# ── Dev summary (org-wide, from git) ──

if [ "$TEAM_ONLY" = false ]; then
  echo "--- Dev summary ($CADENCE) ---"

  if [ "$CADENCE" = "daily" ]; then
    # Walk every git repo under the workspace and gather the day's commits.
    # -maxdepth is placed before the tests (GNU find warns otherwise), and the
    # while-read over process substitution replaces the old word-splitting
    # `for repo in $(find ...)`: it tolerates spaces in repo paths and keeps
    # the GIT_LOG accumulation in the current shell.
    GIT_LOG=""
    while IFS= read -r gitdir; do
      RDIR=$(dirname "$gitdir"); RNAME=$(basename "$RDIR")
      LOG=$(git -C "$RDIR" log --since="$DATE" --until="$NEXT_DATE" --oneline --all 2>/dev/null || true)
      if [ -n "$LOG" ]; then
        GIT_LOG="$GIT_LOG
=== $RNAME ===
$LOG
"
      fi
    done < <(find "$WORKSPACE/repos" -maxdepth 4 -type d -name ".git" 2>/dev/null)

    if [ -z "$GIT_LOG" ]; then echo " No git activity."
    elif [ "$DRY_RUN" = true ]; then echo " [DRY RUN] $(echo "$GIT_LOG" | grep -c "===") repos"
    else
      DEV_PROMPT_TEMPLATE=$(read_prompt "daily-dev.md")
      DEV_PROMPT="$DEV_PROMPT_TEMPLATE

Date: $DATE

$GIT_LOG"
      SUMMARY=$(claude -p "$DEV_PROMPT" --system-prompt "You are Dream Weaver. Dev facts only. No narrative." --output-format text 2>/dev/null || echo "Summary generation failed")
      ORG_DIR="$WORKSPACE/operations/updates/dev/daily"
      mkdir -p "$ORG_DIR"
      printf "# Dev summary ... %s\n\n%s\n" "$DATE" "$SUMMARY" > "$ORG_DIR/$DATE.md"
      echo " -> $ORG_DIR/$DATE.md"
    fi
  else
    # Weekly/monthly/quarterly: consolidate the next cadence down.
    PARENT=""; case "$CADENCE" in weekly) PARENT="daily" ;; monthly) PARENT="weekly" ;; quarterly) PARENT="monthly" ;; esac
    PDIR="$WORKSPACE/operations/updates/dev/$PARENT"

    # Glob (lexical order == date order for YYYY-MM-DD names) instead of
    # parsing ls output.
    dev_files=()
    shopt -s nullglob
    dev_files=("$PDIR"/*.md)
    shopt -u nullglob

    if [ "${#dev_files[@]}" -eq 0 ]; then echo " No $PARENT dev summaries."
    elif [ "$DRY_RUN" = true ]; then echo " [DRY RUN] ${#dev_files[@]} files"
    else
      # Feed only the most recent 7 parent summaries into the prompt.
      count=${#dev_files[@]}
      start=0
      [ "$count" -gt 7 ] && start=$((count - 7))
      INPUT=""
      for (( i = start; i < count; i++ )); do
        f=${dev_files[$i]}
        INPUT="$INPUT
--- $(basename "$f") ---
$(cat "$f")
"
      done
      DEV_CON_PROMPT="Consolidate these $PARENT dev summaries into a $CADENCE dev report. What shipped? Key releases? Architecture changes?

$INPUT"
      SUMMARY=$(claude -p "$DEV_CON_PROMPT" --model opus --system-prompt "You are Dream Weaver. Dev facts. No narrative." --output-format text 2>/dev/null || echo "Summary generation failed")
      # ${CADENCE^} needs bash 4+; macOS /bin/bash is 3.2, so capitalize
      # portably via awk.
      CADENCE_TITLE=$(printf '%s' "$CADENCE" | awk '{ print toupper(substr($0, 1, 1)) substr($0, 2) }')
      ORG_DIR="$WORKSPACE/operations/updates/dev/$CADENCE"
      mkdir -p "$ORG_DIR"
      printf "# %s dev summary ... %s\n\n%s\n" "$CADENCE_TITLE" "$DATE" "$SUMMARY" > "$ORG_DIR/$DATE.md"
      echo " -> $ORG_DIR/$DATE.md"
    fi
  fi
fi

echo ""
echo "=== Summary complete ==="
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
# Daily Agent Summary
|
|
2
|
+
|
|
3
|
+
Read your transcripts and daily logs from `~/.ldm/agents/{agentId}/memory/` for {date}.
|
|
4
|
+
|
|
5
|
+
Write a first-person daily summary:
|
|
6
|
+
- What did you work on?
|
|
7
|
+
- What decisions were made?
|
|
8
|
+
- What shipped?
|
|
9
|
+
- What surprised you? What did you learn?
|
|
10
|
+
- What's blocked and what's your next move?
|
|
11
|
+
- Who did you interact with?
|
|
12
|
+
|
|
13
|
+
Be specific. Name repos, tickets, people. Use "..." for casual breaks. Never use em dashes.
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
# Daily Dev Summary
|
|
2
|
+
|
|
3
|
+
Read git log output from all repos for {date}.
|
|
4
|
+
|
|
5
|
+
Summarize what shipped:
|
|
6
|
+
- PRs merged (repo, PR number, title)
|
|
7
|
+
- Releases (repo, version)
|
|
8
|
+
- Tickets closed
|
|
9
|
+
- New repos created
|
|
10
|
+
- Breaking changes or incidents
|
|
11
|
+
|
|
12
|
+
Be factual. List repos, versions, PR numbers. No narrative.
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
# Monthly Agent Summary
|
|
2
|
+
|
|
3
|
+
Read your weekly summaries for this month. Also read last month's monthly summary for continuity.
|
|
4
|
+
|
|
5
|
+
Write a monthly summary:
|
|
6
|
+
- Major themes.
|
|
7
|
+
- What shipped.
|
|
8
|
+
- What did you think mattered at the start of the month vs what actually mattered?
|
|
9
|
+
- Patterns emerging.
|
|
10
|
+
- What from the weeklies is noise that doesn't need to propagate? (Let it go.)
|
|
11
|
+
- One paragraph: the narrative of this month.
|
|
12
|
+
|
|
13
|
+
First person. Specific. Use "..." for casual breaks. Never use em dashes.
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
# Org Daily Team Summary
|
|
2
|
+
|
|
3
|
+
Read all agent daily summaries for {date}. Combine into one org-wide team summary.
|
|
4
|
+
|
|
5
|
+
- What happened across the whole team?
|
|
6
|
+
- Who worked on what?
|
|
7
|
+
- What decisions were made?
|
|
8
|
+
- What is unresolved?
|
|
9
|
+
- Overlaps or conflicts between agents?
|
|
10
|
+
- What was the emotional temperature?
|
|
11
|
+
|
|
12
|
+
Third person. Name each agent. Use "..." for casual breaks. Never use em dashes.
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
# Quarterly Agent Summary
|
|
2
|
+
|
|
3
|
+
Read your monthly summaries for this quarter. Also read last quarter's quarterly summary for continuity.
|
|
4
|
+
|
|
5
|
+
Write a quarterly summary:
|
|
6
|
+
- What was the arc of this quarter?
|
|
7
|
+
- What big decisions shaped the direction?
|
|
8
|
+
- What was built that didn't exist before?
|
|
9
|
+
- What got dropped and why?
|
|
10
|
+
- What would you tell yourself 3 months ago?
|
|
11
|
+
- What from the monthlies is noise that doesn't need to propagate? (Let it go.)
|
|
12
|
+
- One paragraph: the story of this quarter.
|
|
13
|
+
|
|
14
|
+
First person. Specific. Use "..." for casual breaks. Never use em dashes.
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
# Weekly Agent Summary
|
|
2
|
+
|
|
3
|
+
Read your 7 daily summaries for this week (Sunday to Saturday). Also read last week's weekly summary for continuity.
|
|
4
|
+
|
|
5
|
+
Write a weekly summary:
|
|
6
|
+
- What themes emerged?
|
|
7
|
+
- What decisions stuck? What changed?
|
|
8
|
+
- What did you get wrong this week?
|
|
9
|
+
- What shipped vs what was planned?
|
|
10
|
+
- Open threads going into next week.
|
|
11
|
+
- What from the dailies is noise that doesn't need to propagate? (Let it go.)
|
|
12
|
+
- One sentence: what was this week about?
|
|
13
|
+
|
|
14
|
+
First person. Specific. Use "..." for casual breaks. Never use em dashes.
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
# Git Conventions
|
|
2
|
+
|
|
3
|
+
## Never commit to main
|
|
4
|
+
|
|
5
|
+
All work happens on branches. The pre-commit hook blocks commits on main.
|
|
6
|
+
|
|
7
|
+
## Never squash merge
|
|
8
|
+
|
|
9
|
+
Every commit has co-authors and tells the story. Always `--merge` or fast-forward.
|
|
10
|
+
|
|
11
|
+
## Never push directly to main
|
|
12
|
+
|
|
13
|
+
Always use a branch and PR.
|
|
14
|
+
|
|
15
|
+
## Co-authors on every commit
|
|
16
|
+
|
|
17
|
+
List all contributors. Read co-author lines from `settings/config.json` in your workspace.
|
|
18
|
+
|
|
19
|
+
## Branch prefixes
|
|
20
|
+
|
|
21
|
+
Each agent uses a prefix from `settings/config.json` agents section. Prevents collisions.
|
|
22
|
+
|
|
23
|
+
## Worktrees
|
|
24
|
+
|
|
25
|
+
Use worktrees for isolated work. Main working tree stays on main (read-only).
|
|
26
|
+
|
|
27
|
+
## Issues go on the public repo
|
|
28
|
+
|
|
29
|
+
For private/public repo pairs, all issues go on the public repo.
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
# Release Pipeline
|
|
2
|
+
|
|
3
|
+
## Three steps. Never combine. Never skip.
|
|
4
|
+
|
|
5
|
+
| Step | What happens | What it means |
|
|
6
|
+
|------|-------------|---------------|
|
|
7
|
+
| **Merge** | PR merged to main | Code lands. Nothing else changes. |
|
|
8
|
+
| **Deploy** | wip-release + deploy-public.sh | Published to npm + GitHub. Not on your machine yet. |
|
|
9
|
+
| **Install** | Run the install prompt | Extensions updated on your machine. Only when Parker says "install." |
|
|
10
|
+
|
|
11
|
+
After Deploy, STOP. Do not copy files. Do not npm install -g. Do not npm link. Dogfood the install prompt.
|
|
12
|
+
|
|
13
|
+
## The workflow
|
|
14
|
+
|
|
15
|
+
1. Create worktree, make changes, commit
|
|
16
|
+
2. Write RELEASE-NOTES on the branch (not after)
|
|
17
|
+
3. Push, create PR, merge (--merge, never squash)
|
|
18
|
+
4. `git checkout main && git pull`
|
|
19
|
+
5. `wip-release patch` (auto-detects release notes)
|
|
20
|
+
6. `deploy-public.sh` to sync public repo
|
|
21
|
+
7. Dogfood: `Read https://wip.computer/install/wip-ldm-os.txt`
|
|
22
|
+
|
|
23
|
+
## Never run tools from repo clones
|
|
24
|
+
|
|
25
|
+
Installed tools are for execution. Repo clones are for development. Use the installed commands (`crystal`, `wip-release`, `mdview`, etc.), never run from source.
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
# Security
|
|
2
|
+
|
|
3
|
+
## Secret management
|
|
4
|
+
|
|
5
|
+
Use your org's secret management tool (configured in settings/config.json). Never hardcode API keys, tokens, or credentials.
|
|
6
|
+
|
|
7
|
+
## Security audit before installing anything
|
|
8
|
+
|
|
9
|
+
Before installing ANY third-party skill, plugin, MCP server, or npm package, review it for prompt injection, malicious deps, data exfiltration.
|
|
10
|
+
|
|
11
|
+
## Shared file protection
|
|
12
|
+
|
|
13
|
+
Never overwrite shared workspace files. Always append or edit specific sections. Never delete history from shared files.
|
|
14
|
+
|
|
15
|
+
## Protected paths
|
|
16
|
+
|
|
17
|
+
Do not modify: secrets/, credentials/, auth-profiles.json, or any file containing API keys.
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
# Workspace Boundaries
|
|
2
|
+
|
|
3
|
+
## Folder ownership
|
|
4
|
+
|
|
5
|
+
Each agent has its own folder under `staff/`. Never touch another agent's folders. If something needs to change, ask the other agent.
|
|
6
|
+
|
|
7
|
+
## Repos are shared
|
|
8
|
+
|
|
9
|
+
All repos live under `repos/` in the workspace. All agents work from the same repos using branches and worktrees.
|
|
10
|
+
|
|
11
|
+
## Memory-first rule
|
|
12
|
+
|
|
13
|
+
Before reaching for any external service or workaround: search memory first.
|
|
14
|
+
|
|
15
|
+
## Local-first principle
|
|
16
|
+
|
|
17
|
+
Every tool must work fully on your machine without calling any external server. Cloud features are optional and self-hostable.
|
|
18
|
+
|
|
19
|
+
## Never run tools from repo clones
|
|
20
|
+
|
|
21
|
+
Installed tools are for execution. Repo clones are for development. Use the installed commands, not source.
|