@ssm-08/relay 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,41 @@
1
+ {
2
+ "hooks": {
3
+ "SessionStart": [
4
+ {
5
+ "matcher": "",
6
+ "hooks": [
7
+ {
8
+ "type": "command",
9
+ "command": "\"${CLAUDE_PLUGIN_ROOT}/hooks/run-hook.cmd\" session-start",
10
+ "timeout": 2,
11
+ "statusMessage": "Loading relay memory..."
12
+ }
13
+ ]
14
+ }
15
+ ],
16
+ "Stop": [
17
+ {
18
+ "matcher": "",
19
+ "hooks": [
20
+ {
21
+ "type": "command",
22
+ "command": "\"${CLAUDE_PLUGIN_ROOT}/hooks/run-hook.cmd\" stop",
23
+ "timeout": 5
24
+ }
25
+ ]
26
+ }
27
+ ],
28
+ "UserPromptSubmit": [
29
+ {
30
+ "matcher": "",
31
+ "hooks": [
32
+ {
33
+ "type": "command",
34
+ "command": "\"${CLAUDE_PLUGIN_ROOT}/hooks/run-hook.cmd\" user-prompt-submit",
35
+ "timeout": 3
36
+ }
37
+ ]
38
+ }
39
+ ]
40
+ }
41
+ }
@@ -0,0 +1,11 @@
1
+ : << 'CMDBLOCK'
2
+ @echo off
3
+ setlocal
4
+ if "%CLAUDE_PLUGIN_ROOT%"=="" set CLAUDE_PLUGIN_ROOT=%~dp0..
5
+ node "%CLAUDE_PLUGIN_ROOT%\hooks\%1.mjs"
6
+ exit /b %ERRORLEVEL%
7
+ CMDBLOCK
8
+ #!/usr/bin/env bash
9
+ SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
10
+ node "$SCRIPT_DIR/$1.mjs"
11
+ exit $?
@@ -0,0 +1,110 @@
1
+ #!/usr/bin/env node
2
+ import fs from 'node:fs';
3
+ import path from 'node:path';
4
+ import { readMemory } from '../lib/memory.mjs';
5
+ import { readStdin, isMain } from '../lib/util.mjs';
6
+ import { GitSync } from '../lib/sync.mjs';
7
+
8
// Recursively collect every file under broadcastDir (skipping .gitkeep) and
// aggregate them into one markdown string, capped at 50 KB per file and
// 200 KB total. Returns { content: string, skillFiles: string[] } where
// skillFiles lists the basenames of files directly inside broadcastDir/skills.
export function readBroadcastDir(broadcastDir) {
  if (!fs.existsSync(broadcastDir)) return { content: '', skillFiles: [] };
  const files = [];
  function walk(dir) {
    for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
      const full = path.join(dir, entry.name);
      if (entry.isDirectory()) walk(full);
      else if (entry.name !== '.gitkeep') files.push(full);
    }
  }
  walk(broadcastDir);
  files.sort(); // deterministic order across platforms
  if (files.length === 0) return { content: '', skillFiles: [] };

  const skillsDir = path.join(broadcastDir, 'skills');
  const skillFiles = files
    .filter((f) => path.dirname(f) === skillsDir)
    .map((f) => path.basename(f));

  const MAX_FILE_BYTES = 50_000;
  const MAX_TOTAL_BYTES = 200_000;
  let totalBytes = 0;
  const parts = [];
  for (const f of files) {
    const name = path.relative(broadcastDir, f).replace(/\\/g, '/');
    let body = fs.readFileSync(f, 'utf8');
    if (Buffer.byteLength(body, 'utf8') > MAX_FILE_BYTES) {
      // BUGFIX: truncate by BYTES, not JS characters. The cap is checked with
      // Buffer.byteLength but was previously cut with String.slice, which
      // counts UTF-16 units — multi-byte text could exceed 50 KB by up to 3x.
      body = Buffer.from(body, 'utf8')
        .subarray(0, MAX_FILE_BYTES)
        .toString('utf8')
        .replace(/\uFFFD+$/, '') // drop a trailing half-decoded code point
        + '\n<!-- relay: truncated — file exceeds 50 KB -->';
    }
    const entry = `## broadcast: ${name}\n\n${body.trim()}`;
    const entryBytes = Buffer.byteLength(entry, 'utf8');
    if (totalBytes + entryBytes > MAX_TOTAL_BYTES) break; // aggregate cap
    parts.push(entry);
    totalBytes += entryBytes;
  }
  const content = parts.join('\n\n---\n\n');

  return { content, skillFiles };
}
48
+
49
/**
 * Assemble the additionalContext payload for a session: distilled memory
 * plus team broadcast files, separated by horizontal rules. When broadcast
 * skills are present, appends an acknowledgement instruction so the model
 * confirms which skills it loaded. Returns '' when cwd has no .relay dir
 * or nothing to inject.
 */
export function buildContext(cwd) {
  const relayDir = path.join(cwd, '.relay');
  if (!fs.existsSync(relayDir)) return '';

  const sections = [];

  const memoryText = readMemory(path.join(relayDir, 'memory.md')).trim();
  if (memoryText) sections.push(`# Relay Memory\n\n${memoryText}`);

  const { content, skillFiles } = readBroadcastDir(path.join(relayDir, 'broadcast'));
  const broadcastText = content.trim();
  if (broadcastText) {
    const pieces = [`# Relay Broadcast\n\n${broadcastText}`];
    if (skillFiles.length > 0) {
      const count = skillFiles.length;
      const names = skillFiles
        .map((f) => '`' + path.basename(f, path.extname(f)) + '`')
        .join(', ');
      pieces.push(
        `\n\n_Relay: ${count} team skill(s) loaded — ${names}.` +
          ` Acknowledge in your first response with one line: "Loaded ${count} team skill(s): ${names}."_`
      );
    }
    sections.push(pieces.join(''));
  }

  return sections.join('\n\n---\n\n');
}
74
+
75
/** Best-effort append of a timestamped line to .relay/log; never throws. */
function appendLog(cwd, msg) {
  const line = `[${new Date().toISOString()}] ${msg}\n`;
  try {
    fs.appendFileSync(path.join(cwd, '.relay', 'log'), line);
  } catch {
    // logging is best-effort — a missing .relay dir or bad perms is not fatal
  }
}
81
+
82
/**
 * SessionStart hook entry point. Reads the hook payload (JSON with `cwd`)
 * from stdin, best-effort pulls the latest .relay/ state from the remote,
 * builds the combined memory+broadcast context, and emits it as
 * hookSpecificOutput.additionalContext on stdout. Fail-open: any error is
 * written to stderr and the process exits 0 so the session never blocks.
 */
async function main() {
  try {
    const raw = await readStdin();
    const input = JSON.parse(raw);
    const { cwd } = input;

    // Pull latest .relay/ from remote before reading memory
    // (fail-open; fetch capped at 1.5 s, checkout at 0.5 s)
    if (fs.existsSync(path.join(cwd, '.relay'))) {
      try { new GitSync().pull(cwd, { fetchTimeoutMs: 1500, checkoutTimeoutMs: 500 }); } catch {}
    }

    const context = buildContext(cwd);
    if (!context) process.exit(0); // nothing to inject
    appendLog(cwd, 'injection: session-start');
    process.stdout.write(
      JSON.stringify({
        hookSpecificOutput: {
          hookEventName: 'SessionStart',
          additionalContext: context,
        },
      }) + '\n'
    );
  } catch (e) {
    process.stderr.write(`[relay] session-start error: ${e.message}\n`);
    process.exit(0); // fail-open
  }
}

if (isMain(import.meta.url)) main();
package/hooks/stop.mjs ADDED
@@ -0,0 +1,160 @@
1
+ #!/usr/bin/env node
2
+ import fs from 'node:fs';
3
+ import path from 'node:path';
4
+ import url from 'node:url';
5
+ import { spawn } from 'node:child_process';
6
+ import { readStdin, isMain } from '../lib/util.mjs';
7
+
8
// ESM has no __dirname; reconstruct it from import.meta.url.
const __dirname = path.dirname(url.fileURLToPath(import.meta.url));

// Distill once this many turns have accumulated (env-overridable).
const TURNS_THRESHOLD = process.env.RELAY_TURNS_THRESHOLD ? parseInt(process.env.RELAY_TURNS_THRESHOLD, 10) : 5;
// Idle trigger: distill pending turns after this much quiet time (default 2 min).
const IDLE_MS = process.env.RELAY_IDLE_MS ? parseInt(process.env.RELAY_IDLE_MS, 10) : 2 * 60 * 1000;
12
+
13
/**
 * Atomically replace `watermarkPath` with the JSON-serialized `state`:
 * write to a unique temp file, then rename over the target. The rename is
 * retried a few times on EPERM/EBUSY/EACCES (transient Windows sharing
 * violations from AV/indexers); any other error aborts immediately.
 * @throws the last rename error if every attempt fails.
 */
export function writeWatermarkAtomic(watermarkPath, state) {
  const tmp = `${watermarkPath}.${process.pid}.${Date.now()}.tmp`;
  fs.writeFileSync(tmp, JSON.stringify(state, null, 2));
  let lastErr;
  for (let i = 0; i < 5; i++) {
    try { fs.renameSync(tmp, watermarkPath); return; } catch (e) {
      lastErr = e;
      if (e.code !== 'EPERM' && e.code !== 'EBUSY' && e.code !== 'EACCES') break;
      // Staggered busy-wait: reduces concurrent NTFS rename contention on Windows
      const end = Date.now() + i + 1;
      while (Date.now() < end) {}
    }
  }
  try { fs.unlinkSync(tmp); } catch {}
  throw lastErr;
}

/**
 * Record one completed turn in .relay/state/watermark.json and return the
 * updated state. Robust against a missing, corrupt, or non-object watermark
 * file: unusable content falls back to a fresh state instead of throwing.
 */
export function updateWatermark(relayDir) {
  const stateDir = path.join(relayDir, 'state');
  fs.mkdirSync(stateDir, { recursive: true });

  const watermarkPath = path.join(stateDir, 'watermark.json');
  let state = { turns_since_distill: 0 };
  try {
    const parsed = JSON.parse(fs.readFileSync(watermarkPath, 'utf8'));
    // BUGFIX: JSON.parse can legally return null, a number, a string, or an
    // array (e.g. a file containing just "null") — any of those would crash
    // on property assignment below. Only accept a plain object.
    if (parsed !== null && typeof parsed === 'object' && !Array.isArray(parsed)) {
      state = parsed;
    }
  } catch {}

  state.turns_since_distill = (state.turns_since_distill ?? 0) + 1;
  state.last_turn_at = Date.now();

  writeWatermarkAtomic(watermarkPath, state);
  return state;
}
46
+
47
/**
 * Decide whether a distillation pass is due for the given watermark state.
 * Honors the distiller_running flag, but clears it when the recorded PID
 * provably no longer exists (ESRCH) — so a distiller killed by OOM or a
 * forced reboot cannot wedge the pipeline forever. Triggers on either the
 * turn threshold or an idle timeout with pending turns.
 */
export function shouldDistill(state) {
  if (state.distiller_running) {
    if (!state.distiller_pid) return false; // no PID recorded — trust the flag
    let stale = false;
    try {
      process.kill(state.distiller_pid, 0); // signal 0 = liveness probe only
    } catch (e) {
      // Only ESRCH proves the process is gone. EPERM (or anything else)
      // means it may still exist, so conservatively honor the flag.
      stale = e.code === 'ESRCH';
    }
    if (!stale) return false;
    // Stale flag — fall through to the normal threshold checks.
  }

  if (state.turns_since_distill >= TURNS_THRESHOLD) return true;

  // Idle trigger: turns accumulated but below threshold, and the session has
  // gone quiet. Keyed off last_turn_at (not last_distilled_at) so it can
  // fire even before the first distillation.
  const idle =
    state.turns_since_distill > 0 &&
    Boolean(state.last_turn_at) &&
    Date.now() - state.last_turn_at > IDLE_MS;
  return idle;
}
73
+
74
/**
 * Launch the distiller as a detached background process so the Stop hook can
 * return immediately. Child stdout/stderr are appended to .relay/log, or
 * discarded if the log can't be opened. Returns the ChildProcess.
 * NOTE(review): spawn failures surface asynchronously via the 'error' event,
 * not as a throw — callers should check `proc.pid` to know if it started.
 */
export function spawnDistiller({ relayDir, transcriptPath, since, cwd }) {
  // Resolve distiller.mjs via the plugin root when installed, falling back
  // to the source layout (hooks/../distiller.mjs).
  const distillerPath =
    process.env.CLAUDE_PLUGIN_ROOT
      ? path.join(process.env.CLAUDE_PLUGIN_ROOT, 'distiller.mjs')
      : path.join(__dirname, '..', 'distiller.mjs');

  const memoryPath = path.join(relayDir, 'memory.md');
  const logPath = path.join(relayDir, 'log');

  const args = [
    distillerPath,
    '--transcript', transcriptPath,
    '--memory', memoryPath,
    '--out', memoryPath,
    '--cwd', cwd,
  ];
  if (since) args.push('--since', since);

  let logFd;
  try {
    logFd = fs.openSync(logPath, 'a');
  } catch {
    logFd = 'ignore'; // no log file — drop child output
  }

  const proc = spawn('node', args, {
    detached: true,    // let the child outlive this hook process
    windowsHide: true,
    stdio: ['ignore', logFd, logFd],
  });
  proc.on('error', (e) => {
    process.stderr.write(`[relay] distiller spawn failed: ${e.message}\n`);
  });
  proc.unref();
  // close parent's copy — child already inherited its own fd
  if (typeof logFd === 'number') { try { fs.closeSync(logFd); } catch {} }
  return proc;
}
112
+
113
/**
 * Stop hook entry point: record the finished turn and, when distillation is
 * due, claim the trigger lock and launch the background distiller.
 * Fail-open: errors log to stderr and the hook always exits 0.
 */
async function main() {
  try {
    const raw = await readStdin();
    const input = JSON.parse(raw);
    const { cwd, transcript_path } = input;
    const relayDir = path.join(cwd, '.relay');
    if (!fs.existsSync(relayDir)) process.exit(0);

    const state = updateWatermark(relayDir);

    if (shouldDistill(state) && transcript_path) {
      // openSync('wx') is atomic — EEXIST if another Stop hook beat us here
      const triggerLock = path.join(relayDir, 'state', 'distill-trigger.lock');
      try {
        fs.closeSync(fs.openSync(triggerLock, 'wx'));
      } catch {
        process.exit(0); // another process won the race
      }

      // BUGFIX: release the lock in `finally`. Previously an exception between
      // acquiring the lock and unlinking it (e.g. writeWatermarkAtomic
      // throwing) leaked the lock file — and since acquisition uses 'wx',
      // every future distillation attempt would fail forever.
      try {
        const watermarkPath = path.join(relayDir, 'state', 'watermark.json');

        const distProc = spawnDistiller({
          relayDir,
          transcriptPath: transcript_path,
          since: state.last_uuid || '',
          cwd,
        });

        // Only set distiller_running + reset turns if OS assigned a pid.
        // If spawn silently failed (pid undefined), don't reset turns —
        // the distiller never ran.
        const pid = distProc?.pid;
        if (pid) {
          state.distiller_running = true;
          state.distiller_pid = pid;
          state.turns_since_distill = 0;
        }
        writeWatermarkAtomic(watermarkPath, state);
      } finally {
        // Release only after distiller_running is written — prevents a second
        // concurrent Stop hook from passing the lock check before the flag is set.
        try { fs.unlinkSync(triggerLock); } catch {}
      }
    }
  } catch (e) {
    process.stderr.write(`[relay] stop error: ${e.message}\n`);
  }
  process.exit(0);
}

if (isMain(import.meta.url)) main();
@@ -0,0 +1,176 @@
1
+ #!/usr/bin/env node
2
+ import fs from 'node:fs';
3
+ import path from 'node:path';
4
+ import { readStdin, isMain } from '../lib/util.mjs';
5
+ import { readMemory, writeMemoryAtomic } from '../lib/memory.mjs';
6
+ import { GitSync } from '../lib/sync.mjs';
7
+ import { diffMemory, renderDelta, hashMemory } from '../lib/diff-memory.mjs';
8
+
9
// Snapshot of the memory content as of the last injection — the diff baseline.
const SNAPSHOT_FILE = 'last-injected-memory.md';

/**
 * UPS owns its own state file — separate from watermark.json (owned by stop.mjs).
 * This eliminates the read-modify-write race that existed when both hooks wrote
 * to the shared watermark.json: UPS only needs last_injected_mtime + last_injected_hash,
 * Stop only needs turns_since_distill + distiller_running + last_turn_at.
 * No shared mutable state → no lock needed.
 */
const UPS_STATE_FILE = 'ups-state.json';
19
+
20
/** Best-effort append of a timestamped, [ups]-tagged line to .relay/log; never throws. */
function appendLog(cwd, msg) {
  const line = `[${new Date().toISOString()}] [ups] ${msg}\n`;
  try {
    fs.appendFileSync(path.join(cwd, '.relay', 'log'), line);
  } catch {
    // logging is best-effort — never let it break the hook
  }
}
26
+
27
/** Read ups-state.json; any read or parse failure yields a fresh empty state. */
function readUpsState(upsStatePath) {
  try {
    return JSON.parse(fs.readFileSync(upsStatePath, 'utf8'));
  } catch {
    return {};
  }
}
30
+
31
/**
 * Atomically persist UPS state: write a unique temp file, then rename it over
 * the target. Transient Windows sharing violations (EPERM/EBUSY/EACCES) are
 * retried with a short staggered busy-wait; any other error aborts at once.
 * @throws the last rename error if every attempt fails.
 */
function writeUpsStateAtomic(upsStatePath, state) {
  const tmpPath = `${upsStatePath}.${process.pid}.${Date.now()}.tmp`;
  fs.writeFileSync(tmpPath, JSON.stringify(state, null, 2));

  const RETRYABLE = new Set(['EPERM', 'EBUSY', 'EACCES']);
  let failure;
  for (let attempt = 0; attempt < 5; attempt++) {
    try {
      fs.renameSync(tmpPath, upsStatePath);
      return;
    } catch (err) {
      failure = err;
      if (!RETRYABLE.has(err.code)) break;
      // Synchronous stagger — a hook has no async budget for a real timer.
      const deadline = Date.now() + attempt + 1;
      while (Date.now() < deadline) {}
    }
  }
  try { fs.unlinkSync(tmpPath); } catch {}
  throw failure;
}
47
+
48
// Record a context injection of the given hook event in the relay log
// (thin delegate to appendLog; kept as a named seam for future metrics).
function markInjection(cwd, event) {
  appendLog(cwd, `injection: ${event}`);
}
51
+
52
/**
 * Core decision logic of the UserPromptSubmit hook, exported for unit tests.
 * Reads files but performs no writes. Decides what, if anything, to inject
 * for this prompt:
 *   - null → skip entirely (no memory file / unchanged mtime / stat failure)
 *   - { delta, newUpsState, newSnapshot } otherwise, where `delta` may be
 *     null (state refresh only) and `newSnapshot` may be null (leave the
 *     stored snapshot untouched).
 *
 * @param {object} opts
 * @param {string} opts.cwd
 * @param {string} opts.relayDir
 * @param {string} opts.upsStatePath - UPS-owned state file (never written by stop.mjs)
 * @param {string} opts.snapshotPath - memory content as of the last injection
 * @param {string} opts.memoryPath
 */
export function buildInjection({ cwd, relayDir, upsStatePath, snapshotPath, memoryPath }) {
  // 1. memory.md must exist and be a regular file.
  if (!fs.existsSync(memoryPath)) return null;
  let stat;
  try {
    stat = fs.statSync(memoryPath);
  } catch {
    return null;
  }
  if (!stat.isFile()) return null;
  const currentMtime = stat.mtimeMs;

  const st = readUpsState(upsStatePath);

  // 2. Fast path: file untouched since the last injection check.
  if (st.last_injected_mtime === currentMtime) return null;

  // 3. Read current content and fingerprint it.
  const currentContent = readMemory(memoryPath);
  const currentHash = hashMemory(currentContent);

  // 4. First run: seed state + snapshot without injecting anything.
  if (!st.last_injected_hash) {
    return {
      delta: null,
      newUpsState: { last_injected_mtime: currentMtime, last_injected_hash: currentHash },
      newSnapshot: currentContent,
    };
  }

  // 5. Touched but identical content (rewrite-in-place): refresh mtime only.
  if (currentHash === st.last_injected_hash) {
    return {
      delta: null,
      newUpsState: { ...st, last_injected_mtime: currentMtime },
      newSnapshot: null,
    };
  }

  // 6. Real change — diff against the stored snapshot. A missing or
  // unreadable snapshot degrades to an empty baseline (full diff).
  let previous = '';
  try {
    if (fs.existsSync(snapshotPath)) previous = fs.readFileSync(snapshotPath, 'utf8');
  } catch {}

  const rendered = renderDelta(diffMemory(previous, currentContent));

  return {
    delta: rendered || null,
    newUpsState: { last_injected_mtime: currentMtime, last_injected_hash: currentHash },
    newSnapshot: currentContent,
  };
}
121
+
122
/**
 * UserPromptSubmit hook entry point: pull the latest remote memory, compute
 * the delta since the last injection, persist UPS-owned state, and emit the
 * delta as hookSpecificOutput.additionalContext. Fail-open throughout: any
 * error is logged and the hook exits 0 so the session is never broken.
 */
async function main() {
  let cwd;
  try {
    const raw = await readStdin();
    const input = JSON.parse(raw);
    cwd = input.cwd;

    const relayDir = path.join(cwd, '.relay');
    if (!fs.existsSync(relayDir)) { process.exit(0); }

    // Pull latest memory from remote with tight timeouts (hooks.json allots
    // this hook 3 s; fetch is capped at 1.5 s, checkout at 0.5 s)
    try {
      new GitSync().pull(cwd, { fetchTimeoutMs: 1500, checkoutTimeoutMs: 500 });
    } catch (e) {
      appendLog(cwd, `pull failed: ${e.message}`);
      // Fail-open: proceed with whatever is on disk
    }

    const stateDir = path.join(relayDir, 'state');
    const upsStatePath = path.join(stateDir, UPS_STATE_FILE);
    const snapshotPath = path.join(stateDir, SNAPSHOT_FILE);
    const memoryPath = path.join(relayDir, 'memory.md');

    const result = buildInjection({ cwd, relayDir, upsStatePath, snapshotPath, memoryPath });

    if (!result) { process.exit(0); } // nothing changed — no injection

    const { delta, newUpsState, newSnapshot } = result;

    // Write state updates — only to UPS-owned files; never touch watermark.json
    fs.mkdirSync(stateDir, { recursive: true });
    writeUpsStateAtomic(upsStatePath, newUpsState);
    if (newSnapshot !== null) {
      writeMemoryAtomic(snapshotPath, newSnapshot);
    }

    if (!delta) { process.exit(0); } // state refreshed, but nothing to show

    markInjection(cwd, 'user-prompt-submit');

    // Emit additionalContext
    process.stdout.write(JSON.stringify({
      hookSpecificOutput: {
        hookEventName: 'UserPromptSubmit',
        additionalContext: delta,
      },
    }) + '\n');
  } catch (e) {
    // Fail-open: log + exit 0, never break the session
    try { if (cwd) appendLog(cwd, `error: ${e && e.message ? e.message : e}`); } catch {}
    process.exit(0);
  }
}

if (isMain(import.meta.url)) main();
@@ -0,0 +1,97 @@
1
+ import crypto from 'node:crypto';
2
+
3
// Markdown structure markers for memory.md: "## Section" headers and "- item" bullets.
const SECTION_RE = /^## (.+)$/;
const BULLET_RE = /^- (.+)$/;

/**
 * Parse memory.md into a Map of section name → Set of bullet text.
 * Provenance tags like "[session X, turn N]" stay embedded in the bullet so
 * they don't cause spurious diffs; the set representation makes bullet order
 * irrelevant. Content before the first "##" header (e.g. a "#" title line)
 * is intentionally ignored — the distiller emits only section-based bullets.
 */
export function parseSections(markdown) {
  const sections = new Map();
  let current = null;
  for (const rawLine of (markdown || '').split('\n')) {
    const line = rawLine.trimEnd();
    const header = SECTION_RE.exec(line);
    if (header) {
      current = header[1].trim();
      if (!sections.has(current)) sections.set(current, new Set());
      continue;
    }
    if (!current) continue; // bullets before any header are ignored
    const bullet = BULLET_RE.exec(line);
    if (bullet) sections.get(current).add(bullet[1].trim());
  }
  return sections;
}

/**
 * Section-aware set diff of two memory.md strings.
 * Returns { added: Map<section, string[]>, removed: Map<section, string[]> }.
 * Reordering bullets within a section yields an empty diff.
 */
export function diffMemory(oldText, newText) {
  const before = parseSections(oldText);
  const after = parseSections(newText);
  const added = new Map();
  const removed = new Map();

  for (const sec of new Set([...before.keys(), ...after.keys()])) {
    const oldBullets = before.get(sec) ?? new Set();
    const newBullets = after.get(sec) ?? new Set();

    const plus = [];
    for (const b of newBullets) if (!oldBullets.has(b)) plus.push(b);
    const minus = [];
    for (const b of oldBullets) if (!newBullets.has(b)) minus.push(b);

    if (plus.length > 0) added.set(sec, plus);
    if (minus.length > 0) removed.set(sec, minus);
  }

  return { added, removed };
}
57
+
58
/**
 * Render a diff ({ added, removed } Maps of section → bullet arrays) into a
 * compact markdown string for additionalContext. Output is capped at
 * maxBytes; a truncated result is cut at a line boundary and tagged with a
 * marker so the model knows the context is partial. Empty diff → ''.
 */
export function renderDelta(diff, { maxBytes = 4096 } = {}) {
  const { added, removed } = diff;
  if (added.size === 0 && removed.size === 0) return '';

  const lines = ['## Relay live update'];
  for (const [sec, bullets] of added) {
    lines.push(`### New in ${sec}`);
    bullets.forEach((b) => lines.push(`- ${b}`));
  }
  for (const [sec, bullets] of removed) {
    lines.push(`### Removed from ${sec}`);
    bullets.forEach((b) => lines.push(`- ${b}`));
  }

  let result = lines.join('\n');
  if (Buffer.byteLength(result, 'utf8') > maxBytes) {
    // Clip on a byte budget (leaving room for the marker), then back up to
    // the last newline so we never emit a split UTF-8 sequence or half line.
    const clipped = Buffer.from(result, 'utf8').subarray(0, maxBytes - 60).toString('utf8');
    const cut = clipped.lastIndexOf('\n');
    result = (cut > 0 ? clipped.slice(0, cut) : clipped) +
      '\n_…truncated, see .relay/memory.md for full context…_';
  }
  return result;
}
91
+
92
/**
 * Short content fingerprint: the first 12 hex chars of SHA-256. Used for
 * cheap change detection; null/undefined hash the same as the empty string.
 */
export function hashMemory(text) {
  const digest = crypto.createHash('sha256').update(text || '').digest('hex');
  return digest.slice(0, 12);
}