@ssm-08/relay 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude-plugin/plugin.json +13 -0
- package/LICENSE +182 -0
- package/README.md +158 -0
- package/bin/relay.mjs +394 -0
- package/commands/relay-handoff.toml +28 -0
- package/distiller.mjs +254 -0
- package/hooks/hooks.json +41 -0
- package/hooks/run-hook.cmd +11 -0
- package/hooks/session-start.mjs +110 -0
- package/hooks/stop.mjs +160 -0
- package/hooks/user-prompt-submit.mjs +176 -0
- package/lib/diff-memory.mjs +97 -0
- package/lib/filter.mjs +110 -0
- package/lib/memory.mjs +26 -0
- package/lib/sync.mjs +218 -0
- package/lib/transcript.mjs +120 -0
- package/lib/util.mjs +22 -0
- package/package.json +42 -0
- package/prompts/distill.md +65 -0
- package/scripts/installer.mjs +700 -0
package/lib/filter.mjs
ADDED
|
@@ -0,0 +1,110 @@
|
|
|
1
|
+
// ── Category weights ──────────────────────────────────────────────────────
// Each signal is { weight, pattern }. scoreTier0 counts matches per pattern
// across the rendered transcript and caps each category's contribution, so
// a single repeated keyword cannot dominate the score. Higher weight means
// stronger evidence that the session contains durable, team-relevant facts.
const SIGNALS = [
  // High value — explicit decision language
  { weight: 3, pattern: /\b(decided?|we('re| are) going with|chose|choice|picked|settled on|agreed)\b/i },

  // High value — rejection / failure
  { weight: 3, pattern: /\b(rejected?|doesn'?t work|won'?t work|tried .{0,30} (but|and it)|abandoned|reverted)\b/i },

  // High value — deliberate hacks in code
  { weight: 3, pattern: /\b(workaround|hack|hardcod\w*|stub|mock|placeholder|skip for now)\b/i },

  // High value — known broken / intentionally deferred bugs
  { weight: 3, pattern: /\b(known (issue|bug|broken)|intentionally (broken|disabled)|won'?t fix|deliberately (broken|disabled)|leaving .{0,15} broken)\b/i },

  // Medium — scope signals
  { weight: 2, pattern: /\b(out of scope|descoped|added to scope|deferred|cut|dropping)\b/i },

  // Medium — open questions
  { weight: 2, pattern: /\b(open question|still (need|deciding)|not sure yet|revisit|TBD)\b/i },

  // Medium — maintenance flags. No /i flag — presumably intended to match
  // only the uppercase marker forms (TODO/FIXME).
  // NOTE(review): scoreTier0 recompiles every pattern with hard-coded 'gi',
  // which overrides this case-sensitivity — confirm which behavior is intended.
  { weight: 2, pattern: /\b(TODO|FIXME|before (demo|launch|merge))\b/ },

  // Low — weak signal words that need reinforcement
  { weight: 1, pattern: /\b(actually|instead|broken|later|for now)\b/i },
];
|
|
27
|
+
|
|
28
|
+
// File edits are strong signal — team changed something deliberately.
// Matches the rendered-transcript form produced by renderAssistantContent
// in lib/transcript.mjs: "[tool_use Edit] {...}", "[tool_use Write] ..." etc.
const EDIT_TOOL_REGEX = /\[tool_use (Edit|Write|MultiEdit)\]/;
// Weight applied per matched edit tool call (contribution capped in scoreTier0).
const EDIT_WEIGHT = 3;
|
|
31
|
+
|
|
32
|
+
// ── Structural signals (scored on transcript shape, not text) ─────────────

/**
 * Heuristic score derived from the transcript's shape rather than its words:
 * conversation length, average user-message length, and how many assistant
 * turns invoked a file-editing tool. Operates on raw parsed JSONL lines.
 *
 * @param {Array<object>} lines - parsed transcript JSONL entries
 * @returns {number} structural score (0, 2, 4, ... up to 10)
 */
function structuralScore(lines) {
  let total = 0;

  // Long sessions are more likely to contain decisions.
  let turnCount = 0;
  for (const entry of lines) {
    if (entry && (entry.type === 'user' || entry.type === 'assistant')) turnCount++;
  }
  if (turnCount >= 10) total += 2;
  if (turnCount >= 20) total += 2;

  // Long user messages = explaining context or decisions, not just commands.
  let userCount = 0;
  let userChars = 0;
  for (const entry of lines) {
    if (!entry || entry.type !== 'user') continue;
    userCount++;
    const content = entry.message?.content;
    const text = typeof content === 'string' ? content : JSON.stringify(content || '');
    userChars += text.length;
  }
  if (userChars / (userCount || 1) > 200) total += 2;

  // Multiple file edits = real work happened.
  let editTurns = 0;
  for (const entry of lines) {
    if (entry?.type !== 'assistant') continue;
    const content = entry.message?.content;
    const touchedFiles = Array.isArray(content) && content.some(
      (b) => b?.type === 'tool_use' && ['Edit', 'Write', 'MultiEdit'].includes(b.name)
    );
    if (touchedFiles) editTurns++;
  }
  if (editTurns >= 3) total += 2;
  if (editTurns >= 8) total += 2;

  return total;
}
|
|
63
|
+
|
|
64
|
+
// ── Threshold (env-overridable for faster test cycles) ────────────────────

/**
 * Tier-0 pass threshold. Read at call time (not module load) so tests can
 * set the env var after import. Falls back to 3 when RELAY_TIER0_THRESHOLD
 * is unset or not a parseable integer.
 *
 * @returns {number} threshold score a transcript must reach to pass tier 0
 */
function getThreshold() {
  // Number.parseInt / Number.isNaN instead of the coercing globals
  // (isNaN('') is false after coercion; Number.isNaN never coerces).
  const parsed = Number.parseInt(process.env.RELAY_TIER0_THRESHOLD ?? '3', 10);
  return Number.isNaN(parsed) ? 3 : parsed;
}
|
|
70
|
+
|
|
71
|
+
// ── Main export ───────────────────────────────────────────────────────────

/**
 * Score a rendered transcript for tier-0 "worth distilling" signal.
 *
 * Combines three sources: keyword-category matches in the rendered text,
 * edit-tool markers in the rendered text, and structural shape of the raw
 * JSONL lines.
 *
 * @param {string} transcriptText - transcript rendered by renderForDistiller
 * @param {Array<object>} lines - raw parsed transcript JSONL entries
 * @returns {{score: number, passes: boolean, threshold?: number, breakdown: Array<object>}}
 */
export function scoreTier0(transcriptText, lines = []) {
  if (typeof transcriptText !== 'string') return { score: 0, passes: false, breakdown: [] };
  let score = 0;
  const breakdown = [];

  // Text-based signals. Preserve each pattern's own flags and only add 'g'
  // for counting. The previous hard-coded 'gi' silently forced every pattern
  // case-insensitive, defeating the deliberately case-sensitive TODO/FIXME
  // pattern in SIGNALS (which has no /i flag).
  for (const { weight, pattern } of SIGNALS) {
    const flags = pattern.flags.includes('g') ? pattern.flags : `${pattern.flags}g`;
    const matches = transcriptText.match(new RegExp(pattern.source, flags));
    if (matches) {
      // Cap contribution per category to avoid one word dominating
      const contribution = Math.min(matches.length * weight, weight * 3);
      score += contribution;
      breakdown.push({ pattern: pattern.source, matches: matches.length, contribution });
    }
  }

  // Edit tool signal (rendered transcript format: [tool_use Edit] {...})
  const editMatches = (transcriptText.match(new RegExp(EDIT_TOOL_REGEX.source, 'g')) || []).length;
  if (editMatches > 0) {
    const contribution = Math.min(editMatches * EDIT_WEIGHT, EDIT_WEIGHT * 4);
    score += contribution;
    breakdown.push({ pattern: 'tool_use Edit/Write/MultiEdit', matches: editMatches, contribution });
  }

  // Structural score (operates on raw JSONL lines, not rendered text)
  const structural = structuralScore(lines);
  score += structural;
  if (structural > 0) breakdown.push({ pattern: 'structural', matches: 1, contribution: structural });

  const threshold = getThreshold();
  return { score, passes: score >= threshold, threshold, breakdown };
}
|
|
104
|
+
|
|
105
|
+
// Backwards-compatible boolean wrapper for distiller.mjs and tests.
// Logs the scoring verdict to stderr (stdout is reserved for hook output).
export function hasTier0Signal(transcriptText, lines = []) {
  const result = scoreTier0(transcriptText, lines);
  process.stderr.write(
    `[relay] tier0 score=${result.score} threshold=${result.threshold} passes=${result.passes}\n`
  );
  return result.passes;
}
|
package/lib/memory.mjs
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import fs from 'node:fs';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
|
|
4
|
+
/**
 * Read the shared memory file, returning '' when it does not exist.
 *
 * Uses a single read with ENOENT/ENOTDIR handling instead of an existsSync
 * pre-check, closing the check-then-read race (the file could be removed or
 * swapped between the two calls by a concurrent distiller). Other errors
 * (e.g. EACCES) still propagate, matching the previous behavior.
 *
 * @param {string} memoryPath - absolute or cwd-relative path to memory.md
 * @returns {string} file contents, or '' when the file is absent
 */
export function readMemory(memoryPath) {
  try {
    return fs.readFileSync(memoryPath, 'utf8');
  } catch (e) {
    // ENOTDIR covers a path component being a regular file — existsSync
    // previously returned false for that case too.
    if (e.code === 'ENOENT' || e.code === 'ENOTDIR') return '';
    throw e;
  }
}
|
|
8
|
+
|
|
9
|
+
/**
 * Atomically replace the memory file: write to a unique temp file in the
 * same directory, then rename over the target. The rename is retried up to
 * three times on EPERM/EBUSY (Windows can hold the target briefly, e.g. an
 * antivirus scan); any other error stops retrying immediately. On final
 * failure the temp file is cleaned up and the last error is rethrown.
 *
 * @param {string} memoryPath - destination path (parent dirs created as needed)
 * @param {string} content - full file content to write
 */
export function writeMemoryAtomic(memoryPath, content) {
  const targetDir = path.dirname(memoryPath);
  fs.mkdirSync(targetDir, { recursive: true });

  // pid + timestamp makes the temp name unique per writer process.
  const tmpPath = path.join(
    targetDir,
    `.${path.basename(memoryPath)}.${process.pid}.${Date.now()}.tmp`
  );
  fs.writeFileSync(tmpPath, content, 'utf8');

  let lastError;
  let attempt = 0;
  while (attempt < 3) {
    try {
      fs.renameSync(tmpPath, memoryPath);
      return;
    } catch (err) {
      lastError = err;
      // Only transient Windows-style sharing violations are worth retrying.
      if (err.code !== 'EPERM' && err.code !== 'EBUSY') break;
    }
    attempt++;
  }
  try { fs.unlinkSync(tmpPath); } catch {}
  throw lastError;
}
|
package/lib/sync.mjs
ADDED
|
@@ -0,0 +1,218 @@
|
|
|
1
|
+
import fs from 'node:fs';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import { spawnSync } from 'node:child_process';
|
|
4
|
+
|
|
5
|
+
/**
 * Run one git subcommand synchronously and return its trimmed stdout.
 *
 * I1: array args to spawnSync — no shell involved, no injection surface.
 * windowsHide: true prevents a console window flash on Windows per git call.
 *
 * @param {string[]} args - git subcommand and arguments
 * @param {string} cwd - repository working directory
 * @param {{timeout?: number}} [opts] - per-call timeout in ms (default 10s)
 * @returns {string} trimmed stdout
 * @throws spawn-level errors, or an Error (with .status/.stderr) on non-zero exit
 */
function git(args, cwd, { timeout = 10_000 } = {}) {
  const result = spawnSync('git', args, {
    cwd,
    encoding: 'utf8',
    timeout,
    windowsHide: true,
    stdio: ['pipe', 'pipe', 'pipe'],
  });
  if (result.error) throw result.error;
  if (result.status !== 0) {
    const detail = (result.stderr || '').trim() || `exit code ${result.status}`;
    const failure = new Error(detail);
    failure.status = result.status;
    failure.stderr = result.stderr;
    throw failure;
  }
  return (result.stdout || '').trim();
}
|
|
22
|
+
|
|
23
|
+
/**
 * Best-effort append of a timestamped line to <cwd>/.relay/log.
 * Never throws — logging must not be able to break a hook or the sync path.
 */
function appendLog(cwd, msg) {
  const line = `[${new Date().toISOString()}] ${msg}\n`;
  try {
    fs.appendFileSync(path.join(cwd, '.relay', 'log'), line);
  } catch {}
}
|
|
29
|
+
|
|
30
|
+
/**
 * Zero the turns-since-distill counter in .relay/state/watermark.json and
 * clear the distiller-running flag/PID (prevents a stuck flag if a distiller
 * was killed mid-flight). The file is rewritten via temp-file + rename for
 * atomicity. All failures are logged and swallowed — this is housekeeping.
 */
function resetWatermarkTurns(cwd) {
  const watermarkPath = path.join(cwd, '.relay', 'state', 'watermark.json');
  let renameFailureLogged = false;
  try {
    let state = {};
    // Missing or corrupt watermark file simply means "start from empty state".
    try {
      state = JSON.parse(fs.readFileSync(watermarkPath, 'utf8'));
    } catch {}

    state.turns_since_distill = 0;
    delete state.distiller_running;
    delete state.distiller_pid;

    const tmpPath = `${watermarkPath}.${process.pid}.${Date.now()}.tmp`;
    fs.writeFileSync(tmpPath, JSON.stringify(state, null, 2));
    try {
      fs.renameSync(tmpPath, watermarkPath);
    } catch (renameErr) {
      try { fs.unlinkSync(tmpPath); } catch {}
      appendLog(cwd, `sync.resetWatermarkTurns: rename failed: ${renameErr.message}`);
      renameFailureLogged = true;
      throw renameErr;
    }
  } catch (err) {
    // Avoid double-logging the rename failure handled above.
    if (renameFailureLogged) return;
    appendLog(cwd, `sync.resetWatermarkTurns: failed: ${err.message}`);
  }
}
|
|
52
|
+
|
|
53
|
+
// Implements RelaySync interface. Swap for CloudSync later by replacing this class.
// All git access goes through the local git() wrapper (array args, per-call
// timeouts); all failures are logged via appendLog and swallowed — sync is
// strictly best-effort and must never block a Claude Code session.
export class GitSync {
  // Pull latest .relay/ files from remote. Fail-open — session proceeds on any error.
  // Only fetches memory.md and broadcast/; .relay/state/ is gitignored and machine-local.
  // opts.fetchTimeoutMs / opts.checkoutTimeoutMs override defaults (UPS uses tight caps).
  pull(cwd, { fetchTimeoutMs = 3_000, checkoutTimeoutMs = 2_000 } = {}) {
    // I3: env escape hatch for local-only or demo environments
    if (process.env.RELAY_SKIP_PULL) return;

    // I3: short-circuit if no remote configured — avoids 50ms spawn + guaranteed failure
    try {
      git(['config', '--get', 'remote.origin.url'], cwd, { timeout: 1_000 });
    } catch {
      return;
    }

    try {
      git(['fetch', '--quiet'], cwd, { timeout: fetchTimeoutMs });
    } catch (e) {
      appendLog(cwd, `sync.pull: fetch failed: ${e.message}`);
      return;
    }

    let remote;
    try {
      // Prefer the configured upstream tracking branch
      remote = git(['rev-parse', '--abbrev-ref', '--symbolic-full-name', '@{upstream}'], cwd, { timeout: 2_000 });
    } catch {
      // No upstream configured — fall back to origin/<current branch>
      try {
        const branch = git(['rev-parse', '--abbrev-ref', 'HEAD'], cwd, { timeout: 2_000 });
        remote = `origin/${branch}`;
      } catch {
        appendLog(cwd, 'sync.pull: cannot determine remote tracking branch');
        return;
      }
    }

    // Checkout only .relay/ tracked files — no full rebase, no touch on user working tree
    try { git(['checkout', remote, '--', '.relay/memory.md'], cwd, { timeout: checkoutTimeoutMs }); } catch {}
    try { git(['checkout', remote, '--', '.relay/broadcast'], cwd, { timeout: checkoutTimeoutMs }); } catch {}
  }

  // Commit and push .relay/ changes. Retries on non-fast-forward with rebase.
  // Any user-staged (non-.relay) files are temporarily unstaged around the
  // relay commit and re-staged afterwards, in both success and failure paths.
  push(cwd, sessionId) {
    const shortId = (sessionId || 'unknown').slice(0, 8);

    // Stage each path separately — if broadcast dir doesn't exist yet, don't abort the whole push
    for (const p of ['.relay/memory.md', '.relay/broadcast']) {
      try {
        git(['add', p], cwd, { timeout: 5_000 });
      } catch (e) {
        appendLog(cwd, `sync.push: add ${p} failed (skipping): ${e.message}`);
      }
    }

    // One diff call: get all staged names, bail if no relay files, unstage any non-relay files.
    let nonRelayStaged = [];
    try {
      const staged = git(['diff', '--cached', '--name-only'], cwd, { timeout: 2_000 });
      const allStaged = (staged || '').split('\n').filter(Boolean);
      if (!allStaged.some((f) => f.startsWith('.relay/'))) return; // nothing relay to commit
      nonRelayStaged = allStaged.filter((f) => !f.startsWith('.relay/'));
      if (nonRelayStaged.length > 0) {
        git(['reset', 'HEAD', '--', ...nonRelayStaged], cwd, { timeout: 5_000 });
      }
    } catch (e) {
      appendLog(cwd, `sync.push: could not check staged files: ${e.message}`);
    }

    try {
      // I1: commit message as array arg — shortId never touches a shell
      git(['commit', '-m', `[relay] memory update (session ${shortId})`], cwd, { timeout: 5_000 });
    } catch (e) {
      appendLog(cwd, `sync.push: commit failed: ${e.message}`);
      // Re-stage user files before returning so we don't silently unstage their work
      if (nonRelayStaged.length > 0) {
        try { git(['add', '--', ...nonRelayStaged], cwd, { timeout: 5_000 }); } catch {}
      }
      return;
    }

    // Re-stage user files after relay commit
    if (nonRelayStaged.length > 0) {
      try { git(['add', '--', ...nonRelayStaged], cwd, { timeout: 5_000 }); } catch {}
    }

    // Up to 3 push attempts; between attempts, rebase onto the remote.
    for (let attempt = 0; attempt < 3; attempt++) {
      try {
        git(['push', 'origin', 'HEAD'], cwd, { timeout: 10_000 });
        return; // success
      } catch {
        if (attempt < 2) {
          const rebased = this._rebase(cwd);
          if (!rebased) return; // conflict rolled back — stop retrying
        } else {
          appendLog(cwd, 'sync.push: failed after 3 attempts — leaving commit local');
        }
      }
    }
  }

  // Fetch + rebase onto FETCH_HEAD. On conflict: abort, advance local HEAD to FETCH_HEAD
  // via reset --mixed (safe: doesn't touch working tree outside .relay/), reset turns.
  // Returns true if rebase succeeded (push can retry), false if conflict (caller stops).
  _rebase(cwd) {
    try {
      git(['fetch', '--quiet'], cwd, { timeout: 5_000 });
      git(['rebase', 'FETCH_HEAD'], cwd, { timeout: 10_000 });
      return true;
    } catch {
      try { git(['rebase', '--abort'], cwd, { timeout: 5_000 }); } catch {}

      // C1 fix: advance local HEAD to FETCH_HEAD so repo is no longer behind remote.
      // reset --mixed moves HEAD + index to FETCH_HEAD without touching working tree
      // (user's in-progress code changes are preserved; only .relay/ files differ).
      try { git(['reset', '--mixed', 'FETCH_HEAD'], cwd, { timeout: 2_000 }); } catch {}

      // Sync .relay/ tracked files in working tree to remote version
      try { git(['checkout', 'FETCH_HEAD', '--', '.relay/memory.md'], cwd, { timeout: 2_000 }); } catch {}
      try { git(['checkout', 'FETCH_HEAD', '--', '.relay/broadcast'], cwd, { timeout: 2_000 }); } catch {}

      // Reset turn counter so next Stop hook triggers a fresh distillation
      resetWatermarkTurns(cwd);
      appendLog(cwd, 'sync._rebase: conflict — advanced to FETCH_HEAD, re-distill queued');
      return false;
    }
  }

  // Advisory local lock. Prevents two concurrent distillers from double-pushing on same machine.
  // Returns a release function. Throws Error('LOCKED') if a fresh lock exists.
  // NOTE(review): an earlier comment here described an M4 edge case involving
  // `reset --mixed HEAD~1` on a repo's very first commit; _rebase() now resets
  // to FETCH_HEAD, so that edge case no longer applies — the lock is released
  // correctly in all paths via the returned closure.
  lock(cwd) {
    const lockPath = path.join(cwd, '.relay', 'state', '.lock');
    fs.mkdirSync(path.dirname(lockPath), { recursive: true });

    // I4: 'wx' flag is atomic — fails with EEXIST if file already exists (no TOCTOU window)
    try {
      const fd = fs.openSync(lockPath, 'wx');
      fs.writeSync(fd, new Date().toISOString());
      fs.closeSync(fd);
    } catch (e) {
      if (e.code !== 'EEXIST') throw e;
      // File exists — check if stale
      try {
        const age = Date.now() - fs.statSync(lockPath).mtimeMs;
        if (age < 60_000) throw new Error('LOCKED');
        // Stale (> 60s) — steal it atomically: unlink first, then re-acquire with 'wx'.
        // This avoids the TOCTOU race where two processes both observe stale age and both
        // open with 'w' (non-exclusive), both believing they hold the lock.
      } catch (e2) {
        if (e2.message === 'LOCKED') throw e2;
        // stat failed (e.g. ENOENT — file vanished between EEXIST and stat).
        // Fall through to steal attempt — unlink is a no-op if file is gone.
      }
      // Atomic steal: unlink stale lock, then acquire exclusively.
      // If another process steals it first, our 'wx' open throws EEXIST → LOCKED.
      try { fs.unlinkSync(lockPath); } catch {}
      const fd = fs.openSync(lockPath, 'wx'); // throws EEXIST if stolen by another process
      fs.writeSync(fd, new Date().toISOString());
      fs.closeSync(fd);
    }

    // Release: force-remove so a missing lock file is not an error.
    return () => { try { fs.rmSync(lockPath, { force: true }); } catch {} };
  }
}
|
|
@@ -0,0 +1,120 @@
|
|
|
1
|
+
import fs from 'node:fs';
|
|
2
|
+
import readline from 'node:readline';
|
|
3
|
+
|
|
4
|
+
// Truncation caps for rendered transcript text fed to the distiller —
// keeps giant tool outputs/inputs from drowning the conversational signal.
const MAX_TOOL_RESULT_CHARS = 800; // max chars kept from a tool_result body
const MAX_TOOL_INPUT_CHARS = 400; // max chars kept from a serialized tool_use input
|
|
6
|
+
|
|
7
|
+
/**
 * Stream-parse a Claude Code transcript (JSONL) into an array of objects.
 * Empty lines are skipped; malformed JSON lines are counted, skipped, and
 * reported once on stderr rather than aborting the whole read.
 *
 * @param {string} filePath - path to the .jsonl transcript
 * @returns {Promise<Array<object>>} parsed entries in file order
 */
export async function readTranscriptLines(filePath) {
  const entries = [];
  let malformed = 0;
  const reader = readline.createInterface({
    input: fs.createReadStream(filePath, { encoding: 'utf8' }),
    crlfDelay: Infinity, // treat \r\n as a single line break
  });
  for await (const rawLine of reader) {
    if (!rawLine) continue;
    try {
      entries.push(JSON.parse(rawLine));
    } catch {
      malformed++;
    }
  }
  if (malformed > 0) {
    process.stderr.write(`[relay] transcript: ${malformed} malformed JSONL line(s) skipped in ${filePath}\n`);
  }
  return entries;
}
|
|
27
|
+
|
|
28
|
+
/**
 * Return only the entries after the watermark entry. When no watermark is
 * given, or the watermark uuid is not found (e.g. transcript was rotated),
 * the full array is returned so no turns are lost.
 */
export function sliceSinceUuid(lines, watermarkUuid) {
  if (!watermarkUuid) return lines;
  const markerIndex = lines.findIndex((entry) => entry?.uuid === watermarkUuid);
  return markerIndex < 0 ? lines : lines.slice(markerIndex + 1);
}
|
|
34
|
+
|
|
35
|
+
/**
 * UUID of the last entry that carries one, or null when none does.
 * Used to record the watermark after a distillation pass.
 */
export function lastUuid(lines) {
  const marked = lines.findLast((entry) => entry && entry.uuid);
  return marked ? marked.uuid : null;
}
|
|
41
|
+
|
|
42
|
+
/**
 * Cap a string at n characters, appending a note with how much was cut.
 * Falsy input (undefined, null, '') yields ''.
 */
function truncate(s, n) {
  if (!s) return '';
  return s.length <= n ? s : `${s.slice(0, n)}... [truncated ${s.length - n} chars]`;
}
|
|
47
|
+
|
|
48
|
+
/**
 * Serialize a tool_use input object for the rendered transcript, capped at
 * MAX_TOOL_INPUT_CHARS. Unserializable inputs (cycles, BigInt) become a
 * placeholder instead of throwing.
 */
function stringifyInput(input) {
  if (!input) return '';
  let serialized;
  try {
    serialized = JSON.stringify(input);
  } catch {
    return '[unserializable input]';
  }
  return truncate(serialized, MAX_TOOL_INPUT_CHARS);
}
|
|
56
|
+
|
|
57
|
+
/**
 * Render a user message's content to plain text for the distiller.
 * Plain-string content is trimmed as-is; block arrays are rendered per
 * block type (text, tool_result with truncated body + ERROR tag, image
 * placeholder); anything else yields ''.
 */
function renderUserContent(content) {
  if (typeof content === 'string') return content.trim();
  if (!Array.isArray(content)) return '';
  const rendered = [];
  for (const block of content) {
    if (!block) continue;
    switch (block.type) {
      case 'text':
        if (block.text) rendered.push(block.text.trim());
        break;
      case 'tool_result': {
        // Body may be a plain string or an array of content blocks.
        let body = '';
        if (typeof block.content === 'string') {
          body = block.content;
        } else if (Array.isArray(block.content)) {
          body = block.content
            .map((c) => (c && c.type === 'text' ? c.text : ''))
            .join('\n');
        }
        const errTag = block.is_error ? ' ERROR' : '';
        rendered.push(`[tool_result${errTag}] ${truncate(body.trim(), MAX_TOOL_RESULT_CHARS)}`);
        break;
      }
      case 'image':
        rendered.push('[image omitted]');
        break;
    }
  }
  return rendered.join('\n').trim();
}
|
|
82
|
+
|
|
83
|
+
/**
 * Render an assistant message's content blocks to plain text.
 * Thinking blocks are always dropped (internal reasoning, not signal);
 * tool_use blocks become "[tool_use <name>] <truncated input>".
 */
function renderAssistantContent(content) {
  if (!Array.isArray(content)) return '';
  const rendered = [];
  for (const block of content) {
    if (!block || block.type === 'thinking') continue; // skip internal reasoning
    if (block.type === 'tool_use') {
      rendered.push(`[tool_use ${block.name}] ${stringifyInput(block.input)}`);
    } else if (block.type === 'text' && block.text) {
      rendered.push(block.text.trim());
    }
  }
  return rendered.join('\n').trim();
}
|
|
97
|
+
|
|
98
|
+
/**
 * Render parsed transcript lines into the numbered-turn text format the
 * distiller prompt consumes. Sidechain entries are excluded; turns that
 * render to empty text are skipped and do not consume a turn number.
 *
 * @param {Array<object>} lines - parsed transcript JSONL entries
 * @returns {string} "[turn N, role]\n<text>" sections joined by blank lines
 */
export function renderForDistiller(lines) {
  const sections = [];
  let turnNumber = 0;
  for (const entry of lines) {
    if (!entry || entry.isSidechain) continue;
    const role = entry.type;
    if (role !== 'user' && role !== 'assistant') continue;
    const msg = entry.message;
    if (!msg) continue;
    const text = role === 'user'
      ? renderUserContent(msg.content)
      : renderAssistantContent(msg.content);
    if (!text) continue;
    turnNumber++;
    sections.push(`[turn ${turnNumber}, ${role}]\n${text}`);
  }
  return sections.join('\n\n');
}
|
package/lib/util.mjs
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import fs from 'node:fs';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import { fileURLToPath } from 'node:url';
|
|
4
|
+
|
|
5
|
+
/**
 * Read all of stdin and decode it as UTF-8.
 * Chunks are concatenated as Buffers before decoding so multi-byte
 * characters split across chunk boundaries are decoded correctly.
 * @returns {Promise<string>}
 */
export async function readStdin() {
  let collected = Buffer.alloc(0);
  for await (const chunk of process.stdin) {
    collected = Buffer.concat([collected, Buffer.from(chunk)]);
  }
  return collected.toString('utf8');
}
|
|
10
|
+
|
|
11
|
+
/**
 * True when the module identified by metaUrl is the process entry point.
 * Compares realpaths so junctions/symlinks (e.g. a plugin registered via
 * junction into ~/.claude/plugins/relay/) still match; falls back to a
 * plain resolved-path comparison when realpath fails (e.g. ENOENT).
 *
 * @param {string} metaUrl - the caller's import.meta.url
 * @returns {boolean}
 */
export function isMain(metaUrl) {
  const entry = process.argv[1];
  if (!entry) return false;
  const resolvedEntry = path.resolve(entry);
  try {
    const resolvedMeta = path.resolve(fileURLToPath(metaUrl));
    return fs.realpathSync(resolvedEntry) === fs.realpathSync(resolvedMeta);
  } catch {
    return resolvedEntry === path.resolve(fileURLToPath(metaUrl));
  }
}
|
package/package.json
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@ssm-08/relay",
|
|
3
|
+
"version": "0.4.0",
|
|
4
|
+
"type": "module",
|
|
5
|
+
"description": "Shared team memory across Claude Code sessions",
|
|
6
|
+
"author": "ssm-08",
|
|
7
|
+
"license": "Apache-2.0",
|
|
8
|
+
"repository": {
|
|
9
|
+
"type": "git",
|
|
10
|
+
"url": "git+https://github.com/ssm-08/relay.git"
|
|
11
|
+
},
|
|
12
|
+
"homepage": "https://ssm-08.github.io/relay/",
|
|
13
|
+
"keywords": [
|
|
14
|
+
"claude",
|
|
15
|
+
"claude-code",
|
|
16
|
+
"ai",
|
|
17
|
+
"memory",
|
|
18
|
+
"team",
|
|
19
|
+
"plugin"
|
|
20
|
+
],
|
|
21
|
+
"bin": {
|
|
22
|
+
"relay": "bin/relay.mjs"
|
|
23
|
+
},
|
|
24
|
+
"files": [
|
|
25
|
+
"bin/",
|
|
26
|
+
"hooks/",
|
|
27
|
+
"lib/",
|
|
28
|
+
"prompts/",
|
|
29
|
+
"commands/",
|
|
30
|
+
".claude-plugin/",
|
|
31
|
+
"scripts/installer.mjs",
|
|
32
|
+
"distiller.mjs",
|
|
33
|
+
"LICENSE"
|
|
34
|
+
],
|
|
35
|
+
"scripts": {
|
|
36
|
+
"test": "node --test --test-concurrency=1 tests/*.test.mjs",
|
|
37
|
+
"test:e2e": "node scripts/test-e2e.mjs"
|
|
38
|
+
},
|
|
39
|
+
"engines": {
|
|
40
|
+
"node": ">=20"
|
|
41
|
+
}
|
|
42
|
+
}
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
You maintain a shared team memory file for an active software project. Every teammate's Claude Code session draws from this file to start warm. Your job: merge a new session transcript slice into the existing memory, keeping ONLY what a teammate joining fresh would genuinely need to avoid wasted time, contradictory decisions, or breaking something deliberately left in a known state.
|
|
2
|
+
|
|
3
|
+
## Hard rules
|
|
4
|
+
|
|
5
|
+
1. **Replace superseded entries.** If a new decision contradicts an old one, remove the old — never stack contradictions.
|
|
6
|
+
2. **Remove resolved workarounds.** If the transcript shows a workaround was fixed or removed, delete it from "Live workarounds".
|
|
7
|
+
3. **Never append blindly.** Every entry you keep must still be true and load-bearing right now. Cull stale entries aggressively.
|
|
8
|
+
4. **No code snippets.** Reference files/functions by path (e.g. `hooks/session-start.mjs:42`). No copy-pasted code.
|
|
9
|
+
5. **No conversation quotes.** Extract the conclusion, not the discussion. No "Alice said…" — just the fact.
|
|
10
|
+
6. **Max 30 lines per section.** When near the limit, cull the least load-bearing entry.
|
|
11
|
+
7. **Tag provenance.** Append `[session <short-id>, turn <n>]` to every entry you add or keep. Use the session id and turn numbers from the transcript. Keep tags on existing entries unchanged unless the fact itself is updated.
|
|
12
|
+
8. **If nothing new qualifies, return the existing memory unchanged.** Do not rephrase for its own sake.
|
|
13
|
+
|
|
14
|
+
## The guiding question
|
|
15
|
+
|
|
16
|
+
For every entry you add or keep, ask:
|
|
17
|
+
|
|
18
|
+
> Would a new Claude opening this project in 10 minutes genuinely need this, or is this noise?
|
|
19
|
+
|
|
20
|
+
If the honest answer is "noise," drop it.
|
|
21
|
+
|
|
22
|
+
## What counts as signal
|
|
23
|
+
|
|
24
|
+
- **Decisions** — tech picks, architectural choices, scope calls. Only the resolved choice, not the deliberation.
|
|
25
|
+
- **Rejected paths** — approaches tried and abandoned, with the one-line reason. Prevents re-litigation.
|
|
26
|
+
- **Live workarounds** — deliberate shortcuts currently in the code that look like bugs but are intentional. E.g. hardcoded values, disabled checks, stubbed integrations.
|
|
27
|
+
- **Known broken state** — things currently non-functional that the team consciously deferred. Must be both: (a) broken and (b) intentionally not fixed right now. E.g. a test disabled pending investigation, a feature failing a known edge case, a CI check temporarily bypassed. Remove once fixed.
|
|
28
|
+
- **Scope changes** — things explicitly cut or added mid-build. Keep only if they affect what someone should/shouldn't touch.
|
|
29
|
+
- **Open questions** — blocking unknowns the team is still resolving. Remove once answered.
|
|
30
|
+
|
|
31
|
+
## What is NOT signal (drop even if mentioned)
|
|
32
|
+
|
|
33
|
+
- Tool-call noise, file lists, grep output, build output.
|
|
34
|
+
- Code the assistant wrote (the code is the code — memory is for *why*).
|
|
35
|
+
- Commentary on what the model is about to do.
|
|
36
|
+
- Generic advice, tutorials, or restated requirements from docs.
|
|
37
|
+
- Anything already obvious from `README.md`, `CLAUDE.md`, or a 30-second codebase scan.
|
|
38
|
+
|
|
39
|
+
## Output format
|
|
40
|
+
|
|
41
|
+
Output the full new `memory.md` file content. Nothing else. No preamble, no trailing notes, no explanation of changes.
|
|
42
|
+
|
|
43
|
+
Use these sections in this order. Omit a section entirely if empty (don't leave a bare header).
|
|
44
|
+
|
|
45
|
+
```
|
|
46
|
+
## Decisions
|
|
47
|
+
- <fact> [session <id>, turn <n>]
|
|
48
|
+
|
|
49
|
+
## Rejected paths
|
|
50
|
+
- <approach>: <one-line reason> [session <id>, turn <n>]
|
|
51
|
+
|
|
52
|
+
## Live workarounds
|
|
53
|
+
- <file or area>: <what's intentional> [session <id>, turn <n>]
|
|
54
|
+
|
|
55
|
+
## Known broken state
|
|
56
|
+
- <what's broken>: <why deferred / unblocking condition> [session <id>, turn <n>]
|
|
57
|
+
|
|
58
|
+
## Scope changes
|
|
59
|
+
- <what changed> [session <id>, turn <n>]
|
|
60
|
+
|
|
61
|
+
## Open questions
|
|
62
|
+
- <question> [session <id>, turn <n>]
|
|
63
|
+
```
|
|
64
|
+
|
|
65
|
+
Total memory should stay under ~60 lines on a 2-hour transcript. If you're tempted to add a 31st entry to a section, you should be deleting one first.
|