clementine-agent 1.1.21 → 1.1.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/agent/brain-digest.d.ts +64 -0
- package/dist/agent/brain-digest.js +196 -0
- package/dist/cli/index.js +273 -8
- package/dist/cli/version-check.d.ts +35 -0
- package/dist/cli/version-check.js +147 -0
- package/dist/index.js +8 -2
- package/dist/types.d.ts +4 -0
- package/package.json +1 -1
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
/**
 * Cross-agent brain digest.
 *
 * Aggregates raw signals from across the team (memory recurrence, cron
 * activity, memory growth) and runs a single LLM synthesis pass to
 * produce a leadable markdown narrative — what the team accomplished,
 * what they learned in common, where to lead next.
 *
 * Intended caller: `clementine brain digest` CLI for v1; cron entry +
 * heartbeat-side proactive surfacing for v2.
 */
import type { AgentManager } from './agent-manager.js';
import type { MemoryStore } from '../memory/store.js';
import type { PersonalAssistant } from './assistant.js';
export interface BrainDigestInputs {
    /** Length of the lookback window, in days. */
    windowDays: number;
    /** Full team roster (slug + display name) at gather time. */
    agents: Array<{
        slug: string;
        name: string;
    }>;
    /** Clusters of similar memory chunks recurring across multiple agents. */
    crossAgentClusters: Array<{
        agents: string[];
        representativeContent: string;
        representativeSource: string;
        memberCount: number;
    }>;
    /** Per-job summary of runs in the window. */
    cronRunsByJob: Array<{
        jobName: string;
        agentSlug: string | null;
        runs: number;
        failures: number;
    }>;
    /** Chunk count growth per agent in the window — proxy for "what they worked on". */
    memoryDeltas: Array<{
        agentSlug: string;
        chunksAdded: number;
    }>;
}
/** Aggregate raw signals — pure data, no LLM call. */
export declare function gatherBrainDigestInputs(opts: {
    agentManager: AgentManager;
    memoryStore: MemoryStore;
    baseDir: string;
    windowDays: number;
}): BrainDigestInputs;
/**
 * Format the raw inputs as a single text block the LLM can synthesize.
 * Kept terse — the LLM does the heavy lifting of pattern surfacing.
 */
export declare function formatRawMaterial(inputs: BrainDigestInputs): string;
export declare function runBrainDigest(opts: {
    assistant: PersonalAssistant;
    agentManager: AgentManager;
    memoryStore: MemoryStore;
    baseDir: string;
    windowDays?: number;
    model?: string;
}): Promise<{
    markdown: string;
    inputs: BrainDigestInputs;
}>;
//# sourceMappingURL=brain-digest.d.ts.map
|
|
@@ -0,0 +1,196 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Cross-agent brain digest.
|
|
3
|
+
*
|
|
4
|
+
* Aggregates raw signals from across the team (memory recurrence, cron
|
|
5
|
+
* activity, memory growth) and runs a single LLM synthesis pass to
|
|
6
|
+
* produce a leadable markdown narrative — what the team accomplished,
|
|
7
|
+
* what they learned in common, where to lead next.
|
|
8
|
+
*
|
|
9
|
+
* Intended caller: `clementine brain digest` CLI for v1; cron entry +
|
|
10
|
+
* heartbeat-side proactive surfacing for v2.
|
|
11
|
+
*/
|
|
12
|
+
import { existsSync, readFileSync, readdirSync } from 'node:fs';
|
|
13
|
+
import path from 'node:path';
|
|
14
|
+
import pino from 'pino';
|
|
15
|
+
// Module-scoped structured logger; all digest diagnostics go through it.
const logger = pino({ name: 'clementine.brain-digest' });
|
16
|
+
/**
 * Aggregate raw signals — pure data, no LLM call.
 *
 * Collects three signal families for the last `opts.windowDays` days:
 * cross-agent memory recurrence, cron run rollups, and per-agent memory
 * growth. Each family is best-effort: a failing source degrades to an
 * empty list rather than aborting the digest.
 */
export function gatherBrainDigestInputs(opts) {
    // Window start, as both epoch millis and ISO (the ISO form feeds SQL/file scans).
    const windowStartMs = Date.now() - opts.windowDays * 86_400_000;
    const windowStartIso = new Date(windowStartMs).toISOString();
    const roster = opts.agentManager.listAll().map(agent => ({ slug: agent.slug, name: agent.name }));
    // 1. Cross-agent memory recurrence — facts/topics surfaced by 2+ agents.
    let recurrence = [];
    try {
        recurrence = opts.memoryStore
            .findCrossAgentRecurrence({
                threshold: 0.85,
                minAgents: 2,
                limit: 20,
            })
            .map(cluster => ({
                agents: cluster.agents,
                representativeContent: cluster.representative.content.slice(0, 400),
                representativeSource: `${cluster.representative.sourceFile}>${cluster.representative.section}`,
                memberCount: cluster.members.length,
            }));
    }
    catch (err) {
        logger.debug({ err }, 'Cross-agent recurrence scan failed — continuing with empty list');
    }
    return {
        windowDays: opts.windowDays,
        agents: roster,
        crossAgentClusters: recurrence,
        // 2. Cron run summary — walk cron/runs/*.jsonl, filter to the window.
        cronRunsByJob: gatherCronRunsByJob(opts.baseDir, windowStartIso),
        // 3. Memory deltas — chunk growth per agent in the window.
        memoryDeltas: gatherMemoryDeltas(opts.memoryStore, windowStartIso),
    };
}
|
|
51
|
+
/**
 * Summarize cron runs per job within the window.
 *
 * Walks `<baseDir>/cron/runs/*.jsonl`, counts runs and failures whose
 * timestamp is at/after `sinceIso`, and returns per-job rollups sorted
 * busiest-first. Unreadable files and unparseable lines are skipped —
 * this is observability, not critical state.
 */
function gatherCronRunsByJob(baseDir, sinceIso) {
    const runsDir = path.join(baseDir, 'cron', 'runs');
    if (!existsSync(runsDir))
        return [];
    const windowStartMs = Date.parse(sinceIso);
    const rollups = [];
    const logFiles = readdirSync(runsDir).filter(name => name.endsWith('.jsonl'));
    for (const logFile of logFiles) {
        let rawLines;
        try {
            rawLines = readFileSync(path.join(runsDir, logFile), 'utf-8').split('\n').filter(Boolean);
        }
        catch {
            continue; // unreadable run log — skip it
        }
        const jobName = logFile.replace(/\.jsonl$/, '');
        // jobNames may be agent-scoped: "<agent-slug>:<job>"
        const scoped = jobName.split(':');
        const agentSlug = scoped.length > 1 ? scoped[0] : null;
        let runs = 0;
        let failures = 0;
        for (const rawLine of rawLines) {
            let entry;
            try {
                entry = JSON.parse(rawLine);
            }
            catch {
                continue; // corrupt line — skip it
            }
            const ts = entry.startedAt ?? entry.finishedAt;
            if (!ts || Date.parse(ts) < windowStartMs)
                continue;
            runs++;
            if (entry.status === 'error')
                failures++;
        }
        if (runs > 0)
            rollups.push({ jobName, agentSlug, runs, failures });
    }
    return rollups.sort((a, b) => b.runs - a.runs);
}
|
|
97
|
+
/**
 * Chunk-count growth per agent since `sinceIso`.
 *
 * Reach into the underlying connection — same pattern memory-tools.ts uses.
 * Best-effort: any query failure (e.g. missing table) returns [] so the
 * digest can still render from the other signal families.
 */
function gatherMemoryDeltas(memoryStore, sinceIso) {
    const conn = memoryStore.conn;
    // NOTE: SQL whitespace is cosmetic; the query groups new/updated chunks
    // by owning agent (NULL slug folded into 'global'), busiest first.
    const sql = `SELECT COALESCE(agent_slug, 'global') as agentSlug, COUNT(*) as chunksAdded
         FROM chunks
         WHERE updated_at >= ?
         GROUP BY agent_slug
         ORDER BY chunksAdded DESC`;
    try {
        return conn.prepare(sql).all(sinceIso);
    }
    catch (err) {
        logger.debug({ err }, 'Memory delta query failed — continuing with empty list');
        return [];
    }
}
|
|
115
|
+
/**
 * Format the raw inputs as a single text block the LLM can synthesize.
 * Kept terse — the LLM does the heavy lifting of pattern surfacing.
 *
 * Sections always appear in the same order (window, roster, cron, memory,
 * recurrence); empty signal families render an explicit placeholder so the
 * model knows the data was checked, not missing.
 */
export function formatRawMaterial(inputs) {
    const plural = (n) => (n === 1 ? '' : 's');
    const sections = [];
    sections.push(`## Window\nLast ${inputs.windowDays} days.`);
    const rosterBody = inputs.agents.length === 0
        ? '(no specialist agents)'
        : inputs.agents.map(a => `- ${a.name} (${a.slug})`).join('\n');
    sections.push(`## Team roster\n${rosterBody}`);
    if (inputs.cronRunsByJob.length === 0) {
        sections.push(`## Cron activity\n(no autonomous runs in window)`);
    }
    else {
        // Cap at 20 jobs — rollups arrive pre-sorted busiest-first.
        const cronLines = inputs.cronRunsByJob.slice(0, 20).map(job => {
            const agentTag = job.agentSlug ? ` [${job.agentSlug}]` : '';
            const failTag = job.failures > 0 ? ` — ${job.failures} failure${plural(job.failures)}` : '';
            return `- ${job.jobName}${agentTag}: ${job.runs} run${plural(job.runs)}${failTag}`;
        });
        sections.push(`## Cron activity\n${cronLines.join('\n')}`);
    }
    if (inputs.memoryDeltas.length === 0) {
        sections.push(`## Memory growth\n(no new chunks in window)`);
    }
    else {
        const deltaLines = inputs.memoryDeltas.map(d => `- ${d.agentSlug}: +${d.chunksAdded} chunks`);
        sections.push(`## Memory growth\n${deltaLines.join('\n')}`);
    }
    if (inputs.crossAgentClusters.length === 0) {
        sections.push(`## Cross-agent recurrence\n(no facts surfaced from 2+ agents)`);
    }
    else {
        // Cap at 12 clusters; previews are flattened to one line and truncated.
        const clusterLines = inputs.crossAgentClusters.slice(0, 12).map((cluster, idx) => {
            const preview = cluster.representativeContent.replace(/\n/g, ' ').slice(0, 200);
            const ellipsis = preview.length >= 200 ? '…' : '';
            return `${idx + 1}. agents: ${cluster.agents.join(', ')} (${cluster.memberCount} chunks)\n   "${preview}${ellipsis}"`;
        });
        sections.push(`## Cross-agent recurrence\n${clusterLines.join('\n')}`);
    }
    return sections.join('\n\n');
}
|
|
153
|
+
const SYNTHESIS_SYSTEM_PROMPT = `You are Clementine, the master assistant. Your team of specialist agents has been working autonomously, and you need to write a **brain digest** — a leadable summary of what happened over the window, what the team learned in common, and where you should lead them next.
|
|
154
|
+
|
|
155
|
+
Format the digest as markdown:
|
|
156
|
+
|
|
157
|
+
# Brain Digest — last {N} days
|
|
158
|
+
|
|
159
|
+
## What happened
|
|
160
|
+
2-3 sentence overview of activity. Be specific about who did what.
|
|
161
|
+
|
|
162
|
+
## What we learned together
|
|
163
|
+
The cross-agent recurrence section shows facts/topics that surfaced from MULTIPLE agents — these are the team's emerging shared knowledge. List the 3-5 most meaningful patterns. If empty, say "Nothing recurred across agents this window — the team's still working in parallel silos."
|
|
164
|
+
|
|
165
|
+
## Where to lead
|
|
166
|
+
2-3 concrete priorities or follow-ups based on what you see. What's the team's biggest opportunity? What's at risk? What's a clear next move?
|
|
167
|
+
|
|
168
|
+
## Per-agent highlights
|
|
169
|
+
One bullet per active agent — what they worked on, status (healthy / quiet / failing). Skip agents with no activity.
|
|
170
|
+
|
|
171
|
+
**Style rules:**
|
|
172
|
+
- Lead with what matters. Don't list raw data — synthesize.
|
|
173
|
+
- Be honest about sparse data. If the window is quiet, say so. Don't pad.
|
|
174
|
+
- Under 400 words total. Cut anything that doesn't help you lead the team.
|
|
175
|
+
- No greeting, no sign-off — this is a working document.
|
|
176
|
+
`;
|
|
177
|
+
/**
 * Gather raw cross-agent signals and run one LLM synthesis pass over them.
 *
 * Defaults to a 7-day window and the 'sonnet' model when not specified.
 * Returns both the digest markdown and the raw inputs it was built from,
 * so callers can surface provenance alongside the narrative.
 */
export async function runBrainDigest(opts) {
    const windowDays = opts.windowDays ?? 7;
    const inputs = gatherBrainDigestInputs({
        agentManager: opts.agentManager,
        memoryStore: opts.memoryStore,
        baseDir: opts.baseDir,
        windowDays,
    });
    // Single prompt: synthesis instructions (with the window substituted in)
    // followed by the raw signal dump.
    const instructions = SYNTHESIS_SYSTEM_PROMPT.replace('{N}', String(windowDays));
    const prompt = `${instructions}\n\n---\n\n# Raw signals\n\n${formatRawMaterial(inputs)}`;
    logger.info({ windowDays, agents: inputs.agents.length, clusters: inputs.crossAgentClusters.length, jobs: inputs.cronRunsByJob.length }, 'Running brain digest synthesis');
    const markdown = await opts.assistant.runPlanStep('brain-digest', prompt, {
        tier: 1,
        maxTurns: 3,
        model: opts.model ?? 'sonnet',
        disableTools: true, // synthesis only — no tool calls
    });
    return { markdown: markdown.trim(), inputs };
}
|
|
196
|
+
//# sourceMappingURL=brain-digest.js.map
|
package/dist/cli/index.js
CHANGED
|
@@ -428,17 +428,22 @@ async function cmdRestart(options) {
|
|
|
428
428
|
}
|
|
429
429
|
}
|
|
430
430
|
function cmdStatus() {
|
|
431
|
+
const DIM = '\x1b[0;90m';
|
|
432
|
+
const RESET = '\x1b[0m';
|
|
431
433
|
const pid = readPid();
|
|
432
434
|
const name = getAssistantName();
|
|
435
|
+
const localVersion = readPkgVersion(PACKAGE_ROOT);
|
|
433
436
|
if (!pid) {
|
|
434
|
-
console.log(` ${name} is not running (no PID file).`);
|
|
437
|
+
console.log(` ${name} is not running ${DIM}(no PID file, v${localVersion})${RESET}.`);
|
|
438
|
+
surfaceUpdateNudge(localVersion);
|
|
435
439
|
return;
|
|
436
440
|
}
|
|
437
441
|
if (!isProcessAlive(pid)) {
|
|
438
|
-
console.log(` ${name} is not running (stale PID ${pid}).`);
|
|
442
|
+
console.log(` ${name} is not running ${DIM}(stale PID ${pid}, v${localVersion})${RESET}.`);
|
|
443
|
+
surfaceUpdateNudge(localVersion);
|
|
439
444
|
return;
|
|
440
445
|
}
|
|
441
|
-
console.log(` ${name} is running (PID ${pid})`);
|
|
446
|
+
console.log(` ${name} is running ${DIM}(PID ${pid}, v${localVersion})${RESET}`);
|
|
442
447
|
// Show uptime from PID file mtime
|
|
443
448
|
try {
|
|
444
449
|
const { mtimeMs } = statSync(getPidFilePath());
|
|
@@ -468,6 +473,36 @@ function cmdStatus() {
|
|
|
468
473
|
if (channels.length > 0) {
|
|
469
474
|
console.log(` Channels: ${channels.join(', ')}`);
|
|
470
475
|
}
|
|
476
|
+
surfaceUpdateNudge(localVersion);
|
|
477
|
+
}
|
|
478
|
+
/**
 * Print a one-line nudge if a newer version is on npm. Reads the cached
 * result synchronously (no network on the hot path) and fires off an async
 * refresh in the background so the next call has fresh data.
 *
 * Any failure (module load, cache read, refresh kickoff) degrades silently —
 * the nudge is a convenience, never an error.
 */
function surfaceUpdateNudge(localVersion) {
    // ANSI escape sequences for terminal styling.
    const DIM = '\x1b[0;90m';
    const BOLD = '\x1b[1m';
    const YELLOW = '\x1b[1;33m';
    const RESET = '\x1b[0m';
    try {
        const cached = (() => {
            // Lazy require to avoid pulling https/network into trivial CLI calls
            // when the cache module isn't needed.
            // NOTE(review): `require` assumes a CommonJS runtime. The rest of this
            // file uses `await import(...)`; if it is loaded as an ES module,
            // `require` is undefined and the catch below silently suppresses the
            // nudge entirely — confirm the dist module format.
            const { readCachedUpdateCheck } = require('./version-check.js');
            return readCachedUpdateCheck(BASE_DIR, localVersion);
        })();
        if (cached?.updateAvailable && cached.latestVersion) {
            console.log(` ${YELLOW}⬆${RESET} Update available: ${BOLD}v${cached.latestVersion}${RESET} ${DIM}(you're on v${localVersion})${RESET}`);
            console.log(` ${DIM}Run: ${BOLD}clementine update restart${RESET}`);
        }
        // Fire-and-forget background refresh — never blocks status output.
        const { checkForUpdate } = require('./version-check.js');
        void checkForUpdate(BASE_DIR, localVersion).catch(() => { });
    }
    catch {
        // version-check failed to load — degrade silently
    }
}
|
|
472
507
|
function cmdDoctor(opts = {}) {
|
|
473
508
|
const DIM = '\x1b[0;90m';
|
|
@@ -1991,11 +2026,16 @@ program
|
|
|
1991
2026
|
});
|
|
1992
2027
|
program
|
|
1993
2028
|
.command('update')
|
|
1994
|
-
.description('Pull latest code, rebuild, and reinstall (preserves config)')
|
|
1995
|
-
.argument('[action]', 'Optional: "restart"
|
|
2029
|
+
.description('Pull latest code, rebuild, and reinstall (preserves config). Pass "history" to show recent updates.')
|
|
2030
|
+
.argument('[action]', 'Optional: "restart" = restart daemon after update; "history" = show update log')
|
|
1996
2031
|
.option('--restart', 'Restart daemon after update')
|
|
1997
2032
|
.option('--dry-run', 'Preview what would happen without making changes')
|
|
2033
|
+
.option('-n, --limit <n>', 'For history mode: max entries to show', '10')
|
|
1998
2034
|
.action((action, options) => {
|
|
2035
|
+
if (action === 'history') {
|
|
2036
|
+
cmdUpdateHistory(parseInt(options.limit ?? '10', 10));
|
|
2037
|
+
return;
|
|
2038
|
+
}
|
|
1999
2039
|
if (action === 'restart')
|
|
2000
2040
|
options.restart = true;
|
|
2001
2041
|
cmdUpdate(options).catch((err) => {
|
|
@@ -2090,6 +2130,80 @@ configCmd
|
|
|
2090
2130
|
console.error(` Failed to open editor: ${editor}`);
|
|
2091
2131
|
}
|
|
2092
2132
|
});
|
|
2133
|
+
// ── Brain commands ──────────────────────────────────────────────────
const brainCmd = program
    .command('brain')
    .description('Cross-agent synthesis — leadable summaries of what your team learned');
brainCmd
    .command('digest')
    .description('Run a brain digest — synthesize the past N days of cross-agent activity into a leadable narrative')
    .option('-d, --days <n>', 'Window in days', '7')
    .option('-m, --model <model>', 'Model to use for synthesis (sonnet, haiku, opus)', 'sonnet')
    .option('--save', 'Also save the digest to vault/00-System/brain-digests/<date>.md')
    .option('--raw', 'Print the raw signals only — skip the LLM synthesis')
    .action(async (opts) => {
        const BOLD = '\x1b[1m';
        const DIM = '\x1b[0;90m';
        const GREEN = '\x1b[0;32m';
        const RED = '\x1b[0;31m';
        const RESET = '\x1b[0m';
        // Clamp the window to a sane [1, 60] range; bad input falls back to 7.
        const days = Math.max(1, Math.min(60, parseInt(opts.days, 10) || 7));
        process.env.CLEMENTINE_HOME = BASE_DIR;
        delete process.env['CLAUDECODE'];
        try {
            // Lazy-load agent/memory modules — keeps trivial CLI calls fast.
            const { AgentManager } = await import('../agent/agent-manager.js');
            const { MemoryStore } = await import('../memory/store.js');
            const { gatherBrainDigestInputs, formatRawMaterial, runBrainDigest } = await import('../agent/brain-digest.js');
            const VAULT_DIR = path.join(BASE_DIR, 'vault');
            const DB_PATH = path.join(VAULT_DIR, '.memory.db');
            const AGENTS_DIR = path.join(BASE_DIR, 'agents');
            const agentManager = new AgentManager(AGENTS_DIR);
            const memoryStore = new MemoryStore(DB_PATH, VAULT_DIR);
            // Raw mode short-circuits the LLM call — useful for inspecting signals.
            if (opts.raw) {
                const inputs = gatherBrainDigestInputs({ agentManager, memoryStore, baseDir: BASE_DIR, windowDays: days });
                console.log();
                console.log(` ${BOLD}Brain digest — raw signals (${days} days)${RESET}`);
                console.log();
                console.log(formatRawMaterial(inputs));
                console.log();
                return;
            }
            console.log();
            console.log(` ${DIM}Synthesizing brain digest over ${days} days using ${opts.model}…${RESET}`);
            const { PersonalAssistant } = await import('../agent/assistant.js');
            const assistant = new PersonalAssistant();
            // Headless: auto-deny any approval prompts during synthesis.
            const { setApprovalCallback } = await import('../agent/hooks.js');
            setApprovalCallback(async () => false);
            const result = await runBrainDigest({
                assistant,
                agentManager,
                memoryStore,
                baseDir: BASE_DIR,
                windowDays: days,
                model: opts.model,
            });
            console.log();
            console.log(result.markdown);
            console.log();
            console.log(` ${DIM}Sources: ${result.inputs.agents.length} agent(s), ${result.inputs.cronRunsByJob.length} cron job(s) active, ${result.inputs.crossAgentClusters.length} cross-agent cluster(s).${RESET}`);
            if (opts.save) {
                const digestsDir = path.join(VAULT_DIR, '00-System', 'brain-digests');
                mkdirSync(digestsDir, { recursive: true });
                const stamp = new Date().toISOString().slice(0, 10);
                const filename = path.join(digestsDir, `${stamp}-${days}d.md`);
                const fileBody = `---\ntype: brain-digest\ngeneratedAt: ${new Date().toISOString()}\nwindowDays: ${days}\nmodel: ${opts.model}\n---\n\n${result.markdown}\n`;
                writeFileSync(filename, fileBody);
                // FIX: previously printed the literal text "$(unknown)" instead of
                // interpolating the saved file path.
                console.log(` ${GREEN}✓${RESET} Saved to ${DIM}${filename}${RESET}`);
            }
            console.log();
        }
        catch (err) {
            console.error(` ${RED}Error generating brain digest:${RESET} ${err instanceof Error ? err.message : String(err)}`);
            process.exit(1);
        }
    });
|
|
2093
2207
|
// ── Agent commands ──────────────────────────────────────────────────
|
|
2094
2208
|
const agentCmd = program
|
|
2095
2209
|
.command('agent')
|
|
@@ -2583,14 +2697,113 @@ projectsCmd
|
|
|
2583
2697
|
}
|
|
2584
2698
|
});
|
|
2585
2699
|
// ── Update command ──────────────────────────────────────────────────
|
|
2700
|
+
/**
 * Print the last N entries from update-history.jsonl.
 *
 * Reads the append-only JSONL log under BASE_DIR, keeps the most recent
 * `limit` parseable entries (newest first), and renders one styled block
 * per entry: version transition, flavor, duration, commit info, summary,
 * error (on failure), and source-mod reconciliation counts.
 */
function cmdUpdateHistory(limit) {
    // ANSI escape sequences for terminal styling.
    const BOLD = '\x1b[1m';
    const DIM = '\x1b[0;90m';
    const GREEN = '\x1b[0;32m';
    const RED = '\x1b[0;31m';
    const RESET = '\x1b[0m';
    const historyPath = path.join(BASE_DIR, 'update-history.jsonl');
    if (!existsSync(historyPath)) {
        console.log();
        console.log(` ${DIM}No update history yet (${historyPath} doesn't exist).${RESET}`);
        console.log(` Run ${BOLD}clementine update${RESET} once to start the log.`);
        console.log();
        return;
    }
    const lines = readFileSync(historyPath, 'utf-8').split('\n').filter(Boolean);
    // Parse leniently: corrupt lines become null and are dropped; then keep
    // the newest `limit` entries and show them newest-first.
    const entries = lines
        .map(l => { try {
        return JSON.parse(l);
    }
    catch {
        return null;
    } })
        .filter((e) => e !== null)
        .slice(-Math.max(1, limit))
        .reverse();
    if (entries.length === 0) {
        console.log(` ${DIM}History file exists but is empty or unparseable.${RESET}`);
        return;
    }
    console.log();
    console.log(` ${BOLD}Update history${RESET} ${DIM}(${historyPath})${RESET}`);
    console.log();
    for (const e of entries) {
        // Entry fields are untrusted JSON — coerce/guard everything.
        const ts = String(e.timestamp ?? '').slice(0, 19).replace('T', ' ');
        const from = String(e.fromVersion ?? '?');
        const to = String(e.toVersion ?? '?');
        const flavor = String(e.flavor ?? 'git');
        const failed = e.failed === true;
        // "=" when the update was a no-op (same version before and after).
        const arrow = from === to ? '=' : '→';
        const verLabel = failed
            ? `${RED}v${from} ${arrow} v${to} FAILED${RESET}`
            : (from === to ? `${DIM}v${from}${RESET}` : `v${from} ${arrow} ${BOLD}v${to}${RESET}`);
        const dur = typeof e.durationMs === 'number' ? ` ${DIM}(${Math.round(e.durationMs / 1000)}s)${RESET}` : '';
        console.log(` ${DIM}${ts}${RESET} ${verLabel} ${DIM}[${flavor}]${RESET}${dur}`);
        if (typeof e.commitHash === 'string' && e.commitHash) {
            console.log(` ${DIM}commit ${e.commitHash}${e.commitDate ? ` (${e.commitDate})` : ''}, ${e.commitsPulled ?? 0} commit${e.commitsPulled === 1 ? '' : 's'} pulled${RESET}`);
        }
        if (typeof e.summary === 'string' && e.summary) {
            const trimmed = e.summary.length > 100 ? e.summary.slice(0, 100) + '…' : e.summary;
            console.log(` ${DIM}${trimmed}${RESET}`);
        }
        if (failed && typeof e.error === 'string') {
            console.log(` ${RED}error: ${e.error.slice(0, 120)}${RESET}`);
        }
        // Source-mod reconciliation counters — only shown when non-zero.
        const modSummary = [];
        if (typeof e.modsReapplied === 'number' && e.modsReapplied > 0)
            modSummary.push(`${e.modsReapplied} re-applied`);
        if (typeof e.modsSuperseded === 'number' && e.modsSuperseded > 0)
            modSummary.push(`${e.modsSuperseded} superseded`);
        if (typeof e.modsNeedReconciliation === 'number' && e.modsNeedReconciliation > 0)
            modSummary.push(`${e.modsNeedReconciliation} need attention`);
        if (typeof e.modsFailed === 'number' && e.modsFailed > 0)
            modSummary.push(`${e.modsFailed} failed`);
        if (modSummary.length > 0) {
            console.log(` ${DIM}source mods: ${modSummary.join(', ')}${RESET}`);
        }
    }
    console.log();
    console.log(` ${GREEN}Showing ${entries.length}${RESET}${DIM} of ${lines.length} total entries.${RESET}`);
    console.log();
}
|
|
2772
|
+
/**
 * Read the npm version from a package.json (returns 'unknown' on failure).
 *
 * Failure covers every case: missing file, unreadable file, invalid JSON,
 * or a manifest with no `version` field. Never throws.
 */
function readPkgVersion(packageRoot) {
    try {
        const manifestPath = path.join(packageRoot, 'package.json');
        if (!existsSync(manifestPath))
            return 'unknown';
        const manifest = JSON.parse(readFileSync(manifestPath, 'utf-8'));
        return manifest.version ?? 'unknown';
    }
    catch {
        // unreadable / unparseable manifest — treat as unknown
        return 'unknown';
    }
}
|
|
2785
|
+
/**
 * Append one line to the update-history log. Append-only, never throws.
 *
 * Each line is a JSON object stamped with the current ISO timestamp plus
 * whatever fields the caller supplies. The file is created with mode 0600.
 */
function appendUpdateHistory(entry) {
    try {
        const historyPath = path.join(BASE_DIR, 'update-history.jsonl');
        const line = JSON.stringify({ timestamp: new Date().toISOString(), ...entry }) + '\n';
        // NOTE(review): `require` assumes a CommonJS runtime; the rest of this
        // file uses `await import(...)`. If loaded as an ES module, `require`
        // throws and the catch silently drops the history entry — confirm the
        // dist module format.
        require('node:fs').appendFileSync(historyPath, line, { mode: 0o600 });
    }
    catch {
        // Non-fatal — history is observability, not critical state.
    }
}
|
|
2586
2796
|
async function cmdUpdate(options) {
|
|
2587
2797
|
const DIM = '\x1b[0;90m';
|
|
2798
|
+
const BOLD = '\x1b[1m';
|
|
2588
2799
|
const GREEN = '\x1b[0;32m';
|
|
2589
2800
|
const YELLOW = '\x1b[1;33m';
|
|
2590
2801
|
const RED = '\x1b[0;31m';
|
|
2591
2802
|
const RESET = '\x1b[0m';
|
|
2803
|
+
const updateStartedAt = Date.now();
|
|
2804
|
+
const previousVersion = readPkgVersion(PACKAGE_ROOT);
|
|
2592
2805
|
console.log();
|
|
2593
|
-
console.log(` ${DIM}Updating ${getAssistantName()}...${RESET}`);
|
|
2806
|
+
console.log(` ${DIM}Updating ${getAssistantName()} (current: v${previousVersion})...${RESET}`);
|
|
2594
2807
|
console.log();
|
|
2595
2808
|
// 1. Detect install flavor. Two valid paths:
|
|
2596
2809
|
// - git-clone install (PACKAGE_ROOT has .git) → pull + rebuild path below
|
|
@@ -2608,12 +2821,36 @@ async function cmdUpdate(options) {
|
|
|
2608
2821
|
console.log();
|
|
2609
2822
|
try {
|
|
2610
2823
|
execSync('npm install -g clementine-agent@latest', { stdio: 'inherit' });
|
|
2824
|
+
const newVersion = readPkgVersion(PACKAGE_ROOT);
|
|
2611
2825
|
console.log();
|
|
2612
|
-
|
|
2826
|
+
if (previousVersion !== 'unknown' && newVersion !== 'unknown' && previousVersion !== newVersion) {
|
|
2827
|
+
console.log(` ${GREEN}OK${RESET} Updated v${previousVersion} → ${BOLD}v${newVersion}${RESET}`);
|
|
2828
|
+
}
|
|
2829
|
+
else if (previousVersion === newVersion) {
|
|
2830
|
+
console.log(` ${GREEN}OK${RESET} Already on latest (v${newVersion})`);
|
|
2831
|
+
}
|
|
2832
|
+
else {
|
|
2833
|
+
console.log(` ${GREEN}OK${RESET} Updated via npm`);
|
|
2834
|
+
}
|
|
2835
|
+
appendUpdateHistory({
|
|
2836
|
+
flavor: 'npm-global',
|
|
2837
|
+
fromVersion: previousVersion,
|
|
2838
|
+
toVersion: newVersion,
|
|
2839
|
+
durationMs: Date.now() - updateStartedAt,
|
|
2840
|
+
restartRequested: !!options.restart,
|
|
2841
|
+
});
|
|
2613
2842
|
}
|
|
2614
2843
|
catch (err) {
|
|
2615
2844
|
console.error(` ${RED}FAIL${RESET} npm update failed: ${String(err).slice(0, 200)}`);
|
|
2616
2845
|
console.error(` ${YELLOW}Hint${RESET} If you see EACCES, see README "Troubleshooting" for npm prefix setup.`);
|
|
2846
|
+
appendUpdateHistory({
|
|
2847
|
+
flavor: 'npm-global',
|
|
2848
|
+
fromVersion: previousVersion,
|
|
2849
|
+
toVersion: previousVersion,
|
|
2850
|
+
durationMs: Date.now() - updateStartedAt,
|
|
2851
|
+
failed: true,
|
|
2852
|
+
error: String(err).slice(0, 300),
|
|
2853
|
+
});
|
|
2617
2854
|
process.exit(1);
|
|
2618
2855
|
}
|
|
2619
2856
|
if (options.restart) {
|
|
@@ -3152,6 +3389,26 @@ async function cmdUpdate(options) {
|
|
|
3152
3389
|
}).trim().slice(0, 10);
|
|
3153
3390
|
}
|
|
3154
3391
|
catch { /* best effort */ }
|
|
3392
|
+
// Capture the new version once the build is verified — package.json on
|
|
3393
|
+
// disk is now authoritative for the version we're about to run.
|
|
3394
|
+
const newVersion = readPkgVersion(PACKAGE_ROOT);
|
|
3395
|
+
// Persist update history before the restart (in case daemon restart fails,
|
|
3396
|
+
// we still have the record of what was attempted).
|
|
3397
|
+
appendUpdateHistory({
|
|
3398
|
+
flavor: 'git',
|
|
3399
|
+
fromVersion: previousVersion,
|
|
3400
|
+
toVersion: newVersion,
|
|
3401
|
+
commitHash,
|
|
3402
|
+
commitDate,
|
|
3403
|
+
commitsPulled,
|
|
3404
|
+
summary: pullSummary.split('\n').slice(0, 5).join('; '),
|
|
3405
|
+
modsReapplied: reconcileResult?.reapplied.length ?? 0,
|
|
3406
|
+
modsSuperseded: reconcileResult?.superseded.length ?? 0,
|
|
3407
|
+
modsNeedReconciliation: reconcileResult?.needsReconciliation.length ?? 0,
|
|
3408
|
+
modsFailed: reconcileResult?.failed.length ?? 0,
|
|
3409
|
+
durationMs: Date.now() - updateStartedAt,
|
|
3410
|
+
restartRequested: !!(options.restart || wasRunning),
|
|
3411
|
+
});
|
|
3155
3412
|
if (options.restart || wasRunning) {
|
|
3156
3413
|
const sentinelPath = path.join(BASE_DIR, '.restart-sentinel.json');
|
|
3157
3414
|
const sentinel = {
|
|
@@ -3159,6 +3416,8 @@ async function cmdUpdate(options) {
|
|
|
3159
3416
|
restartedAt: new Date().toISOString(),
|
|
3160
3417
|
reason: 'update',
|
|
3161
3418
|
updateDetails: {
|
|
3419
|
+
previousVersion,
|
|
3420
|
+
newVersion,
|
|
3162
3421
|
commitHash,
|
|
3163
3422
|
commitDate,
|
|
3164
3423
|
commitsBehind: commitsPulled,
|
|
@@ -3249,7 +3508,13 @@ async function cmdUpdate(options) {
|
|
|
3249
3508
|
}
|
|
3250
3509
|
// 14. Show current version
|
|
3251
3510
|
console.log();
|
|
3252
|
-
if (
|
|
3511
|
+
if (previousVersion !== 'unknown' && newVersion !== 'unknown' && previousVersion !== newVersion) {
|
|
3512
|
+
console.log(` ${GREEN}Updated v${previousVersion} → ${BOLD}v${newVersion}${RESET}${commitHash ? ` ${DIM}(${commitHash})${RESET}` : ''}`);
|
|
3513
|
+
}
|
|
3514
|
+
else if (previousVersion === newVersion && previousVersion !== 'unknown') {
|
|
3515
|
+
console.log(` ${GREEN}Already on latest (v${newVersion})${RESET}${commitHash ? ` ${DIM}(${commitHash})${RESET}` : ''}`);
|
|
3516
|
+
}
|
|
3517
|
+
else if (commitHash) {
|
|
3253
3518
|
console.log(` ${GREEN}Updated to ${commitHash} (${commitDate})${RESET}`);
|
|
3254
3519
|
}
|
|
3255
3520
|
else {
|
|
@@ -0,0 +1,35 @@
|
|
|
/**
 * Background "is there a newer version on npm?" check.
 *
 * Polls the public npm registry once per `CACHE_TTL_MS` (default 24h) and
 * caches the result on disk so subsequent calls are instant. Surfaced in
 * `clementine status` and the dashboard header so the user discovers
 * updates without remembering to run `clementine update`.
 *
 * Pure read-only — never installs anything. Network failures are silent
 * (offline → no nudge, not an error).
 */
/** Outcome of a single version check, whether served live or from the cache. */
export interface VersionCheckResult {
    /** Installed package version, passed through from the caller. */
    localVersion: string;
    /** Latest version seen on the registry; null when no check has ever succeeded. */
    latestVersion: string | null;
    /** True when latestVersion is strictly greater than localVersion. */
    updateAvailable: boolean;
    /** ISO of last successful registry check. null = never checked or fetch failed. */
    checkedAt: string | null;
    /** True when the cache was used (no network call this invocation). */
    fromCache: boolean;
}
/**
 * Check whether a newer version is available. Uses the cache when fresh.
 * Pass `force = true` to bypass the cache (e.g. from a "check now" CLI flag).
 */
export declare function checkForUpdate(baseDir: string, localVersion: string, opts?: {
    /** When true, skip the on-disk cache and always query the registry. */
    force?: boolean;
}): Promise<VersionCheckResult>;
/**
 * Synchronous read of the cached result — used in fast paths like
 * `clementine status` so we never block on a network call. Returns null
 * when there's no cache yet.
 */
export declare function readCachedUpdateCheck(baseDir: string, localVersion: string): VersionCheckResult | null;
//# sourceMappingURL=version-check.d.ts.map
|
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Background "is there a newer version on npm?" check.
|
|
3
|
+
*
|
|
4
|
+
* Polls the public npm registry once per `CACHE_TTL_MS` (default 24h) and
|
|
5
|
+
* caches the result on disk so subsequent calls are instant. Surfaced in
|
|
6
|
+
* `clementine status` and the dashboard header so the user discovers
|
|
7
|
+
* updates without remembering to run `clementine update`.
|
|
8
|
+
*
|
|
9
|
+
* Pure read-only — never installs anything. Network failures are silent
|
|
10
|
+
* (offline → no nudge, not an error).
|
|
11
|
+
*/
|
|
12
|
+
import { existsSync, readFileSync, writeFileSync } from 'node:fs';
|
|
13
|
+
import path from 'node:path';
|
|
14
|
+
import https from 'node:https';
|
|
15
|
+
// Package identity and cache freshness window for the npm version check.
const PACKAGE_NAME = 'clementine-agent';
const CACHE_TTL_MS = 24 * 60 * 60 * 1000; // 24 hours
/** Absolute path of the on-disk check cache inside `baseDir`. */
function cachePath(baseDir) {
    return path.join(baseDir, '.update-check.json');
}
/** Parse the cached check entry; null when absent, unreadable, or corrupt. */
function readCache(baseDir) {
    try {
        const file = cachePath(baseDir);
        if (!existsSync(file)) {
            return null;
        }
        const raw = readFileSync(file, 'utf-8');
        return JSON.parse(raw);
    }
    catch {
        // A broken cache is indistinguishable from no cache.
        return null;
    }
}
/** Persist a check entry (owner-only permissions); failures are swallowed. */
function writeCache(baseDir, entry) {
    try {
        const serialized = JSON.stringify(entry, null, 2);
        writeFileSync(cachePath(baseDir), serialized, { mode: 0o600 });
    }
    catch {
        // Non-fatal — cache is an optimization, not state.
    }
}
|
|
39
|
+
/**
 * Fetch the latest published version of the package from npm. Resolves null
 * on any network/parse error so callers can degrade silently.
 */
function fetchLatestFromNpm(timeoutMs = 5000) {
    return new Promise(resolve => {
        const url = `https://registry.npmjs.org/${PACKAGE_NAME}/latest`;
        const request = https.get(url, { timeout: timeoutMs, headers: { Accept: 'application/json' } }, response => {
            if (response.statusCode !== 200) {
                // Drain the body so the socket is released, then report "unknown".
                response.resume();
                resolve(null);
                return;
            }
            const chunks = [];
            response.setEncoding('utf-8');
            response.on('data', piece => { chunks.push(piece); });
            response.on('end', () => {
                let version = null;
                try {
                    version = JSON.parse(chunks.join('')).version ?? null;
                }
                catch {
                    // Malformed JSON is treated like a failed fetch.
                }
                resolve(version);
            });
        });
        request.on('error', () => resolve(null));
        request.on('timeout', () => { request.destroy(); resolve(null); });
    });
}
|
|
68
|
+
/**
 * Compare two semver strings numerically, part by part. Returns a positive
 * number when `a` > `b`, negative when `a` < `b`, and zero when equal.
 * Pre-release/build suffixes are stripped before comparing — only released
 * version bumps are of interest here.
 */
function compareSemver(a, b) {
    if (a === b)
        return 0;
    // Drop any "-pre" / "+build" suffix, then parse the dotted numeric parts.
    const toParts = v => v.replace(/[-+].*$/, '').split('.').map(n => parseInt(n, 10) || 0);
    const left = toParts(a);
    const right = toParts(b);
    const width = Math.max(left.length, right.length);
    for (let i = 0; i < width; i++) {
        const diff = (left[i] ?? 0) - (right[i] ?? 0);
        if (diff !== 0)
            return diff;
    }
    return 0;
}
|
|
86
|
+
/**
 * Check whether a newer version is available. Uses the cache when fresh.
 * Pass `force = true` to bypass the cache (e.g. from a "check now" CLI flag).
 *
 * Resolution order:
 *   1. fresh cache (unless `force`)  → cached answer, no network call;
 *   2. live registry fetch succeeds  → answer, persisted to the cache;
 *   3. fetch fails but a (possibly stale) cache exists → stale answer;
 *   4. fetch fails and no cache      → "no update known" sentinel.
 */
export async function checkForUpdate(baseDir, localVersion, opts = {}) {
    // Build a result from a previously persisted cache entry (shared by the
    // fresh-cache and stale-fallback branches, which previously duplicated it).
    const resultFromCache = (entry) => ({
        localVersion,
        latestVersion: entry.latestVersion,
        updateAvailable: compareSemver(entry.latestVersion, localVersion) > 0,
        checkedAt: entry.checkedAt,
        fromCache: true,
    });
    const cache = readCache(baseDir);
    // An unparsable checkedAt yields NaN, which fails the comparison → stale.
    const cacheFresh = !!cache && (Date.now() - new Date(cache.checkedAt).getTime() < CACHE_TTL_MS);
    if (cache && cacheFresh && !opts.force) {
        return resultFromCache(cache);
    }
    const latest = await fetchLatestFromNpm();
    if (!latest) {
        // Couldn't reach the registry — fall back to stale cache if we have one.
        if (cache) {
            return resultFromCache(cache);
        }
        return { localVersion, latestVersion: null, updateAvailable: false, checkedAt: null, fromCache: false };
    }
    // One timestamp for both the cache entry and the returned result, so the
    // two can never disagree (previously `new Date()` was evaluated twice).
    const checkedAt = new Date().toISOString();
    writeCache(baseDir, {
        checkedAt,
        latestVersion: latest,
        observedLocalVersion: localVersion,
    });
    return {
        localVersion,
        latestVersion: latest,
        updateAvailable: compareSemver(latest, localVersion) > 0,
        checkedAt,
        fromCache: false,
    };
}
|
|
130
|
+
/**
 * Synchronous read of the cached result — used in fast paths like
 * `clementine status` so we never block on a network call. Returns null
 * when there's no cache yet.
 */
export function readCachedUpdateCheck(baseDir, localVersion) {
    const entry = readCache(baseDir);
    if (!entry) {
        return null;
    }
    const updateAvailable = compareSemver(entry.latestVersion, localVersion) > 0;
    return {
        localVersion,
        latestVersion: entry.latestVersion,
        updateAvailable,
        checkedAt: entry.checkedAt,
        fromCache: true,
    };
}
|
|
147
|
+
//# sourceMappingURL=version-check.js.map
|
package/dist/index.js
CHANGED
|
@@ -888,8 +888,14 @@ async function asyncMain() {
|
|
|
888
888
|
else if (sentinel.reason === 'update' && sentinel.updateDetails) {
|
|
889
889
|
const d = sentinel.updateDetails;
|
|
890
890
|
const parts = [];
|
|
891
|
-
// Version info
|
|
892
|
-
if (d.
|
|
891
|
+
// Version info — prefer semver transition over commit hash for human readability.
|
|
892
|
+
if (d.previousVersion && d.newVersion && d.previousVersion !== d.newVersion) {
|
|
893
|
+
parts.push(`Updated v${d.previousVersion} → v${d.newVersion}`);
|
|
894
|
+
}
|
|
895
|
+
else if (d.newVersion) {
|
|
896
|
+
parts.push(`Now on v${d.newVersion}`);
|
|
897
|
+
}
|
|
898
|
+
else if (d.commitHash) {
|
|
893
899
|
parts.push(`Updated to ${d.commitHash}${d.commitDate ? ` (${d.commitDate})` : ''}`);
|
|
894
900
|
}
|
|
895
901
|
else {
|
package/dist/types.d.ts
CHANGED
|
@@ -573,6 +573,10 @@ export interface RestartSentinel {
|
|
|
573
573
|
sessionKey?: string;
|
|
574
574
|
changedFiles?: string[];
|
|
575
575
|
updateDetails?: {
|
|
576
|
+
/** Semver before the update — read from package.json prior to git pull. */
|
|
577
|
+
previousVersion?: string;
|
|
578
|
+
/** Semver after the update — read from package.json after build. */
|
|
579
|
+
newVersion?: string;
|
|
576
580
|
commitHash?: string;
|
|
577
581
|
commitDate?: string;
|
|
578
582
|
commitsBehind?: number;
|