metame-cli 1.5.1 → 1.5.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.js +2 -65
- package/package.json +2 -2
- package/scripts/bin/dispatch_to +45 -4
- package/scripts/daemon-command-router.js +1 -0
- package/scripts/daemon-task-scheduler.js +10 -2
- package/scripts/daemon.js +36 -1
- package/scripts/distill.js +77 -0
- package/scripts/publish-public.sh +85 -0
- package/scripts/self-reflect.js +286 -0
package/index.js
CHANGED
|
@@ -806,73 +806,10 @@ try {
|
|
|
806
806
|
// Non-fatal
|
|
807
807
|
}
|
|
808
808
|
|
|
809
|
-
// ---------------------------------------------------------
|
|
810
|
-
// 4.6 REFLECTION PROMPT (Phase C — conditional, NOT static)
|
|
811
|
-
// ---------------------------------------------------------
|
|
812
|
-
// Only inject when trigger conditions are met at startup.
|
|
813
|
-
// This ensures reflections don't fire every session.
|
|
814
|
-
let reflectionLine = '';
|
|
815
|
-
try {
|
|
816
|
-
if (isKnownUser && _brainDoc) {
|
|
817
|
-
const refDoc = _brainDoc;
|
|
818
|
-
|
|
819
|
-
// Check quiet mode
|
|
820
|
-
const quietUntil = refDoc.growth && refDoc.growth.quiet_until;
|
|
821
|
-
const isQuietForRef = quietUntil && new Date(quietUntil).getTime() > Date.now();
|
|
822
|
-
|
|
823
|
-
if (!isQuietForRef) {
|
|
824
|
-
const distillCount = (refDoc.evolution && refDoc.evolution.distill_count) || 0;
|
|
825
|
-
const zoneHistory = (refDoc.growth && refDoc.growth.zone_history) || [];
|
|
826
|
-
|
|
827
|
-
// Trigger 1: Every 7th session
|
|
828
|
-
const trigger7th = distillCount > 0 && distillCount % 7 === 0;
|
|
829
|
-
|
|
830
|
-
// Trigger 2: Three consecutive comfort-zone sessions
|
|
831
|
-
const lastThree = zoneHistory.slice(-3);
|
|
832
|
-
const triggerComfort = lastThree.length === 3 && lastThree.every(z => z === 'C');
|
|
833
|
-
|
|
834
|
-
// Trigger 3: Persistent goal drift (2+ drifted in last 3 sessions)
|
|
835
|
-
let triggerDrift = false;
|
|
836
|
-
let driftDeclaredFocus = null;
|
|
837
|
-
try {
|
|
838
|
-
const sessionLogFile = path.join(METAME_DIR, 'session_log.yaml');
|
|
839
|
-
if (fs.existsSync(sessionLogFile)) {
|
|
840
|
-
const driftLog = yaml.load(fs.readFileSync(sessionLogFile, 'utf8'));
|
|
841
|
-
if (driftLog && Array.isArray(driftLog.sessions)) {
|
|
842
|
-
const recentSessions = driftLog.sessions.slice(-3);
|
|
843
|
-
const driftCount = recentSessions.filter(s =>
|
|
844
|
-
s.goal_alignment === 'drifted' || s.goal_alignment === 'partial'
|
|
845
|
-
).length;
|
|
846
|
-
if (driftCount >= 2 && recentSessions.length >= 2) {
|
|
847
|
-
driftDeclaredFocus = refDoc.status?.focus || refDoc.context?.focus;
|
|
848
|
-
if (driftDeclaredFocus) triggerDrift = true;
|
|
849
|
-
}
|
|
850
|
-
}
|
|
851
|
-
}
|
|
852
|
-
} catch { /* non-fatal */ }
|
|
853
|
-
|
|
854
|
-
if (triggerDrift || triggerComfort || trigger7th) {
|
|
855
|
-
let hint = '';
|
|
856
|
-
if (triggerDrift) {
|
|
857
|
-
hint = `最近几个session的方向和"${driftDeclaredFocus}"有偏差。请在对话开始时温和地问:${icon("mirror")} 是方向有意调整了,还是不小心偏了?`;
|
|
858
|
-
} else if (triggerComfort) {
|
|
859
|
-
hint = `连续几次都在熟悉领域。如果用户在session结束时自然停顿,可以温和地问:${icon("mirror")} 准备好探索拉伸区了吗?`;
|
|
860
|
-
} else {
|
|
861
|
-
hint = '这是第' + distillCount + `次session。如果session自然结束,可以附加一句:${icon("mirror")} 一个词形容这次session的感受?`;
|
|
862
|
-
}
|
|
863
|
-
const timing = triggerDrift ? '在对话开始时就问一次' : '只在session即将结束时说一次';
|
|
864
|
-
reflectionLine = `\n[MetaMe reflection: ${hint} ${timing}。如果用户没回应就不要追问。]\n`;
|
|
865
|
-
}
|
|
866
|
-
}
|
|
867
|
-
}
|
|
868
|
-
} catch {
|
|
869
|
-
// Non-fatal
|
|
870
|
-
}
|
|
871
|
-
|
|
872
809
|
// Project-level CLAUDE.md: KERNEL has moved to global ~/.claude/CLAUDE.md.
|
|
873
|
-
// Only inject dynamic per-session observations (mirror
|
|
810
|
+
// Only inject dynamic per-session observations (mirror).
|
|
874
811
|
// If nothing dynamic, write the cleaned file with no METAME block at all.
|
|
875
|
-
const dynamicContent = mirrorLine
|
|
812
|
+
const dynamicContent = mirrorLine;
|
|
876
813
|
const newContent = dynamicContent.trim()
|
|
877
814
|
? METAME_START + '\n' + dynamicContent + METAME_END + '\n' + fileContent
|
|
878
815
|
: fileContent;
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "metame-cli",
|
|
3
|
-
"version": "1.5.
|
|
3
|
+
"version": "1.5.2",
|
|
4
4
|
"description": "The Cognitive Profile Layer for Claude Code. Knows how you think, not just what you said.",
|
|
5
5
|
"main": "index.js",
|
|
6
6
|
"bin": {
|
|
@@ -17,7 +17,7 @@
|
|
|
17
17
|
"test": "node --test scripts/*.test.js",
|
|
18
18
|
"test:daemon-status": "node --test scripts/daemon-restart-status.test.js",
|
|
19
19
|
"start": "node index.js",
|
|
20
|
-
"sync:plugin": "cp scripts/platform.js scripts/schema.js scripts/pending-traits.js scripts/signal-capture.js scripts/distill.js scripts/daemon.js scripts/daemon-agent-commands.js scripts/daemon-session-commands.js scripts/daemon-admin-commands.js scripts/daemon-exec-commands.js scripts/daemon-ops-commands.js scripts/daemon-session-store.js scripts/daemon-checkpoints.js scripts/daemon-bridges.js scripts/daemon-file-browser.js scripts/daemon-runtime-lifecycle.js scripts/daemon-notify.js scripts/daemon-claude-engine.js scripts/daemon-engine-runtime.js scripts/daemon-command-router.js scripts/daemon-user-acl.js scripts/daemon-agent-tools.js scripts/daemon-task-scheduler.js scripts/daemon-task-envelope.js scripts/task-board.js scripts/telegram-adapter.js scripts/feishu-adapter.js scripts/daemon-default.yaml scripts/providers.js scripts/utils.js scripts/usage-classifier.js scripts/resolve-yaml.js scripts/memory.js scripts/memory-write.js scripts/memory-extract.js scripts/memory-search.js scripts/memory-gc.js scripts/memory-nightly-reflect.js scripts/memory-index.js scripts/qmd-client.js scripts/session-summarize.js scripts/session-analytics.js scripts/mentor-engine.js scripts/skill-evolution.js scripts/skill-changelog.js scripts/agent-layer.js scripts/check-macos-control-capabilities.sh plugin/scripts/ && echo 'Plugin scripts synced'",
|
|
20
|
+
"sync:plugin": "cp scripts/platform.js scripts/schema.js scripts/pending-traits.js scripts/signal-capture.js scripts/distill.js scripts/daemon.js scripts/daemon-agent-commands.js scripts/daemon-session-commands.js scripts/daemon-admin-commands.js scripts/daemon-exec-commands.js scripts/daemon-ops-commands.js scripts/daemon-session-store.js scripts/daemon-checkpoints.js scripts/daemon-bridges.js scripts/daemon-file-browser.js scripts/daemon-runtime-lifecycle.js scripts/daemon-notify.js scripts/daemon-claude-engine.js scripts/daemon-engine-runtime.js scripts/daemon-command-router.js scripts/daemon-user-acl.js scripts/daemon-agent-tools.js scripts/daemon-task-scheduler.js scripts/daemon-task-envelope.js scripts/task-board.js scripts/telegram-adapter.js scripts/feishu-adapter.js scripts/daemon-default.yaml scripts/providers.js scripts/utils.js scripts/usage-classifier.js scripts/resolve-yaml.js scripts/memory.js scripts/memory-write.js scripts/memory-extract.js scripts/memory-search.js scripts/memory-gc.js scripts/memory-nightly-reflect.js scripts/memory-index.js scripts/qmd-client.js scripts/session-summarize.js scripts/session-analytics.js scripts/mentor-engine.js scripts/skill-evolution.js scripts/skill-changelog.js scripts/agent-layer.js scripts/self-reflect.js scripts/check-macos-control-capabilities.sh plugin/scripts/ && echo 'Plugin scripts synced'",
|
|
21
21
|
"sync:readme": "node scripts/sync-readme.js",
|
|
22
22
|
"restart:daemon": "node index.js stop 2>/dev/null; sleep 1; node index.js start 2>/dev/null || echo '鈿狅笍 Daemon not running or restart failed'",
|
|
23
23
|
"precommit": "npm run sync:plugin && npm run restart:daemon"
|
package/scripts/bin/dispatch_to
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
2
|
/**
|
|
3
|
-
* dispatch_to [--new] <project_key> "<prompt>"
|
|
3
|
+
* dispatch_to [--new] [--from <project_key>] <project_key> "<prompt>"
|
|
4
4
|
* Tries Unix socket / Named Pipe first (low-latency), falls back to pending.jsonl.
|
|
5
5
|
*/
|
|
6
6
|
'use strict';
|
|
@@ -13,10 +13,18 @@ const { socketPath } = require('../platform');
|
|
|
13
13
|
|
|
14
14
|
const args = process.argv.slice(2);
|
|
15
15
|
const newSession = args[0] === '--new' ? (args.shift(), true) : false;
|
|
16
|
+
|
|
17
|
+
// --from <project_key>: identifies the calling agent for callback routing
|
|
18
|
+
let fromKey = '_claude_session';
|
|
19
|
+
const fromIdx = args.indexOf('--from');
|
|
20
|
+
if (fromIdx !== -1 && args[fromIdx + 1]) {
|
|
21
|
+
fromKey = args.splice(fromIdx, 2)[1];
|
|
22
|
+
}
|
|
23
|
+
|
|
16
24
|
const [target, ...rest] = args;
|
|
17
25
|
const prompt = rest.join(' ').replace(/^["']|["']$/g, '');
|
|
18
26
|
if (!target || !prompt) {
|
|
19
|
-
console.error('Usage: dispatch_to [--new] <project_key> "<prompt>"');
|
|
27
|
+
console.error('Usage: dispatch_to [--new] [--from <project_key>] <project_key> "<prompt>"');
|
|
20
28
|
process.exit(1);
|
|
21
29
|
}
|
|
22
30
|
|
|
@@ -41,10 +49,43 @@ function getDispatchSecret() {
|
|
|
41
49
|
|
|
42
50
|
const ts = new Date().toISOString();
|
|
43
51
|
const secret = getDispatchSecret();
|
|
44
|
-
|
|
52
|
+
|
|
53
|
+
// Auto-inject shared context: now/shared.md + target's _latest.md
|
|
54
|
+
function buildEnrichedPrompt(rawPrompt) {
|
|
55
|
+
const nowFile = path.join(METAME_DIR, 'memory', 'now', 'shared.md');
|
|
56
|
+
const agentFile = path.join(METAME_DIR, 'memory', 'agents', `${target}_latest.md`);
|
|
57
|
+
let ctx = '';
|
|
58
|
+
try { if (fs.existsSync(nowFile)) ctx += `[共享进度 now.md]\n${fs.readFileSync(nowFile, 'utf8').trim()}\n\n`; } catch {}
|
|
59
|
+
try { if (fs.existsSync(agentFile)) ctx += `[${target} 上次产出]\n${fs.readFileSync(agentFile, 'utf8').trim()}\n\n`; } catch {}
|
|
60
|
+
// Push model: inject unread inbox messages and immediately archive them
|
|
61
|
+
try {
|
|
62
|
+
const inboxDir = path.join(METAME_DIR, 'memory', 'inbox', target);
|
|
63
|
+
const readDir = path.join(inboxDir, 'read');
|
|
64
|
+
const files = fs.readdirSync(inboxDir).filter(f => f.endsWith('.md')).sort();
|
|
65
|
+
if (files.length > 0) {
|
|
66
|
+
ctx += `[📬 Agent Inbox — ${files.length} 条未读消息]\n`;
|
|
67
|
+
fs.mkdirSync(readDir, { recursive: true });
|
|
68
|
+
for (const f of files) {
|
|
69
|
+
const filePath = path.join(inboxDir, f);
|
|
70
|
+
ctx += fs.readFileSync(filePath, 'utf8').trim() + '\n---\n';
|
|
71
|
+
fs.renameSync(filePath, path.join(readDir, f));
|
|
72
|
+
}
|
|
73
|
+
ctx += '\n';
|
|
74
|
+
}
|
|
75
|
+
} catch {}
|
|
76
|
+
return ctx ? `${ctx}---\n${rawPrompt}` : rawPrompt;
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
const enrichedPrompt = buildEnrichedPrompt(prompt);
|
|
80
|
+
const sigPayload = JSON.stringify({ target, prompt: enrichedPrompt, ts });
|
|
45
81
|
const sig = crypto.createHmac('sha256', secret).update(sigPayload).digest('hex');
|
|
46
82
|
|
|
47
|
-
|
|
83
|
+
// Set callback: true when dispatched by another agent (not a user session)
|
|
84
|
+
const callback = fromKey !== '_claude_session';
|
|
85
|
+
const msg = { target, prompt: enrichedPrompt, from: fromKey, new_session: newSession, created_at: ts, ts, sig, ...(callback && { callback: true }) };
|
|
86
|
+
|
|
87
|
+
// Ensure target's inbox exists — lazy init, safe for new users and new agents
|
|
88
|
+
fs.mkdirSync(path.join(METAME_DIR, 'memory', 'inbox', target, 'read'), { recursive: true });
|
|
48
89
|
|
|
49
90
|
function fallbackToFile() {
|
|
50
91
|
fs.mkdirSync(DISPATCH_DIR, { recursive: true });
|
|
@@ -338,7 +338,7 @@ function createTaskScheduler(deps) {
|
|
|
338
338
|
|
|
339
339
|
if (!checkBudget(config, state)) {
|
|
340
340
|
log('WARN', `Budget exceeded, skipping task: ${task.name}`);
|
|
341
|
-
return { success: false, error: 'budget_exceeded', output: '' };
|
|
341
|
+
return { success: false, error: 'budget_exceeded', output: '', skipped: true };
|
|
342
342
|
}
|
|
343
343
|
|
|
344
344
|
// Precondition gate: run a cheap shell check before burning tokens
|
|
@@ -583,7 +583,7 @@ function createTaskScheduler(deps) {
|
|
|
583
583
|
const state = loadState();
|
|
584
584
|
if (!checkBudget(config, state)) {
|
|
585
585
|
log('WARN', `Budget exceeded, skipping workflow: ${task.name}`);
|
|
586
|
-
return { success: false, error: 'budget_exceeded', output: '' };
|
|
586
|
+
return { success: false, error: 'budget_exceeded', output: '', skipped: true };
|
|
587
587
|
}
|
|
588
588
|
// precheck.pass is guaranteed true here — executeTask() already returns early when false
|
|
589
589
|
const steps = task.steps || [];
|
|
@@ -793,6 +793,14 @@ function createTaskScheduler(deps) {
|
|
|
793
793
|
Promise.resolve(executeTask(task, config))
|
|
794
794
|
.then((result) => {
|
|
795
795
|
runningTasks.delete(task.name);
|
|
796
|
+
// Budget exceeded: back off until next day instead of retrying every interval
|
|
797
|
+
if (result.error === 'budget_exceeded') {
|
|
798
|
+
const tomorrow = new Date();
|
|
799
|
+
tomorrow.setDate(tomorrow.getDate() + 1);
|
|
800
|
+
tomorrow.setHours(0, 5, 0, 0); // 00:05 next day
|
|
801
|
+
nextRun[task.name] = tomorrow.getTime();
|
|
802
|
+
return;
|
|
803
|
+
}
|
|
796
804
|
if (task.notify && notifyFn && !result.skipped) {
|
|
797
805
|
const proj = task._project || null;
|
|
798
806
|
if (result.success) {
|
package/scripts/daemon.js
CHANGED
|
@@ -847,6 +847,25 @@ function dispatchTask(targetProject, message, config, replyFn, streamOptions = n
|
|
|
847
847
|
if (replyFn && outStr.trim().length > 2) {
|
|
848
848
|
replyFn(outStr);
|
|
849
849
|
} else if (!replyFn && fullMsg.callback && fullMsg.from && config) {
|
|
850
|
+
// Write result to sender's inbox before dispatching callback
|
|
851
|
+
try {
|
|
852
|
+
const inboxDir = path.join(os.homedir(), '.metame', 'memory', 'inbox', fullMsg.from);
|
|
853
|
+
fs.mkdirSync(inboxDir, { recursive: true });
|
|
854
|
+
const tsStr = new Date().toISOString().replace(/[-:T]/g, '').slice(0, 15);
|
|
855
|
+
const subject = `callback_${(fullMsg.payload.title || fullMsg.id || 'task').replace(/\s+/g, '_').slice(0, 30)}`;
|
|
856
|
+
const inboxFile = path.join(inboxDir, `${tsStr}_${targetProject}_${subject}.md`);
|
|
857
|
+
const body = [
|
|
858
|
+
`FROM: ${targetProject}`,
|
|
859
|
+
`TO: ${fullMsg.from}`,
|
|
860
|
+
`TS: ${new Date().toISOString()}`,
|
|
861
|
+
`SUBJECT: ${subject}`,
|
|
862
|
+
'',
|
|
863
|
+
outStr.trim().slice(0, 2000),
|
|
864
|
+
].join('\n');
|
|
865
|
+
fs.writeFileSync(inboxFile, body, 'utf8');
|
|
866
|
+
} catch (e) {
|
|
867
|
+
log('WARN', `callback inbox write failed: ${e.message}`);
|
|
868
|
+
}
|
|
850
869
|
dispatchTask(fullMsg.from, {
|
|
851
870
|
from: targetProject,
|
|
852
871
|
type: 'callback',
|
|
@@ -1808,6 +1827,16 @@ function acquireDaemonLock() {
|
|
|
1808
1827
|
if (attempt < maxAttempts - 1) continue;
|
|
1809
1828
|
return false;
|
|
1810
1829
|
}
|
|
1830
|
+
// Parent is dead — re-read lock before deleting: another daemon (e.g. LaunchAgent-
|
|
1831
|
+
// spawned) may have acquired it in the window between parent exit and us waking up.
|
|
1832
|
+
try {
|
|
1833
|
+
const reread = JSON.parse(fs.readFileSync(LOCK_FILE, 'utf8') || '{}');
|
|
1834
|
+
const newOwner = parseInt(reread.pid, 10);
|
|
1835
|
+
if (newOwner && newOwner !== ownerPid && newOwner !== process.pid && isPidAlive(newOwner)) {
|
|
1836
|
+
log('WARN', `Lock acquired by PID ${newOwner} during handoff — yielding`);
|
|
1837
|
+
return false;
|
|
1838
|
+
}
|
|
1839
|
+
} catch { /* lock already gone — proceed to create */ }
|
|
1811
1840
|
try { fs.unlinkSync(LOCK_FILE); } catch { /* ignore */ }
|
|
1812
1841
|
continue;
|
|
1813
1842
|
}
|
|
@@ -1828,7 +1857,13 @@ function releaseDaemonLock() {
|
|
|
1828
1857
|
if (daemonLockFd !== null) fs.closeSync(daemonLockFd);
|
|
1829
1858
|
} catch { /* ignore */ }
|
|
1830
1859
|
daemonLockFd = null;
|
|
1831
|
-
|
|
1860
|
+
// Only delete the lock file if we still own it — avoids wiping a successor daemon's lock.
|
|
1861
|
+
try {
|
|
1862
|
+
if (fs.existsSync(LOCK_FILE)) {
|
|
1863
|
+
const meta = JSON.parse(fs.readFileSync(LOCK_FILE, 'utf8') || '{}');
|
|
1864
|
+
if (parseInt(meta.pid, 10) === process.pid) fs.unlinkSync(LOCK_FILE);
|
|
1865
|
+
}
|
|
1866
|
+
} catch { /* ignore */ }
|
|
1832
1867
|
}
|
|
1833
1868
|
|
|
1834
1869
|
// ---------------------------------------------------------
|
package/scripts/distill.js
CHANGED
|
@@ -1370,12 +1370,86 @@ If no clear patterns found: respond with exactly NO_PATTERNS`;
|
|
|
1370
1370
|
}
|
|
1371
1371
|
}
|
|
1372
1372
|
|
|
1373
|
+
// ---------------------------------------------------------
|
|
1374
|
+
// SESSION REFLECTION — fill reflection word for sessions that lack one
|
|
1375
|
+
// ---------------------------------------------------------
|
|
1376
|
+
|
|
1377
|
+
/**
|
|
1378
|
+
* For each session in session_log.yaml that has no `reflection` field,
|
|
1379
|
+
* call Haiku once (batch) to generate a 1-4 char Chinese word capturing the feel.
|
|
1380
|
+
* Runs after every distill — no-op if all sessions already have reflection.
|
|
1381
|
+
*/
|
|
1382
|
+
async function fillSessionReflections() {
|
|
1383
|
+
const yaml = require('js-yaml');
|
|
1384
|
+
if (!fs.existsSync(SESSION_LOG_FILE)) return;
|
|
1385
|
+
|
|
1386
|
+
let log;
|
|
1387
|
+
try {
|
|
1388
|
+
log = yaml.load(fs.readFileSync(SESSION_LOG_FILE, 'utf8')) || { sessions: [] };
|
|
1389
|
+
} catch { return; }
|
|
1390
|
+
if (!Array.isArray(log.sessions)) return;
|
|
1391
|
+
|
|
1392
|
+
const pending = log.sessions.filter(s => !s.reflection);
|
|
1393
|
+
if (pending.length === 0) return;
|
|
1394
|
+
|
|
1395
|
+
const sessionLines = pending.map((s, i) => {
|
|
1396
|
+
const parts = [];
|
|
1397
|
+
if (s.topics && s.topics.length) parts.push(`topics: ${s.topics.join(', ')}`);
|
|
1398
|
+
if (s.zone) parts.push(`zone: ${s.zone}`);
|
|
1399
|
+
if (s.cognitive_load) parts.push(`load: ${s.cognitive_load}`);
|
|
1400
|
+
if (s.goal_alignment) parts.push(`goal: ${s.goal_alignment}`);
|
|
1401
|
+
if (s.friction && s.friction.length) parts.push(`friction: ${s.friction.join(', ')}`);
|
|
1402
|
+
if (s.session_outcome) parts.push(`outcome: ${s.session_outcome}`);
|
|
1403
|
+
return `${i + 1}. [${s.ts || '?'}] ${parts.join(' | ')}`;
|
|
1404
|
+
}).join('\n');
|
|
1405
|
+
|
|
1406
|
+
const prompt = `You are a session reflection assistant. For each session below, generate ONE Chinese word (1-4 characters) that honestly captures the overall feel or experience of that session.
|
|
1407
|
+
|
|
1408
|
+
SESSIONS:
|
|
1409
|
+
${sessionLines}
|
|
1410
|
+
|
|
1411
|
+
OUTPUT FORMAT — respond with ONLY a YAML code block:
|
|
1412
|
+
\`\`\`yaml
|
|
1413
|
+
reflections:
|
|
1414
|
+
- "专注"
|
|
1415
|
+
- "艰难"
|
|
1416
|
+
\`\`\`
|
|
1417
|
+
|
|
1418
|
+
One word per session, in order. Chinese only. Be honest and precise.`;
|
|
1419
|
+
|
|
1420
|
+
try {
|
|
1421
|
+
const result = await callHaiku(prompt, distillEnv, 20000);
|
|
1422
|
+
if (!result) return;
|
|
1423
|
+
|
|
1424
|
+
const yamlMatch = result.match(/```yaml\n([\s\S]*?)```/) || result.match(/```\n([\s\S]*?)```/);
|
|
1425
|
+
if (!yamlMatch) return;
|
|
1426
|
+
|
|
1427
|
+
const parsed = yaml.load(yamlMatch[1].trim());
|
|
1428
|
+
if (!parsed || !Array.isArray(parsed.reflections)) return;
|
|
1429
|
+
|
|
1430
|
+
let written = 0;
|
|
1431
|
+
for (let i = 0; i < pending.length && i < parsed.reflections.length; i++) {
|
|
1432
|
+
const word = String(parsed.reflections[i] || '').trim().slice(0, 8);
|
|
1433
|
+
if (word) { pending[i].reflection = word; written++; }
|
|
1434
|
+
}
|
|
1435
|
+
|
|
1436
|
+
if (written > 0) {
|
|
1437
|
+
const tmp = SESSION_LOG_FILE + '.tmp';
|
|
1438
|
+
fs.writeFileSync(tmp, yaml.dump(log, { lineWidth: -1 }), 'utf8');
|
|
1439
|
+
fs.renameSync(tmp, SESSION_LOG_FILE);
|
|
1440
|
+
}
|
|
1441
|
+
} catch {
|
|
1442
|
+
// Non-fatal
|
|
1443
|
+
}
|
|
1444
|
+
}
|
|
1445
|
+
|
|
1373
1446
|
// Export for use in index.js
|
|
1374
1447
|
module.exports = {
|
|
1375
1448
|
distill,
|
|
1376
1449
|
writeSessionLog,
|
|
1377
1450
|
bootstrapSessionLog,
|
|
1378
1451
|
detectPatterns,
|
|
1452
|
+
fillSessionReflections,
|
|
1379
1453
|
_private: {
|
|
1380
1454
|
mergeCompetenceMap,
|
|
1381
1455
|
normalizeCompetenceLevel,
|
|
@@ -1401,6 +1475,9 @@ if (require.main === module) {
|
|
|
1401
1475
|
writeSessionLog(result.behavior, result.signalCount || 0, result.skeleton || null, result.sessionSummary || null);
|
|
1402
1476
|
}
|
|
1403
1477
|
|
|
1478
|
+
// Fill reflection words for sessions that don't have one yet
|
|
1479
|
+
await fillSessionReflections();
|
|
1480
|
+
|
|
1404
1481
|
// Run pattern detection (only triggers every 5th distill)
|
|
1405
1482
|
if (!bootstrapped) await detectPatterns();
|
|
1406
1483
|
|
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
#!/usr/bin/env bash
|
|
2
|
+
# Sync sanitized code from private repo to the public MetaMe repo.
|
|
3
|
+
#
|
|
4
|
+
# Usage:
|
|
5
|
+
# ./scripts/publish-public.sh # generate to ../MetaMe-public
|
|
6
|
+
# ./scripts/publish-public.sh --push # generate + push to public repo
|
|
7
|
+
#
|
|
8
|
+
# Workflow:
|
|
9
|
+
# - You develop in MetaMe-private (this repo), push to origin as usual
|
|
10
|
+
# - Run this script when you want to update the public mirror
|
|
11
|
+
# - Private modules (listed in .private-modules) get replaced with stubs
|
|
12
|
+
|
|
13
|
+
set -euo pipefail
|
|
14
|
+
|
|
15
|
+
REPO_ROOT="$(cd "$(dirname "$0")/.." && pwd)"
|
|
16
|
+
PRIVATE_LIST="$REPO_ROOT/.private-modules"
|
|
17
|
+
OUT_DIR="$REPO_ROOT/../MetaMe-public"
|
|
18
|
+
PUBLIC_REMOTE="git@github.com:Yaron9/MetaMe.git"
|
|
19
|
+
|
|
20
|
+
DO_PUSH=false
|
|
21
|
+
for arg in "$@"; do
|
|
22
|
+
case "$arg" in
|
|
23
|
+
--push) DO_PUSH=true ;;
|
|
24
|
+
esac
|
|
25
|
+
done
|
|
26
|
+
|
|
27
|
+
if [ ! -f "$PRIVATE_LIST" ]; then
|
|
28
|
+
echo "Error: .private-modules not found"
|
|
29
|
+
exit 1
|
|
30
|
+
fi
|
|
31
|
+
|
|
32
|
+
PRIVATE_FILES=()
|
|
33
|
+
while IFS= read -r line; do
|
|
34
|
+
line="${line%%#*}"
|
|
35
|
+
line="${line// /}"
|
|
36
|
+
[ -z "$line" ] && continue
|
|
37
|
+
PRIVATE_FILES+=("$line")
|
|
38
|
+
done < "$PRIVATE_LIST"
|
|
39
|
+
|
|
40
|
+
echo "==> Generating public release to: $OUT_DIR"
|
|
41
|
+
echo " Stripping ${#PRIVATE_FILES[@]} private modules"
|
|
42
|
+
|
|
43
|
+
rm -rf "$OUT_DIR"
|
|
44
|
+
mkdir -p "$OUT_DIR"
|
|
45
|
+
|
|
46
|
+
git -C "$REPO_ROOT" archive HEAD | tar -x -C "$OUT_DIR"
|
|
47
|
+
|
|
48
|
+
# Strip .private-modules itself from public release
|
|
49
|
+
rm -f "$OUT_DIR/.private-modules"
|
|
50
|
+
|
|
51
|
+
for pattern in "${PRIVATE_FILES[@]}"; do
|
|
52
|
+
for file in $OUT_DIR/$pattern; do
|
|
53
|
+
[ -f "$file" ] || continue
|
|
54
|
+
rel="${file#$OUT_DIR/}"
|
|
55
|
+
basename=$(basename "$file" .js)
|
|
56
|
+
cat > "$file" << STUB
|
|
57
|
+
// This module is part of MetaMe's proprietary core.
|
|
58
|
+
// See https://github.com/Yaron9/MetaMe for the open-source components.
|
|
59
|
+
//
|
|
60
|
+
// Module: ${basename}
|
|
61
|
+
// License: Business Source License (BSL 1.1)
|
|
62
|
+
//
|
|
63
|
+
// For licensing inquiries: github.com/Yaron9/MetaMe/issues
|
|
64
|
+
|
|
65
|
+
module.exports = {};
|
|
66
|
+
STUB
|
|
67
|
+
echo " [stub] $rel"
|
|
68
|
+
done
|
|
69
|
+
done
|
|
70
|
+
|
|
71
|
+
echo "==> Done. ${#PRIVATE_FILES[@]} modules stubbed."
|
|
72
|
+
|
|
73
|
+
if [ "$DO_PUSH" = true ]; then
|
|
74
|
+
cd "$OUT_DIR"
|
|
75
|
+
if [ ! -d .git ]; then
|
|
76
|
+
git init -b main
|
|
77
|
+
git remote add origin "$PUBLIC_REMOTE"
|
|
78
|
+
fi
|
|
79
|
+
git add -A
|
|
80
|
+
COMMIT_MSG="sync: $(git -C "$REPO_ROOT" log -1 --format='%h %s')"
|
|
81
|
+
git commit -m "$COMMIT_MSG" --allow-empty
|
|
82
|
+
echo "==> Pushing to public repo..."
|
|
83
|
+
git push -u origin main --force
|
|
84
|
+
echo "==> Public repo updated."
|
|
85
|
+
fi
|
|
@@ -0,0 +1,286 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* self-reflect.js — Daily Self-Reflection Task
|
|
5
|
+
*
|
|
6
|
+
* Scans correction/metacognitive signals from the past 7 days,
|
|
7
|
+
* aggregates "where did the AI get it wrong", and writes a brief
|
|
8
|
+
* self-critique pattern into growth.patterns in ~/.claude_profile.yaml.
|
|
9
|
+
*
|
|
10
|
+
* Also distills correction signals into lessons/ SOP markdown files.
|
|
11
|
+
*
|
|
12
|
+
* Heartbeat: nightly at 23:00, require_idle, non-blocking.
|
|
13
|
+
*/
|
|
14
|
+
|
|
15
|
+
'use strict';
|
|
16
|
+
|
|
17
|
+
const fs = require('fs');
|
|
18
|
+
const path = require('path');
|
|
19
|
+
const os = require('os');
|
|
20
|
+
const { callHaiku, buildDistillEnv } = require('./providers');
|
|
21
|
+
const { writeBrainFileSafe } = require('./utils');
|
|
22
|
+
|
|
23
|
+
const HOME = os.homedir();
|
|
24
|
+
const SIGNAL_FILE = path.join(HOME, '.metame', 'raw_signals.jsonl');
|
|
25
|
+
const BRAIN_FILE = path.join(HOME, '.claude_profile.yaml');
|
|
26
|
+
const LOCK_FILE = path.join(HOME, '.metame', 'self-reflect.lock');
|
|
27
|
+
const LESSONS_DIR = path.join(HOME, '.metame', 'memory', 'lessons');
|
|
28
|
+
const WINDOW_DAYS = 7;
|
|
29
|
+
|
|
30
|
+
/**
|
|
31
|
+
* Distill correction signals into reusable SOP markdown files.
|
|
32
|
+
* Each run produces at most one lesson file per unique slug.
|
|
33
|
+
* Returns the number of lesson files actually written.
|
|
34
|
+
*
|
|
35
|
+
* @param {Array} signals - all recent signals (will filter to 'correction' type internally)
|
|
36
|
+
* @param {string} lessonsDir - absolute path where lesson .md files are written
|
|
37
|
+
*/
|
|
38
|
+
async function generateLessons(signals, lessonsDir) {
|
|
39
|
+
// Only process correction signals that carry explicit feedback
|
|
40
|
+
const corrections = signals.filter(s => s.type === 'correction' && s.feedback);
|
|
41
|
+
if (corrections.length < 2) {
|
|
42
|
+
console.log(`[self-reflect] Only ${corrections.length} correction signal(s) with feedback, skipping lessons.`);
|
|
43
|
+
return 0;
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
fs.mkdirSync(lessonsDir, { recursive: true });
|
|
47
|
+
|
|
48
|
+
const correctionText = corrections
|
|
49
|
+
.slice(-15) // cap to avoid prompt bloat
|
|
50
|
+
.map(c => `- Prompt: ${(c.prompt || '').slice(0, 100)}\n Feedback: ${(c.feedback || '').slice(0, 150)}`)
|
|
51
|
+
.join('\n');
|
|
52
|
+
|
|
53
|
+
const prompt = `You are distilling correction signals into a reusable SOP for an AI assistant.
|
|
54
|
+
|
|
55
|
+
Corrections (JSON):
|
|
56
|
+
${correctionText}
|
|
57
|
+
|
|
58
|
+
Generate ONE actionable lesson in this JSON format:
|
|
59
|
+
{
|
|
60
|
+
"title": "简短标题(中文,10字以内)",
|
|
61
|
+
"slug": "kebab-case-english-slug",
|
|
62
|
+
"content": "## 问题\\n...\\n## 根因\\n...\\n## 操作手册\\n1. ...\\n2. ...\\n3. ..."
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
Rules: content must be in 中文, concrete and actionable, 100-300 chars total.
|
|
66
|
+
Only output the JSON object, no explanation.`;
|
|
67
|
+
|
|
68
|
+
let distillEnv = {};
|
|
69
|
+
try { distillEnv = buildDistillEnv(); } catch {}
|
|
70
|
+
|
|
71
|
+
let result;
|
|
72
|
+
try {
|
|
73
|
+
result = await Promise.race([
|
|
74
|
+
callHaiku(prompt, distillEnv, 60000),
|
|
75
|
+
new Promise((_, reject) => setTimeout(() => reject(new Error('timeout')), 90000)),
|
|
76
|
+
]);
|
|
77
|
+
} catch (e) {
|
|
78
|
+
console.log(`[self-reflect] generateLessons Haiku call failed: ${e.message}`);
|
|
79
|
+
return 0;
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
let lesson;
|
|
83
|
+
try {
|
|
84
|
+
const cleaned = result.replace(/```json\n?/g, '').replace(/```\n?/g, '').trim();
|
|
85
|
+
lesson = JSON.parse(cleaned);
|
|
86
|
+
if (!lesson.title || !lesson.slug || !lesson.content) throw new Error('missing fields');
|
|
87
|
+
} catch (e) {
|
|
88
|
+
console.log(`[self-reflect] Failed to parse lesson JSON: ${e.message}`);
|
|
89
|
+
return 0;
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
// Sanitize slug: only lowercase alphanumeric and hyphens
|
|
93
|
+
const slug = (lesson.slug || '').toLowerCase().replace(/[^a-z0-9-]/g, '-').replace(/-+/g, '-').replace(/^-|-$/g, '');
|
|
94
|
+
if (!slug) {
|
|
95
|
+
console.log('[self-reflect] generateLessons: empty slug, skipping');
|
|
96
|
+
return 0;
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
// Prevent duplicates: skip if any existing file already uses this slug
|
|
100
|
+
const existing = fs.readdirSync(lessonsDir).filter(f => f.endsWith(`-${slug}.md`));
|
|
101
|
+
if (existing.length > 0) {
|
|
102
|
+
console.log(`[self-reflect] Lesson '${slug}' already exists (${existing[0]}), skipping.`);
|
|
103
|
+
return 0;
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
const today = new Date().toISOString().slice(0, 10);
|
|
107
|
+
const filename = `${today}-${slug}.md`;
|
|
108
|
+
const filepath = path.join(lessonsDir, filename);
|
|
109
|
+
|
|
110
|
+
const fileContent = `---
|
|
111
|
+
date: ${today}
|
|
112
|
+
source: self-reflect
|
|
113
|
+
corrections: ${corrections.length}
|
|
114
|
+
---
|
|
115
|
+
|
|
116
|
+
# ${lesson.title}
|
|
117
|
+
|
|
118
|
+
${lesson.content}
|
|
119
|
+
`;
|
|
120
|
+
|
|
121
|
+
fs.writeFileSync(filepath, fileContent, 'utf8');
|
|
122
|
+
console.log(`[self-reflect] Lesson written: ${filepath}`);
|
|
123
|
+
return 1;
|
|
124
|
+
}
|
|
125
|
+
|
|
126
|
+
async function run() {
|
|
127
|
+
// Atomic lock
|
|
128
|
+
let lockFd;
|
|
129
|
+
try {
|
|
130
|
+
lockFd = fs.openSync(LOCK_FILE, 'wx');
|
|
131
|
+
fs.writeSync(lockFd, process.pid.toString());
|
|
132
|
+
fs.closeSync(lockFd);
|
|
133
|
+
} catch (e) {
|
|
134
|
+
if (e.code === 'EEXIST') {
|
|
135
|
+
const age = Date.now() - fs.statSync(LOCK_FILE).mtimeMs;
|
|
136
|
+
if (age < 300000) { console.log('[self-reflect] Already running.'); return; }
|
|
137
|
+
fs.unlinkSync(LOCK_FILE);
|
|
138
|
+
try {
|
|
139
|
+
lockFd = fs.openSync(LOCK_FILE, 'wx');
|
|
140
|
+
fs.writeSync(lockFd, process.pid.toString());
|
|
141
|
+
fs.closeSync(lockFd);
|
|
142
|
+
} catch {
|
|
143
|
+
// Another process acquired the lock, or write failed — ensure fd is closed
|
|
144
|
+
try { if (lockFd !== undefined) fs.closeSync(lockFd); } catch { /* ignore */ }
|
|
145
|
+
return;
|
|
146
|
+
}
|
|
147
|
+
} else throw e;
|
|
148
|
+
}
|
|
149
|
+
|
|
150
|
+
try {
|
|
151
|
+
// Read signals from last WINDOW_DAYS days
|
|
152
|
+
if (!fs.existsSync(SIGNAL_FILE)) {
|
|
153
|
+
console.log('[self-reflect] No signal file, skipping.');
|
|
154
|
+
return;
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
const cutoff = Date.now() - WINDOW_DAYS * 24 * 60 * 60 * 1000;
|
|
158
|
+
const lines = fs.readFileSync(SIGNAL_FILE, 'utf8').trim().split('\n').filter(Boolean);
|
|
159
|
+
const recentSignals = lines
|
|
160
|
+
.map(l => { try { return JSON.parse(l); } catch { return null; } })
|
|
161
|
+
.filter(s => s && s.ts && new Date(s.ts).getTime() > cutoff);
|
|
162
|
+
|
|
163
|
+
// Filter to correction + metacognitive signals only
|
|
164
|
+
const correctionSignals = recentSignals.filter(s =>
|
|
165
|
+
s.type === 'correction' || s.type === 'metacognitive'
|
|
166
|
+
);
|
|
167
|
+
|
|
168
|
+
if (correctionSignals.length < 2) {
|
|
169
|
+
console.log(`[self-reflect] Only ${correctionSignals.length} correction signals this week, skipping.`);
|
|
170
|
+
return;
|
|
171
|
+
}
|
|
172
|
+
|
|
173
|
+
// Read current profile for context
|
|
174
|
+
let currentPatterns = '';
|
|
175
|
+
try {
|
|
176
|
+
const yaml = require('js-yaml');
|
|
177
|
+
const profile = yaml.load(fs.readFileSync(BRAIN_FILE, 'utf8')) || {};
|
|
178
|
+
const existing = (profile.growth && profile.growth.patterns) || [];
|
|
179
|
+
if (existing.length > 0) {
|
|
180
|
+
currentPatterns = `Current growth.patterns (avoid repeating):\n${existing.map(p => `- ${p}`).join('\n')}\n\n`;
|
|
181
|
+
}
|
|
182
|
+
} catch { /* non-fatal */ }
|
|
183
|
+
|
|
184
|
+
const signalText = correctionSignals
|
|
185
|
+
.slice(-20) // cap at 20 signals
|
|
186
|
+
.map((s, i) => `${i + 1}. [${s.type}] "${s.prompt}"`)
|
|
187
|
+
.join('\n');
|
|
188
|
+
|
|
189
|
+
const prompt = `你是一个AI自我审视引擎。分析以下用户纠正/元认知信号,找出AI(即你)**系统性**犯错的模式。
|
|
190
|
+
|
|
191
|
+
${currentPatterns}用户纠正信号(最近7天):
|
|
192
|
+
${signalText}
|
|
193
|
+
|
|
194
|
+
任务:找出1-2条AI的系统性问题(不是偶发错误),例如:
|
|
195
|
+
- "经常过度简化用户的技术问题,忽略背景细节"
|
|
196
|
+
- "倾向于在用户还没说完就开始行动,导致方向偏差"
|
|
197
|
+
- "在不确定时倾向于肯定用户,而非直接说不知道"
|
|
198
|
+
|
|
199
|
+
输出格式(JSON数组,最多2条,每条≤40字中文):
|
|
200
|
+
["模式1描述", "模式2描述"]
|
|
201
|
+
|
|
202
|
+
注意:
|
|
203
|
+
- 只输出有充分证据支持的系统性模式
|
|
204
|
+
- 如果证据不足,输出 []
|
|
205
|
+
- 只输出JSON,不要解释`;
|
|
206
|
+
|
|
207
|
+
let distillEnv = {};
|
|
208
|
+
try { distillEnv = buildDistillEnv(); } catch {}
|
|
209
|
+
|
|
210
|
+
let result;
|
|
211
|
+
try {
|
|
212
|
+
result = await Promise.race([
|
|
213
|
+
callHaiku(prompt, distillEnv, 60000),
|
|
214
|
+
// outer safety net in case callHaiku's internal timeout doesn't propagate
|
|
215
|
+
new Promise((_, reject) => setTimeout(() => reject(new Error('timeout')), 90000)),
|
|
216
|
+
]);
|
|
217
|
+
} catch (e) {
|
|
218
|
+
console.log(`[self-reflect] Haiku call failed: ${e.message}`);
|
|
219
|
+
return;
|
|
220
|
+
}
|
|
221
|
+
|
|
222
|
+
// Parse result
|
|
223
|
+
let patterns = [];
|
|
224
|
+
try {
|
|
225
|
+
const cleaned = result.replace(/```json\n?/g, '').replace(/```\n?/g, '').trim();
|
|
226
|
+
const parsed = JSON.parse(cleaned);
|
|
227
|
+
if (Array.isArray(parsed)) {
|
|
228
|
+
patterns = parsed.filter(p => typeof p === 'string' && p.length > 5 && p.length <= 80);
|
|
229
|
+
}
|
|
230
|
+
} catch {
|
|
231
|
+
console.log('[self-reflect] Failed to parse Haiku output.');
|
|
232
|
+
return;
|
|
233
|
+
}
|
|
234
|
+
|
|
235
|
+
// === Generate lessons/ from correction signals (independent of patterns result) ===
|
|
236
|
+
try {
|
|
237
|
+
const lessonsCount = await generateLessons(recentSignals, LESSONS_DIR);
|
|
238
|
+
if (lessonsCount > 0) {
|
|
239
|
+
console.log(`[self-reflect] Generated ${lessonsCount} lesson(s) in ${LESSONS_DIR}`);
|
|
240
|
+
}
|
|
241
|
+
} catch (e) {
|
|
242
|
+
console.log(`[self-reflect] generateLessons failed (non-fatal): ${e.message}`);
|
|
243
|
+
}
|
|
244
|
+
|
|
245
|
+
if (patterns.length === 0) {
|
|
246
|
+
console.log('[self-reflect] No patterns found this week.');
|
|
247
|
+
return;
|
|
248
|
+
}
|
|
249
|
+
|
|
250
|
+
// Merge into growth.patterns (cap at 3, keep newest)
|
|
251
|
+
try {
|
|
252
|
+
const yaml = require('js-yaml');
|
|
253
|
+
const raw = fs.readFileSync(BRAIN_FILE, 'utf8');
|
|
254
|
+
const profile = yaml.load(raw) || {};
|
|
255
|
+
if (!profile.growth) profile.growth = {};
|
|
256
|
+
const existing = Array.isArray(profile.growth.patterns) ? profile.growth.patterns : [];
|
|
257
|
+
// Add new patterns, deduplicate, cap at 3 newest
|
|
258
|
+
const merged = [...existing, ...patterns]
|
|
259
|
+
.filter((p, i, arr) => arr.indexOf(p) === i)
|
|
260
|
+
.slice(-3);
|
|
261
|
+
profile.growth.patterns = merged;
|
|
262
|
+
profile.growth.last_reflection = new Date().toISOString().slice(0, 10);
|
|
263
|
+
|
|
264
|
+
// Preserve locked lines (simple approach: only update growth section)
|
|
265
|
+
const dumped = yaml.dump(profile, { lineWidth: -1 });
|
|
266
|
+
await writeBrainFileSafe(dumped);
|
|
267
|
+
console.log(`[self-reflect] ${patterns.length} pattern(s) written to growth.patterns: ${patterns.join(' | ')}`);
|
|
268
|
+
} catch (e) {
|
|
269
|
+
console.log(`[self-reflect] Failed to write profile: ${e.message}`);
|
|
270
|
+
}
|
|
271
|
+
|
|
272
|
+
} finally {
|
|
273
|
+
try { fs.unlinkSync(LOCK_FILE); } catch {}
|
|
274
|
+
}
|
|
275
|
+
}
|
|
276
|
+
|
|
277
|
+
// CLI entry point: when this file is executed directly (not require()d),
// kick off a reflection pass. Failures are reported and surface as a
// non-zero exit code so schedulers/cron can detect them.
if (require.main === module) {
  (async () => {
    try {
      await run();
      console.log('✅ self-reflect complete');
    } catch (e) {
      console.error(`[self-reflect] Fatal: ${e.message}`);
      process.exit(1);
    }
  })();
}

// Expose run() for programmatic use (e.g. the daemon scheduler).
module.exports = { run };
|