metame-cli 1.6.0 → 1.6.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.js +6 -7
- package/package.json +1 -1
- package/scripts/core/chunker.js +100 -0
- package/scripts/core/embedding.js +225 -0
- package/scripts/core/hybrid-search.js +296 -0
- package/scripts/core/wiki-db.js +144 -3
- package/scripts/daemon-bridges.js +9 -6
- package/scripts/daemon-command-router.js +25 -1
- package/scripts/daemon-default.yaml +31 -0
- package/scripts/daemon-embedding.js +162 -0
- package/scripts/daemon-engine-runtime.js +1 -1
- package/scripts/daemon-health-scan.js +185 -0
- package/scripts/daemon-runtime-lifecycle.js +1 -1
- package/scripts/daemon-task-scheduler.js +5 -3
- package/scripts/daemon-wiki.js +126 -4
- package/scripts/daemon.js +4 -2
- package/scripts/feishu-adapter.js +208 -29
- package/scripts/memory-backfill-chunks.js +92 -0
- package/scripts/memory-search.js +43 -15
- package/scripts/memory-wiki-schema.js +161 -2
- package/scripts/memory.js +15 -0
- package/scripts/providers.js +37 -6
- package/scripts/wiki-cluster.js +121 -0
- package/scripts/wiki-extract.js +171 -0
- package/scripts/wiki-facts.js +351 -0
- package/scripts/wiki-import.js +256 -0
- package/scripts/wiki-reflect-build.js +352 -28
- package/scripts/wiki-reflect-export.js +115 -0
- package/scripts/wiki-reflect.js +34 -1
- package/scripts/wiki-synthesis.js +224 -0
package/scripts/core/wiki-db.js
CHANGED
@@ -83,6 +83,9 @@ function upsertWikiPage(db, {
   raw_source_count = 0,
   topic_tags = '[]',
   word_count = 0,
+  source_type = 'memory',
+  membership_hash = null,
+  cluster_size = null,
 }) {
   const rawSourceIdsStr = typeof raw_source_ids === 'string'
     ? raw_source_ids : JSON.stringify(raw_source_ids);
@@ -105,18 +108,22 @@ function upsertWikiPage(db, {
         raw_source_count = ?,
         topic_tags = ?,
         word_count = ?,
+        source_type = ?,
+        membership_hash = ?,
+        cluster_size = ?,
         updated_at = datetime('now')
       WHERE slug = ?
-    `).run(primary_topic, title, content, rawSourceIdsStr, capsuleRefsStr, raw_source_count, topicTagsStr, word_count, slug);
+    `).run(primary_topic, title, content, rawSourceIdsStr, capsuleRefsStr, raw_source_count, topicTagsStr, word_count, source_type, membership_hash, cluster_size, slug);
   } else {
     const id = `wp_${Date.now()}_${Math.random().toString(36).slice(2, 8)}`;
     db.prepare(`
       INSERT INTO wiki_pages
         (id, slug, primary_topic, title, content, raw_source_ids, capsule_refs,
          raw_source_count, topic_tags, word_count, staleness, new_facts_since_build,
+         source_type, membership_hash, cluster_size,
          created_at, updated_at)
-      VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 0.0, 0, datetime('now'), datetime('now'))
-    `).run(id, slug, primary_topic, title, content, rawSourceIdsStr, capsuleRefsStr, raw_source_count, topicTagsStr, word_count);
+      VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 0.0, 0, ?, ?, ?, datetime('now'), datetime('now'))
+    `).run(id, slug, primary_topic, title, content, rawSourceIdsStr, capsuleRefsStr, raw_source_count, topicTagsStr, word_count, source_type, membership_hash, cluster_size);
   }
 }
 
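The three new columns let a caller distinguish pages compiled from memory (the default) from pages compiled from a document cluster. A minimal usage sketch of the updated upsertWikiPage; the slug, title, and hash values below are invented for illustration and are not part of this diff:

const { upsertWikiPage } = require('./core/wiki-db.js');

upsertWikiPage(db, {
  slug: 'example-cluster',          // illustrative
  primary_topic: 'example',
  title: 'Example Cluster',
  content: '# Example Cluster\n...',
  raw_source_ids: [],
  capsule_refs: [],
  source_type: 'topic_cluster',     // default remains 'memory'
  membership_hash: 'abc123',        // e.g. a hash over member doc ids (illustrative)
  cluster_size: 4,
});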
@@ -284,6 +291,24 @@ function listRecentSessionSummaries(db, { limit = 200 } = {}) {
   `).all(limit);
 }
 
+// ── Timeline ──────────────────────────────────────────────────────────────────
+
+/**
+ * Append a timestamped entry to a wiki page's timeline (evidence trail).
+ * Does NOT touch content (compiled truth) — timeline is append-only.
+ *
+ * @param {object} db
+ * @param {string} slug
+ * @param {string} entry — free-text description of what happened
+ */
+function appendWikiTimeline(db, slug, entry) {
+  const ts = new Date().toISOString().slice(0, 10);
+  const line = `[${ts}] ${entry}`;
+  db.prepare(
+    `UPDATE wiki_pages SET timeline = COALESCE(timeline, '') || ? || char(10), updated_at = datetime('now') WHERE slug = ?`,
+  ).run(line, slug);
+}
+
 // ── Search ────────────────────────────────────────────────────────────────────
 
 /**
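Because the timeline column is plain newline-delimited text, recording an event is a single call. A usage sketch (the slug and entry text are invented for illustration):

const { appendWikiTimeline } = require('./core/wiki-db.js');

// Appends a line like "[2026-04-10] rebuilt page from 4 cluster members"
appendWikiTimeline(db, 'example-cluster', 'rebuilt page from 4 cluster members');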
@@ -311,6 +336,11 @@ function searchWikiAndFacts(db, query, { trackSearch = true } = {}) {
     LIMIT 5
   `).all(safeQuery);
 
+  // Add stale flag to wiki results (staleness >= 0.3 means compiled truth may be outdated)
+  for (const wp of wikiPages) {
+    wp.stale = typeof wp.staleness === 'number' && wp.staleness >= 0.3;
+  }
+
   // 2. FTS5 search memory_items_fts — graceful fallback if table doesn't exist
   let facts = [];
   try {
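A caller can surface that hint directly. A sketch, assuming searchWikiAndFacts returns the wikiPages array it builds here (the query string is invented):

const { wikiPages } = searchWikiAndFacts(db, 'deployment');
for (const wp of wikiPages) {
  if (wp.stale) {
    console.log(`"${wp.title}" may be outdated (staleness ${wp.staleness})`);
  }
}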
@@ -385,6 +415,105 @@ function updateStalenessForTags(db, dirtyTagCounts) {
   }
 }
 
+// ── doc_sources CRUD ──────────────────────────────────────────────────────────
+
+function upsertDocSource(db, { filePath, fileHash, mtimeMs, sizeBytes, fileType,
+                               extractor, extractStatus, extractedTextHash, title, slug }) {
+  const now = new Date().toISOString();
+  const existing = db.prepare('SELECT file_hash, extracted_text_hash FROM doc_sources WHERE file_path=?').get(filePath);
+  const hashChanged = !existing
+    || existing.file_hash !== fileHash
+    || existing.extracted_text_hash !== (extractedTextHash || null);
+
+  db.prepare(`
+    INSERT INTO doc_sources
+      (file_path, file_hash, mtime_ms, size_bytes, extracted_text_hash, file_type, extractor,
+       extract_status, title, slug, status, indexed_at, last_seen_at, content_stale)
+    VALUES (?,?,?,?,?,?,?,?,?,?,'active',?,?,?)
+    ON CONFLICT(file_path) DO UPDATE SET
+      file_hash=excluded.file_hash,
+      mtime_ms=excluded.mtime_ms,
+      size_bytes=excluded.size_bytes,
+      extracted_text_hash=excluded.extracted_text_hash,
+      extractor=excluded.extractor,
+      extract_status=excluded.extract_status,
+      title=excluded.title,
+      status='active',
+      last_seen_at=excluded.last_seen_at,
+      content_stale=CASE
+        WHEN excluded.file_hash != doc_sources.file_hash
+          OR COALESCE(excluded.extracted_text_hash,'') != COALESCE(doc_sources.extracted_text_hash,'')
+        THEN 1
+        ELSE doc_sources.content_stale
+      END
+  `).run(
+    filePath, fileHash, mtimeMs || null, sizeBytes || null,
+    extractedTextHash || null, fileType, extractor || null,
+    extractStatus || 'pending', title || null, slug,
+    now, now, hashChanged ? 1 : 0
+  );
+}
+
+function getDocSourceByPath(db, filePath) {
+  return db.prepare('SELECT * FROM doc_sources WHERE file_path=?').get(filePath) || null;
+}
+
+function getDocSourceBySlug(db, slug) {
+  return db.prepare('SELECT * FROM doc_sources WHERE slug=?').get(slug) || null;
+}
+
+function listStaleDocSources(db) {
+  return db.prepare("SELECT * FROM doc_sources WHERE content_stale=1 AND status='active'").all();
+}
+
+function markDocSourcesMissing(db, seenPaths) {
+  if (seenPaths.length === 0) return;
+  const set = new Set(seenPaths);
+  // Infer the scan directory from the seen paths so we only mark files
+  // within that directory scope as missing (not docs imported from other paths).
+  const path = require('node:path');
+  const dirs = new Set(seenPaths.map(p => path.dirname(p)));
+  const all = db.prepare("SELECT id, file_path FROM doc_sources WHERE status='active'").all();
+  const missingIds = all
+    .filter(r => dirs.has(path.dirname(r.file_path)) && !set.has(r.file_path))
+    .map(r => r.id);
+  if (missingIds.length === 0) return;
+  const ph = missingIds.map(() => '?').join(',');
+  db.prepare(`UPDATE doc_sources SET status='missing' WHERE id IN (${ph})`).run(...missingIds);
+}
+
+function upsertDocPageLink(db, pageSlug, docSourceId, role) {
+  db.prepare(`
+    INSERT OR IGNORE INTO wiki_page_doc_sources (page_slug, doc_source_id, role)
+    VALUES (?, ?, ?)
+  `).run(pageSlug, docSourceId, role);
+}
+
+function getClusterMemberIds(db, pageSlug) {
+  return db.prepare(
+    "SELECT doc_source_id FROM wiki_page_doc_sources WHERE page_slug=? AND role='cluster_member'"
+  ).all(pageSlug).map(r => r.doc_source_id);
+}
+
+function replaceClusterMembers(db, pageSlug, docSourceIds) {
+  db.exec('BEGIN');
+  try {
+    db.prepare("DELETE FROM wiki_page_doc_sources WHERE page_slug=? AND role='cluster_member'").run(pageSlug);
+    const ins = db.prepare("INSERT INTO wiki_page_doc_sources (page_slug, doc_source_id, role) VALUES (?,?,'cluster_member')");
+    for (const id of docSourceIds) ins.run(pageSlug, id);
+    db.exec('COMMIT');
+  } catch (err) {
+    db.exec('ROLLBACK');
+    throw err;
+  }
+}
+
+function listClusterPages(db) {
+  return db.prepare(
+    "SELECT slug, membership_hash, cluster_size FROM wiki_pages WHERE source_type='topic_cluster'"
+  ).all();
+}
+
 module.exports = {
   // wiki_pages
   getWikiPageBySlug,
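Taken together, the doc_sources helpers support a scan, upsert, link lifecycle. A sketch of the intended flow; the file metadata values are invented, and the exact table schema lives in memory-wiki-schema.js (not shown in this hunk):

// 1. Record or refresh a scanned document; content_stale flips to 1 when
//    the file hash or extracted-text hash changes.
upsertDocSource(db, {
  filePath: '/docs/notes.md',      // illustrative
  fileHash: 'deadbeef',            // illustrative
  mtimeMs: Date.now(),
  sizeBytes: 1024,
  fileType: 'md',
  extractor: 'text',
  extractStatus: 'ok',
  extractedTextHash: 'cafebabe',   // illustrative
  title: 'Notes',
  slug: 'notes',
});

// 2. Atomically reconcile a cluster page's membership.
const doc = getDocSourceByPath(db, '/docs/notes.md');
replaceClusterMembers(db, 'example-cluster', [doc.id]);

// 3. Later, pick up documents whose content changed and need re-indexing.
const stale = listStaleDocSources(db);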
@@ -392,6 +521,7 @@ module.exports = {
   getStalePages,
   upsertWikiPage,
   resetPageStaleness,
+  appendWikiTimeline,
   // wiki_topics
   upsertWikiTopic,
   checkTopicThreshold,
@@ -401,4 +531,15 @@ module.exports = {
   searchWikiAndFacts,
   // staleness
   updateStalenessForTags,
+  // doc_sources CRUD
+  upsertDocSource,
+  getDocSourceByPath,
+  getDocSourceBySlug,
+  listStaleDocSources,
+  markDocSourcesMissing,
+  // wiki_page_doc_sources CRUD
+  upsertDocPageLink,
+  getClusterMemberIds,
+  replaceClusterMembers,
+  listClusterPages,
 };
package/scripts/daemon-bridges.js
CHANGED

@@ -736,21 +736,24 @@ function createBridgeStarter(deps) {
   const { createBot } = require('./feishu-adapter.js');
   const bot = createBot(config.feishu);
 
-  //
+  // Credential pre-check is informational only. We always start the WS
+  // pipeline — it has its own network-ready-probe + backoff reconnect, so
+  // even if startup lands in a "just woke / network flaky" window, recovery
+  // is automatic instead of requiring a manual daemon restart.
   try {
    const validation = await bot.validateCredentials();
    if (!validation.ok) {
-      log('ERROR', `Feishu credential check FAILED: ${validation.error}`);
      if (validation.isAuthError) {
-        log('ERROR',
-
+        log('ERROR', `Feishu credential check FAILED (likely bad app_id/app_secret): ${validation.error}`);
+        log('WARN', 'Starting bridge anyway — if this persists, fix ~/.metame/daemon.yaml and restart daemon');
+      } else {
+        log('WARN', `Feishu credential pre-check failed (transient): ${validation.error} — WS pipeline will retry`);
      }
-      log('WARN', 'Feishu credential check failed (possibly network issue) — attempting to start anyway');
    } else {
      log('INFO', 'Feishu credentials validated OK');
    }
  } catch (e) {
-    log('WARN', `Feishu credential pre-check error: ${e.message} —
+    log('WARN', `Feishu credential pre-check error: ${e.message} — WS pipeline will retry`);
  }
 
  try {
package/scripts/daemon-command-router.js
CHANGED

@@ -613,7 +613,31 @@ function createCommandRouter(deps) {
       await bot.sendMessage(chatId, 'Daily token budget exceeded.');
       return;
     }
-    const claudeResult = await askClaude(bot, chatId, text, config, readOnly, senderId);
+
+    // --- "修" shortcut: inject latest health report as context ---
+    let effectiveText = text;
+    if (text.trim() === '修') {
+      const reportFile = require('path').join(require('os').homedir(), '.metame', 'health-report-latest.json');
+      try {
+        const raw = require('fs').readFileSync(reportFile, 'utf8');
+        const report = JSON.parse(raw);
+        const ageMs = Date.now() - new Date(report.generated_at).getTime();
+        if (ageMs < 48 * 60 * 60 * 1000) { // only if report is within 48h
+          const issues = (report.analysis && report.analysis.issues || [])
+            .map(i => `- ${i.name}(×${i.count}):${i.fix}`)
+            .join('\n');
+          effectiveText = `请根据以下 daemon 健康报告修复问题:\n\n` +
+            `摘要:${report.analysis.summary}\n` +
+            `建议行动:${report.analysis.action}\n\n` +
+            `问题清单:\n${issues}\n\n` +
+            `报告时间:${report.generated_at}\n` +
+            `请先读取 ~/.metame/daemon.log 确认问题,然后修复 /Users/yaron/AGI/MetaMe/scripts/ 中的对应代码。`;
+          log('INFO', `[health-scan] "修" shortcut: injected health report (${report.total_errors} errors)`);
+        }
+      } catch { /* no report or stale — fall through with original text */ }
+    }
+
+    const claudeResult = await askClaude(bot, chatId, effectiveText, config, readOnly, senderId);
     const claudeFailed = !!(claudeResult && claudeResult.ok === false);
     const claudeAborted = !!(claudeResult && claudeResult.error === 'Stopped by user');
     if (claudeFailed && !claudeAborted && !macLocalFirst && macFallbackEnabled && allowLocalMacControl) {
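The report file this shortcut reads is written by the new daemon-health-scan.js (below). Its shape, per that script's run() function; the values here are invented examples:

{
  "generated_at": "2026-04-10T00:30:00.000Z",
  "total_errors": 17,
  "unique_types": 4,
  "analysis": {
    "summary": "...",
    "severity": "medium",
    "issues": [
      { "name": "...", "count": 9, "cause": "...", "fix": "..." }
    ],
    "action": "..."
  },
  "raw_grouped": [ { "key": "...", "count": 9 } ]
}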
package/scripts/daemon-default.yaml
CHANGED

@@ -107,6 +107,27 @@ heartbeat:
     notify: false
     enabled: true
 
+  # wiki-sync: at 02:30 daily, sync all wiki pages (memory topics, doc/cluster pages, decisions/lessons) to the Obsidian vault
+  # Prerequisite: daemon.wiki_output_dir must be configured, e.g. ~/Documents/ObsidianVault/MetaMe/wiki
+  - name: wiki-sync
+    type: script
+    command: >-
+      node -e "
+      const path=require('path'),os=require('os');
+      const {DatabaseSync}=require('node:sqlite');
+      const {runWikiReflect}=require(path.join(os.homedir(),'.metame','wiki-reflect'));
+      const providers=require(path.join(os.homedir(),'.metame','providers'));
+      const db=new DatabaseSync(path.join(os.homedir(),'.metame','memory.db'));
+      const outDir=path.join(os.homedir(),'Documents/ObsidianVault/MetaMe/wiki');
+      runWikiReflect(db,{providers,outputDir:outDir}).then(r=>{
+      console.log('wiki-sync done',JSON.stringify(r));db.close();
+      }).catch(e=>{console.error(e.message);db.close();process.exit(1);});
+      "
+    at: "02:30"
+    require_idle: true
+    notify: false
+    enabled: false  # set to true after verifying wiki_output_dir is correct
+
   # Memory index: update ~/.metame/memory/INDEX.md daily at 01:30
   - name: memory-index
     type: script
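For readability, the inline node -e one-liner in the wiki-sync task above is equivalent to this standalone script; same calls and the same hard-coded paths, just unpacked:

// wiki-sync.js (equivalent of the inline command above)
const path = require('path'), os = require('os');
const { DatabaseSync } = require('node:sqlite');
const { runWikiReflect } = require(path.join(os.homedir(), '.metame', 'wiki-reflect'));
const providers = require(path.join(os.homedir(), '.metame', 'providers'));

const db = new DatabaseSync(path.join(os.homedir(), '.metame', 'memory.db'));
const outDir = path.join(os.homedir(), 'Documents/ObsidianVault/MetaMe/wiki');

runWikiReflect(db, { providers, outputDir: outDir })
  .then(r => { console.log('wiki-sync done', JSON.stringify(r)); db.close(); })
  .catch(e => { console.error(e.message); db.close(); process.exit(1); });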
@@ -116,6 +137,16 @@ heartbeat:
     notify: false
     enabled: true
 
+  # Embedding index: consume embedding_queue and generate vector embeddings, 30-minute cooldown
+  - name: embedding-index
+    type: script
+    command: node ~/.metame/daemon-embedding.js
+    interval: 30m
+    timeout: 600
+    require_idle: false
+    notify: false
+    enabled: true
+
 # Legacy flat tasks (no project isolation). New tasks should go under projects: above.
 # Examples — uncomment or add your own:
 #
package/scripts/daemon-embedding.js
ADDED

@@ -0,0 +1,162 @@
+#!/usr/bin/env node
+
+'use strict';
+
+/**
+ * daemon-embedding.js — Embedding queue consumer
+ *
+ * Processes pending items in embedding_queue:
+ * 1. Reads batch from queue (attempts < 3)
+ * 2. Fetches text from content_chunks
+ * 3. Calls OpenAI embedding API
+ * 4. Writes BLOB + metadata back to content_chunks
+ * 5. Deletes completed queue rows; increments attempts on failure
+ *
+ * Designed to run as heartbeat task (interval: 30min) or post-wiki-reflect trigger.
+ * Graceful degradation: no OPENAI_API_KEY → exits immediately, no error.
+ */
+
+const path = require('path');
+const os = require('os');
+const fs = require('fs');
+
+const HOME = os.homedir();
+const METAME_DIR = path.join(HOME, '.metame');
+const DB_PATH = path.join(METAME_DIR, 'memory.db');
+const LOCK_FILE = path.join(METAME_DIR, 'daemon-embedding.lock');
+const LOG_FILE = path.join(METAME_DIR, 'embedding_log.jsonl');
+const LOCK_TIMEOUT_MS = 10 * 60 * 1000;
+const MAX_BATCH = 50;
+
+function loadModule(name) {
+  const candidates = [
+    path.join(HOME, '.metame', name),
+    path.join(__dirname, name),
+  ];
+  for (const p of candidates) {
+    try { return require(p); } catch { }
+  }
+  return null;
+}
+
+async function main() {
+  const embedding = loadModule('core/embedding');
+  if (!embedding || !embedding.isEmbeddingAvailable()) {
+    // No API key — skip silently
+    return;
+  }
+
+  // Atomic lock acquisition
+  try {
+    const fd = fs.openSync(LOCK_FILE, 'wx');
+    fs.writeFileSync(fd, String(process.pid));
+    fs.closeSync(fd);
+  } catch {
+    // Lock exists — check if stale
+    try {
+      const lockAge = Date.now() - fs.statSync(LOCK_FILE).mtimeMs;
+      if (lockAge < LOCK_TIMEOUT_MS) return; // another instance running
+      fs.unlinkSync(LOCK_FILE);
+      fs.openSync(LOCK_FILE, 'wx');
+      fs.writeFileSync(LOCK_FILE, String(process.pid));
+    } catch {
+      return; // race lost or fs error
+    }
+  }
+
+  let db;
+  try {
+    const { DatabaseSync } = require('node:sqlite');
+    db = new DatabaseSync(DB_PATH);
+    db.exec('PRAGMA journal_mode = WAL');
+    db.exec('PRAGMA busy_timeout = 3000');
+
+    // Ensure schema exists
+    try {
+      const { applyWikiSchema } = loadModule('memory-wiki-schema') || {};
+      if (applyWikiSchema) applyWikiSchema(db);
+    } catch { }
+
+    // Fetch pending queue items
+    const pending = db.prepare(`
+      SELECT eq.id AS queue_id, eq.item_type, eq.item_id, eq.model, eq.attempts,
+             cc.chunk_text
+      FROM embedding_queue eq
+      JOIN content_chunks cc ON eq.item_id = cc.id
+      WHERE eq.item_type = 'chunk'
+        AND eq.attempts < 3
+      ORDER BY eq.created_at ASC
+      LIMIT ?
+    `).all(MAX_BATCH);
+
+    if (pending.length === 0) return;
+
+    // Batch embed
+    const texts = pending.map(p => p.chunk_text);
+    let embeddings;
+    try {
+      embeddings = await embedding.batchEmbed(texts);
+    } catch (err) {
+      // API failure — increment attempts for all
+      const updateAttempts = db.prepare(
+        'UPDATE embedding_queue SET attempts = attempts + 1, last_error = ? WHERE id = ?',
+      );
+      for (const p of pending) {
+        updateAttempts.run(err.message.slice(0, 500), p.queue_id);
+      }
+      appendLog({ ts: new Date().toISOString(), error: err.message, batch_size: pending.length });
+      return;
+    }
+
+    // Write results
+    const updateChunk = db.prepare(`
+      UPDATE content_chunks
+      SET embedding = ?, embedding_model = ?, embedding_dim = ?
+      WHERE id = ?
+    `);
+    const deleteQueue = db.prepare('DELETE FROM embedding_queue WHERE id = ?');
+    const updateAttempts = db.prepare(
+      'UPDATE embedding_queue SET attempts = attempts + 1, last_error = ? WHERE id = ?',
+    );
+
+    let success = 0;
+    let failed = 0;
+    db.prepare('BEGIN').run();
+    try {
+      for (let i = 0; i < pending.length; i++) {
+        const emb = embeddings[i];
+        if (emb) {
+          const buf = embedding.embeddingToBuffer(emb);
+          updateChunk.run(buf, embedding.MODEL, embedding.DIMENSIONS, pending[i].item_id);
+          deleteQueue.run(pending[i].queue_id);
+          success++;
+        } else {
+          updateAttempts.run('null embedding returned', pending[i].queue_id);
+          failed++;
+        }
+      }
+      db.prepare('COMMIT').run();
+    } catch (err) {
+      try { db.prepare('ROLLBACK').run(); } catch { }
+      appendLog({ ts: new Date().toISOString(), error: err.message, batch_size: pending.length });
+      return;
+    }
+
+    appendLog({ ts: new Date().toISOString(), success, failed, batch_size: pending.length });
+
+  } finally {
+    if (db) try { db.close(); } catch { }
+    try { fs.unlinkSync(LOCK_FILE); } catch { }
+  }
+}
+
+function appendLog(entry) {
+  try {
+    fs.appendFileSync(LOG_FILE, JSON.stringify(entry) + '\n');
+  } catch { }
+}
+
+main().catch(err => {
+  appendLog({ ts: new Date().toISOString(), error: err.message });
+  try { fs.unlinkSync(LOCK_FILE); } catch { }
+});
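The producer side of embedding_queue is not shown in this hunk (it presumably lives in chunker.js or memory-wiki-schema.js), but given the columns the consumer reads above, enqueueing a chunk plausibly looks like the sketch below. The column list and the model name are assumptions, not confirmed by this diff:

// Assumed schema: embedding_queue(item_type, item_id, model, attempts, created_at, last_error)
// and content_chunks(id, chunk_text, embedding, embedding_model, embedding_dim).
db.prepare(`
  INSERT INTO embedding_queue (item_type, item_id, model, attempts, created_at)
  VALUES ('chunk', ?, ?, 0, datetime('now'))
`).run(chunkId, 'text-embedding-3-small'); // model name is a guess; see core/embedding.js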
package/scripts/daemon-health-scan.js
ADDED

@@ -0,0 +1,185 @@
+'use strict';
+
+/**
+ * daemon-health-scan.js — Daily Daemon Health Report
+ *
+ * Reads ~/.metame/daemon.log for last 24h ERROR/WARN entries,
+ * calls LLM (Haiku) to analyze root causes and propose fixes,
+ * saves report to ~/.metame/health-report-latest.json,
+ * then prints a formatted summary to stdout.
+ *
+ * Heartbeat: daily at 08:30 via daemon.yaml
+ * notify: true → daemon sends stdout to Feishu automatically.
+ */
+
+const fs = require('fs');
+const path = require('path');
+const os = require('os');
+const { callHaiku, buildDistillEnv } = require('./providers');
+
+const HOME = os.homedir();
+const LOG_FILE = path.join(HOME, '.metame', 'daemon.log');
+const REPORT_FILE = path.join(HOME, '.metame', 'health-report-latest.json');
+const WINDOW_MS = 24 * 60 * 60 * 1000;
+const MAX_UNIQUE_ERRORS = 8;
+const MAX_LINE_LEN = 280;
+
+// Match log lines that contain an ERROR or WARN level tag
+const LEVEL_PATTERN = /\[(ERROR|WARN)\]/;
+// Extract ISO timestamp from log line prefix like [2026-04-10T08:00:00
+const TS_PATTERN = /^\[(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2})/;
+
+function readRecentErrors(logFile, windowMs) {
+  let content;
+  try {
+    content = fs.readFileSync(logFile, 'utf8');
+  } catch {
+    return [];
+  }
+
+  const cutoff = Date.now() - windowMs;
+  const lines = content.split('\n').filter(Boolean);
+  const result = [];
+
+  for (const line of lines) {
+    if (!LEVEL_PATTERN.test(line)) continue;
+    const tsMatch = line.match(TS_PATTERN);
+    if (tsMatch) {
+      const ts = new Date(tsMatch[1]).getTime();
+      if (ts < cutoff) continue;
+    }
+    result.push(line.slice(0, MAX_LINE_LEN));
+  }
+
+  return result;
+}
+
+function groupErrors(lines) {
+  const counts = new Map();
+  for (const line of lines) {
+    // Normalize numbers to reduce noise, use first 100 chars as bucket key
+    const key = line.slice(0, 100).replace(/\d+/g, 'N').replace(/[a-f0-9]{8,}/gi, 'HASH');
+    counts.set(key, (counts.get(key) || 0) + 1);
+  }
+  // Sort by frequency descending
+  return Array.from(counts.entries())
+    .sort((a, b) => b[1] - a[1])
+    .slice(0, MAX_UNIQUE_ERRORS)
+    .map(([key, count]) => ({ key: key.trim(), count }));
+}
+
+async function analyzeWithLLM(grouped, totalCount) {
+  const errorList = grouped
+    .map(({ key, count }) => `[×${count}] ${key}`)
+    .join('\n');
+
+  const prompt = `你是 MetaMe daemon 的健康分析师。以下是过去24小时的错误/警告日志(已去重,按频次排序):
+
+${errorList}
+
+请分析并以 JSON 格式回复:
+{
+  "summary": "一句话总结(20字以内)",
+  "severity": "low|medium|high",
+  "issues": [
+    {
+      "name": "问题名称(10字以内)",
+      "count": 频次,
+      "cause": "根因(30字以内)",
+      "fix": "修复建议(50字以内)"
+    }
+  ],
+  "action": "最紧迫的下一步行动(30字以内)"
+}
+
+severity 判断:high=影响功能/数据/重复崩溃,medium=有异常但仍可运行,low=轻微警告。
+只输出 JSON,不要解释。`;
+
+  let distillEnv = {};
+  try { distillEnv = buildDistillEnv(); } catch { /* ignore */ }
+
+  try {
+    const raw = await Promise.race([
+      callHaiku(prompt, distillEnv, 60000),
+      new Promise((_, reject) => setTimeout(() => reject(new Error('llm_timeout')), 90000)),
+    ]);
+    const cleaned = raw.replace(/```json\n?/g, '').replace(/```\n?/g, '').trim();
+    const parsed = JSON.parse(cleaned);
+    // Basic validation
+    if (!parsed.summary || !parsed.severity || !Array.isArray(parsed.issues)) {
+      throw new Error('invalid structure');
+    }
+    return parsed;
+  } catch {
+    // Fallback: no LLM
+    return {
+      summary: `发现 ${totalCount} 条错误/警告`,
+      severity: 'medium',
+      issues: grouped.slice(0, 5).map(({ key, count }) => ({
+        name: key.slice(0, 30),
+        count,
+        cause: '待分析',
+        fix: '手动检查 daemon.log',
+      })),
+      action: '手动检查 ~/.metame/daemon.log',
+    };
+  }
+}
+
+function formatReport(analysis, totalCount, uniqueTypes) {
+  const emoji = { low: '🟡', medium: '🟠', high: '🔴' }[analysis.severity] || '🟠';
+  const date = new Date().toLocaleDateString('zh-CN', { month: 'long', day: 'numeric' });
+
+  const issueLines = (analysis.issues || []).slice(0, 5).map(issue =>
+    `• ${issue.name}(×${issue.count})\n  根因:${issue.cause}\n  建议:${issue.fix}`
+  ).join('\n\n');
+
+  return [
+    `${emoji} Daemon 健康报告 · ${date}`,
+    ``,
+    `📊 过去24h:${totalCount} 条错误/警告,${uniqueTypes} 种类型`,
+    `📝 摘要:${analysis.summary}`,
+    ``,
+    `🔍 问题详情:`,
+    issueLines,
+    ``,
+    `⚡ 建议:${analysis.action}`,
+    ``,
+    `---`,
+    `需要修复?回复「修」,我来处理。`,
+  ].join('\n');
+}
+
+async function run() {
+  const errorLines = readRecentErrors(LOG_FILE, WINDOW_MS);
+
+  if (errorLines.length === 0) {
+    console.log('✅ Daemon 健康正常 · 过去24小时无错误/警告');
+    return;
+  }
+
+  const grouped = groupErrors(errorLines);
+  const analysis = await analyzeWithLLM(grouped, errorLines.length);
+
+  // Save full report for "修" handler to load
+  const report = {
+    generated_at: new Date().toISOString(),
+    total_errors: errorLines.length,
+    unique_types: grouped.length,
+    analysis,
+    raw_grouped: grouped,
+  };
+
+  try {
+    fs.writeFileSync(REPORT_FILE, JSON.stringify(report, null, 2), 'utf8');
+  } catch (e) {
+    process.stderr.write(`[health-scan] failed to write report: ${e.message}\n`);
+  }
+
+  console.log(formatReport(analysis, errorLines.length, grouped.length));
+}
+
+run().catch(e => {
+  process.stderr.write(`[daemon-health-scan] fatal: ${e.message}\n`);
+  process.exit(1);
+});
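Per its docblock, this script is meant to run as a daily heartbeat task. A plausible daemon.yaml entry, mirroring the task format in daemon-default.yaml above (this snippet is not itself part of the diff):

  - name: health-scan
    type: script
    command: node ~/.metame/daemon-health-scan.js
    at: "08:30"
    notify: true    # daemon forwards stdout (the formatted report) to Feishu
    enabled: true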
package/scripts/daemon-runtime-lifecycle.js
CHANGED

@@ -153,7 +153,7 @@ function setupRuntimeWatchers(deps) {
     refreshLogMaxSize(newConfig);
     const timer = getHeartbeatTimer();
     if (timer) clearInterval(timer);
-    setHeartbeatTimer(startHeartbeat(newConfig, notifyFn, notifyPersonalFn));
+    setHeartbeatTimer(startHeartbeat(newConfig, notifyFn, notifyPersonalFn, adminNotifyFn));
     const { general, project } = getAllTasks(newConfig);
     const totalCount = general.length + project.length;
     log('INFO', `Config reloaded: ${totalCount} tasks (${project.length} in projects)`);