@geravant/sinain 1.0.19 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +10 -1
- package/cli.js +176 -0
- package/install.js +11 -2
- package/launcher.js +622 -0
- package/openclaw.plugin.json +4 -0
- package/pack-prepare.js +48 -0
- package/package.json +24 -5
- package/sense_client/README.md +82 -0
- package/sense_client/__init__.py +1 -0
- package/sense_client/__main__.py +462 -0
- package/sense_client/app_detector.py +54 -0
- package/sense_client/app_detector_win.py +83 -0
- package/sense_client/capture.py +215 -0
- package/sense_client/capture_win.py +88 -0
- package/sense_client/change_detector.py +86 -0
- package/sense_client/config.py +64 -0
- package/sense_client/gate.py +145 -0
- package/sense_client/ocr.py +347 -0
- package/sense_client/privacy.py +65 -0
- package/sense_client/requirements.txt +13 -0
- package/sense_client/roi_extractor.py +84 -0
- package/sense_client/sender.py +173 -0
- package/sense_client/tests/__init__.py +0 -0
- package/sense_client/tests/test_stream1_optimizations.py +234 -0
- package/setup-overlay.js +82 -0
- package/sinain-agent/.env.example +17 -0
- package/sinain-agent/CLAUDE.md +80 -0
- package/sinain-agent/mcp-config.json +12 -0
- package/sinain-agent/run.sh +248 -0
- package/sinain-core/.env.example +93 -0
- package/sinain-core/package-lock.json +552 -0
- package/sinain-core/package.json +21 -0
- package/sinain-core/src/agent/analyzer.ts +366 -0
- package/sinain-core/src/agent/context-window.ts +172 -0
- package/sinain-core/src/agent/loop.ts +404 -0
- package/sinain-core/src/agent/situation-writer.ts +187 -0
- package/sinain-core/src/agent/traits.ts +520 -0
- package/sinain-core/src/audio/capture-spawner-macos.ts +44 -0
- package/sinain-core/src/audio/capture-spawner-win.ts +37 -0
- package/sinain-core/src/audio/capture-spawner.ts +14 -0
- package/sinain-core/src/audio/pipeline.ts +335 -0
- package/sinain-core/src/audio/transcription-local.ts +141 -0
- package/sinain-core/src/audio/transcription.ts +278 -0
- package/sinain-core/src/buffers/feed-buffer.ts +71 -0
- package/sinain-core/src/buffers/sense-buffer.ts +425 -0
- package/sinain-core/src/config.ts +245 -0
- package/sinain-core/src/escalation/escalation-slot.ts +136 -0
- package/sinain-core/src/escalation/escalator.ts +812 -0
- package/sinain-core/src/escalation/message-builder.ts +323 -0
- package/sinain-core/src/escalation/openclaw-ws.ts +726 -0
- package/sinain-core/src/escalation/scorer.ts +166 -0
- package/sinain-core/src/index.ts +507 -0
- package/sinain-core/src/learning/feedback-store.ts +253 -0
- package/sinain-core/src/learning/signal-collector.ts +218 -0
- package/sinain-core/src/log.ts +24 -0
- package/sinain-core/src/overlay/commands.ts +126 -0
- package/sinain-core/src/overlay/ws-handler.ts +267 -0
- package/sinain-core/src/privacy/index.ts +18 -0
- package/sinain-core/src/privacy/presets.ts +40 -0
- package/sinain-core/src/privacy/redact.ts +92 -0
- package/sinain-core/src/profiler.ts +181 -0
- package/sinain-core/src/recorder.ts +186 -0
- package/sinain-core/src/server.ts +417 -0
- package/sinain-core/src/trace/trace-store.ts +73 -0
- package/sinain-core/src/trace/tracer.ts +94 -0
- package/sinain-core/src/types.ts +427 -0
- package/sinain-core/src/util/dedup.ts +48 -0
- package/sinain-core/src/util/task-store.ts +84 -0
- package/sinain-core/tsconfig.json +18 -0
- package/sinain-knowledge/data/git-store.ts +2 -0
- package/sinain-mcp-server/index.ts +337 -0
- package/sinain-mcp-server/package.json +19 -0
- package/sinain-mcp-server/tsconfig.json +15 -0
|
@@ -0,0 +1,166 @@
|
|
|
1
|
+
import type { ContextWindow, EscalationMode } from "../types.js";
|
|
2
|
+
|
|
3
|
+
/**
 * Score-based escalation decision with documented thresholds.
 *
 * Rationale:
 * - Errors are most actionable → +3 (user likely wants help)
 * - Questions need answers → +2 (user explicitly asking)
 * - Code issues are advisory → +1 (TODO/FIXME/deprecated)
 * - Rapid app switching = confusion → +1 (user may be lost)
 *
 * Threshold: 3 (an error alone triggers; a question + code issue triggers)
 *
 * Each category is counted at most once per evaluation (the scan stops at
 * the first matching pattern in a category), so the maximum total is 3+2+1+1.
 */
export const ESCALATION_SCORES = {
  error: 3,
  question: 2,
  codeIssue: 1,
  appChurn: 1,
} as const;

// Minimum calculateEscalationScore total that triggers a selective-mode escalation.
export const ESCALATION_THRESHOLD = 3;

/**
 * Error patterns with word boundary matching.
 * Using regex with \b prevents false positives like "error-free" or "no-fail".
 * NOTE(review): a plain \b still treats "-" as a boundary, so "error-free"
 * would actually match "error" — see matchesWordBoundary; confirm intent.
 * Patterns must be lowercase: they are matched against lowercased text.
 */
const ERROR_PATTERNS = [
  "error", "failed", "failure", "exception", "crash", "traceback",
  "typeerror", "referenceerror", "syntaxerror", "cannot read", "undefined is not",
  "exit code", "segfault", "panic", "fatal", "enoent",
];

/**
 * Question patterns for detecting when user needs help.
 * Checked in audio transcripts and screen OCR.
 * Matched with plain substring includes() (no word boundaries), lowercase only.
 */
const QUESTION_PATTERNS = [
  "how do i", "how to", "what if", "why is", "help me",
  "not working", "stuck", "confused", "any ideas", "suggestions",
];

// Advisory code-smell markers; matched as plain lowercase substrings in the digest.
const CODE_ISSUE_PATTERNS = [
  "todo", "fixme", "hack", "workaround", "deprecated",
];
/**
|
|
47
|
+
* Check if pattern matches with word boundaries.
|
|
48
|
+
* Prevents false positives like "error-free" matching "error".
|
|
49
|
+
*/
|
|
50
|
+
function matchesWordBoundary(text: string, pattern: string): boolean {
|
|
51
|
+
const regex = new RegExp(`\\b${pattern}\\b`, 'i');
|
|
52
|
+
return regex.test(text);
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
/** Result of scoring a digest + context window. */
export interface EscalationScore {
  // Sum of the per-category scores (see ESCALATION_SCORES); max 7.
  total: number;
  // Human-readable contributors, e.g. "error:traceback",
  // "question:audio:how do i", "codeIssue:todo", "appChurn:5apps".
  reasons: string[];
}
/**
|
|
61
|
+
* Calculate escalation score for a given digest and context window.
|
|
62
|
+
* Returns the score and the reasons that contributed.
|
|
63
|
+
*/
|
|
64
|
+
export function calculateEscalationScore(
|
|
65
|
+
digest: string,
|
|
66
|
+
contextWindow: ContextWindow,
|
|
67
|
+
): EscalationScore {
|
|
68
|
+
const digestLower = digest.toLowerCase();
|
|
69
|
+
let total = 0;
|
|
70
|
+
const reasons: string[] = [];
|
|
71
|
+
|
|
72
|
+
// Error indicators (using word boundaries to avoid false positives like "error-free")
|
|
73
|
+
for (const p of ERROR_PATTERNS) {
|
|
74
|
+
if (matchesWordBoundary(digestLower, p)) {
|
|
75
|
+
total += ESCALATION_SCORES.error;
|
|
76
|
+
reasons.push(`error:${p}`);
|
|
77
|
+
break;
|
|
78
|
+
}
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
// Question/help indicators in audio
|
|
82
|
+
let questionDetected = false;
|
|
83
|
+
for (const item of contextWindow.audio) {
|
|
84
|
+
if (questionDetected) break;
|
|
85
|
+
const text = (item.text || "").toLowerCase();
|
|
86
|
+
for (const p of QUESTION_PATTERNS) {
|
|
87
|
+
if (text.includes(p)) {
|
|
88
|
+
total += ESCALATION_SCORES.question;
|
|
89
|
+
reasons.push(`question:audio:${p}`);
|
|
90
|
+
questionDetected = true;
|
|
91
|
+
break;
|
|
92
|
+
}
|
|
93
|
+
}
|
|
94
|
+
}
|
|
95
|
+
|
|
96
|
+
// Question/help indicators in screen OCR (catches typed questions in chat/search)
|
|
97
|
+
if (!questionDetected) {
|
|
98
|
+
for (const item of contextWindow.screen) {
|
|
99
|
+
if (questionDetected) break;
|
|
100
|
+
const ocrText = (item.ocr || "").toLowerCase();
|
|
101
|
+
for (const p of QUESTION_PATTERNS) {
|
|
102
|
+
if (ocrText.includes(p)) {
|
|
103
|
+
total += ESCALATION_SCORES.question;
|
|
104
|
+
reasons.push(`question:ocr:${p}`);
|
|
105
|
+
questionDetected = true;
|
|
106
|
+
break;
|
|
107
|
+
}
|
|
108
|
+
}
|
|
109
|
+
}
|
|
110
|
+
}
|
|
111
|
+
|
|
112
|
+
// Code issue indicators
|
|
113
|
+
for (const p of CODE_ISSUE_PATTERNS) {
|
|
114
|
+
if (digestLower.includes(p)) {
|
|
115
|
+
total += ESCALATION_SCORES.codeIssue;
|
|
116
|
+
reasons.push(`codeIssue:${p}`);
|
|
117
|
+
break;
|
|
118
|
+
}
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
// App churn
|
|
122
|
+
if (contextWindow.appHistory.length >= 4) {
|
|
123
|
+
total += ESCALATION_SCORES.appChurn;
|
|
124
|
+
reasons.push(`appChurn:${contextWindow.appHistory.length}apps`);
|
|
125
|
+
}
|
|
126
|
+
|
|
127
|
+
return { total, reasons };
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
/**
|
|
131
|
+
* Decide whether to escalate based on mode, score, cooldown, dedup, and stale timer.
|
|
132
|
+
*/
|
|
133
|
+
export function shouldEscalate(
|
|
134
|
+
digest: string,
|
|
135
|
+
hud: string,
|
|
136
|
+
contextWindow: ContextWindow,
|
|
137
|
+
mode: EscalationMode,
|
|
138
|
+
lastEscalationTs: number,
|
|
139
|
+
cooldownMs: number,
|
|
140
|
+
lastEscalatedDigest: string,
|
|
141
|
+
staleMs: number = 0,
|
|
142
|
+
): { escalate: boolean; score: EscalationScore; stale: boolean } {
|
|
143
|
+
const score = calculateEscalationScore(digest, contextWindow);
|
|
144
|
+
|
|
145
|
+
if (mode === "off") return { escalate: false, score, stale: false };
|
|
146
|
+
|
|
147
|
+
// Cooldown check
|
|
148
|
+
if (Date.now() - lastEscalationTs < cooldownMs) return { escalate: false, score, stale: false };
|
|
149
|
+
|
|
150
|
+
// Stale override: force escalation after prolonged silence (even when idle)
|
|
151
|
+
if (staleMs > 0 && Date.now() - lastEscalationTs > staleMs) {
|
|
152
|
+
return { escalate: true, score, stale: true };
|
|
153
|
+
}
|
|
154
|
+
|
|
155
|
+
// Don't escalate idle
|
|
156
|
+
if (hud === "Idle" || hud === "\u2014") return { escalate: false, score, stale: false };
|
|
157
|
+
|
|
158
|
+
// Focus mode: always escalate (even if digest unchanged)
|
|
159
|
+
if (mode === "focus" || mode === "rich") return { escalate: true, score, stale: false };
|
|
160
|
+
|
|
161
|
+
// Selective mode: dedup identical digests
|
|
162
|
+
if (digest === lastEscalatedDigest) return { escalate: false, score, stale: false };
|
|
163
|
+
|
|
164
|
+
// Selective mode: score-based
|
|
165
|
+
return { escalate: score.total >= ESCALATION_THRESHOLD, score, stale: false };
|
|
166
|
+
}
|
|
@@ -0,0 +1,507 @@
|
|
|
1
|
+
import { loadConfig } from "./config.js";
|
|
2
|
+
import { FeedBuffer } from "./buffers/feed-buffer.js";
|
|
3
|
+
import { SenseBuffer } from "./buffers/sense-buffer.js";
|
|
4
|
+
import { WsHandler } from "./overlay/ws-handler.js";
|
|
5
|
+
import { setupCommands } from "./overlay/commands.js";
|
|
6
|
+
import { AudioPipeline } from "./audio/pipeline.js";
|
|
7
|
+
import type { CaptureSpawner } from "./audio/capture-spawner.js";
|
|
8
|
+
import { TranscriptionService } from "./audio/transcription.js";
|
|
9
|
+
import { AgentLoop } from "./agent/loop.js";
|
|
10
|
+
import { TraitEngine, loadTraitRoster } from "./agent/traits.js";
|
|
11
|
+
import { shortAppName } from "./agent/context-window.js";
|
|
12
|
+
import { Escalator } from "./escalation/escalator.js";
|
|
13
|
+
import { Recorder } from "./recorder.js";
|
|
14
|
+
import { Tracer } from "./trace/tracer.js";
|
|
15
|
+
import { TraceStore } from "./trace/trace-store.js";
|
|
16
|
+
import { FeedbackStore } from "./learning/feedback-store.js";
|
|
17
|
+
import { SignalCollector } from "./learning/signal-collector.js";
|
|
18
|
+
import { createAppServer } from "./server.js";
|
|
19
|
+
import { Profiler } from "./profiler.js";
|
|
20
|
+
import type { SenseEvent, EscalationMode, FeedItem } from "./types.js";
|
|
21
|
+
import { isDuplicateTranscript, bigramSimilarity } from "./util/dedup.js";
|
|
22
|
+
import { log, warn, error } from "./log.js";
|
|
23
|
+
import { initPrivacy, levelFor, applyLevel } from "./privacy/index.js";
|
|
24
|
+
|
|
25
|
+
// Log tag prefixed to every log/warn/error line emitted by this entry module.
const TAG = "core";
/**
 * Entry point for sinain-core.
 *
 * Boot sequence: load config → construct buffers and services → wire event
 * handlers between them → start the HTTP+WS server, profiler, escalator,
 * agent loop, and audio pipelines → install graceful-shutdown handlers.
 * All cross-module wiring lives here so the individual modules stay
 * decoupled (they communicate via the feed/sense buffers, the escalator,
 * and the overlay WS handler).
 */
async function main() {
  log(TAG, "sinain-core starting...");

  // ── Load config ──
  const config = loadConfig();
  log(TAG, `port: ${config.port}`);
  log(TAG, `audio: device=${config.audioConfig.device} cmd=${config.audioConfig.captureCommand} chunk=${config.audioConfig.chunkDurationMs}ms`);
  log(TAG, `mic: enabled=${config.micEnabled} device=${config.micConfig.device} cmd=${config.micConfig.captureCommand}`);
  log(TAG, `transcription: model=${config.transcriptionConfig.geminiModel}`);
  log(TAG, `agent: model=${config.agentConfig.model} debounce=${config.agentConfig.debounceMs}ms max=${config.agentConfig.maxIntervalMs}ms`);
  log(TAG, `escalation: mode=${config.escalationConfig.mode} cooldown=${config.escalationConfig.cooldownMs}ms stale=${config.escalationConfig.staleMs}ms`);
  log(TAG, `openclaw: ws=${config.openclawConfig.gatewayWsUrl} http=${config.openclawConfig.hookUrl}`);
  log(TAG, `situation: ${config.situationMdPath}`);
  log(TAG, `tracing: enabled=${config.traceEnabled} dir=${config.traceDir}`);
  log(TAG, `learning: enabled=${config.learningConfig.enabled} dir=${config.learningConfig.feedbackDir}`);

  // ── Initialize privacy ──
  initPrivacy(config.privacyConfig);
  log(TAG, `privacy: mode=${config.privacyConfig.mode}`);

  // ── Initialize core buffers (single source of truth) ──
  const feedBuffer = new FeedBuffer(100);
  const senseBuffer = new SenseBuffer(30);

  // ── Initialize overlay WS handler ──
  const wsHandler = new WsHandler();

  // ── Initialize tracing ── (both are null when tracing is disabled)
  const tracer = config.traceEnabled ? new Tracer() : null;
  const traceStore = config.traceEnabled ? new TraceStore(config.traceDir) : null;

  // ── Initialize recorder ──
  const recorder = new Recorder();

  // ── Initialize profiler ──
  const profiler = new Profiler();

  // ── Initialize learning subsystem ── (null when learning is disabled)
  const feedbackStore = config.learningConfig.enabled
    ? new FeedbackStore(config.learningConfig.feedbackDir, config.learningConfig.retentionDays)
    : null;

  // ── Initialize trait engine ──
  const traitRoster = loadTraitRoster(config.traitConfig.configPath);
  const traitEngine = new TraitEngine(traitRoster, config.traitConfig);

  // ── Initialize escalation ──
  const escalator = new Escalator({
    feedBuffer,
    wsHandler,
    escalationConfig: config.escalationConfig,
    openclawConfig: config.openclawConfig,
    profiler,
    feedbackStore: feedbackStore ?? undefined,
  });

  // ── Initialize agent loop (event-driven) ──
  const agentLoop = new AgentLoop({
    feedBuffer,
    senseBuffer,
    agentConfig: config.agentConfig,
    escalationMode: config.escalationConfig.mode,
    situationMdPath: config.situationMdPath,
    getRecorderStatus: () => recorder.getStatus(),
    profiler,
    // Called after each agent analysis tick: drives the recorder and
    // dispatches any task the analysis produced, then hands off to the
    // escalator.
    onAnalysis: (entry, contextWindow) => {
      // Handle recorder commands
      const stopResult = recorder.handleCommand(entry.record);

      // Dispatch task via subagent spawn
      if (entry.task || stopResult) {
        let task: string;
        let label: string | undefined;

        if (stopResult && stopResult.segments > 0 && entry.task) {
          // Recording stopped with explicit task instruction
          task = `${entry.task}\n\n[Recording: "${stopResult.title}", ${stopResult.durationS}s]\n${stopResult.transcript}`;
          label = stopResult.title;
        } else if (stopResult && stopResult.segments > 0) {
          // Recording stopped without explicit task — default to cleanup/summarize
          task = `Clean up and summarize this recording transcript:\n\n[Recording: "${stopResult.title}", ${stopResult.durationS}s]\n${stopResult.transcript}`;
          label = stopResult.title;
        } else if (entry.task) {
          // Standalone task without recording
          task = entry.task;
        } else {
          // Recorder stopped with zero segments — nothing to dispatch.
          task = "";
        }

        if (task) {
          // Fire-and-forget: dispatch failure is logged, not fatal.
          escalator.dispatchSpawnTask(task, label).catch(err => {
            error(TAG, "spawn task dispatch error:", err);
          });
        }
      }

      // Escalation continues as normal
      escalator.onAgentAnalysis(entry, contextWindow);
    },
    onSituationUpdate: (content) => {
      escalator.pushSituationMd(content);
    },
    onHudUpdate: (text) => {
      wsHandler.broadcast(text, "normal", "stream");
    },
    // Only wired when tracing is enabled; wraps ctx.finish so every
    // finished trace is also persisted to the trace store.
    onTraceStart: tracer ? (tickId) => {
      const ctx = tracer.startTrace(tickId);
      // Hook trace persistence
      const origFinish = ctx.finish.bind(ctx);
      ctx.finish = (metrics) => {
        origFinish(metrics);
        const traces = tracer.getTraces(tickId - 1, 1);
        if (traces.length > 0 && traceStore) {
          traceStore.append(traces[0]);
        }
      };
      return ctx;
    } : undefined,
    traitEngine,
    traitLogDir: config.traitConfig.logDir,
  });

  // ── Wire learning signal collector (needs agentLoop) ──
  const signalCollector = feedbackStore
    ? new SignalCollector(feedbackStore, agentLoop, senseBuffer)
    : null;
  if (signalCollector) {
    escalator.setSignalCollector(signalCollector);
  }

  // ── Platform-specific audio capture spawner ──
  // Dynamic import so only the current platform's spawner module is loaded.
  let captureSpawner: CaptureSpawner;
  if (process.platform === "win32") {
    const { WindowsCaptureSpawner } = await import("./audio/capture-spawner-win.js");
    captureSpawner = new WindowsCaptureSpawner();
  } else {
    const { MacOSCaptureSpawner } = await import("./audio/capture-spawner-macos.js");
    captureSpawner = new MacOSCaptureSpawner();
  }

  // ── Initialize audio pipelines ──
  const systemAudioPipeline = new AudioPipeline(config.audioConfig, "system", captureSpawner);
  const micPipeline = config.micEnabled ? new AudioPipeline(config.micConfig, "mic", captureSpawner) : null;
  const transcription = new TranscriptionService(config.transcriptionConfig);
  systemAudioPipeline.setProfiler(profiler);
  if (micPipeline) micPipeline.setProfiler(profiler);
  transcription.setProfiler(profiler);

  // Wire: audio chunks → transcription (both pipelines share the same transcription service)
  systemAudioPipeline.on("chunk", (chunk) => {
    transcription.processChunk(chunk).catch((err) => {
      error(TAG, "transcription error:", err instanceof Error ? err.message : err);
    });
  });

  if (micPipeline) {
    micPipeline.on("chunk", (chunk) => {
      transcription.processChunk(chunk).catch((err) => {
        error(TAG, "mic transcription error:", err instanceof Error ? err.message : err);
      });
    });
  }

  // System audio pipeline lifecycle events
  systemAudioPipeline.on("error", (err) => {
    error(TAG, "system audio pipeline error:", err instanceof Error ? err.message : err);
    wsHandler.broadcast("\u26a0 System audio capture error. Check device settings.", "high");
  });

  systemAudioPipeline.on("started", () => {
    log(TAG, "system audio pipeline started");
    wsHandler.updateState({ audio: "active" });
  });

  systemAudioPipeline.on("stopped", () => {
    log(TAG, "system audio pipeline stopped");
    wsHandler.updateState({ audio: "muted" });
  });

  systemAudioPipeline.on("muted", () => {
    log(TAG, "system audio muted (capture process still running)");
    wsHandler.updateState({ audio: "muted" });
  });

  systemAudioPipeline.on("unmuted", () => {
    log(TAG, "system audio unmuted");
    wsHandler.updateState({ audio: "active" });
  });

  // Mic pipeline lifecycle events
  if (micPipeline) {
    micPipeline.on("error", (err) => {
      error(TAG, "mic pipeline error:", err instanceof Error ? err.message : err);
      wsHandler.broadcast("\u26a0 Mic capture error. Check device settings.", "high");
    });

    micPipeline.on("started", () => {
      log(TAG, "mic pipeline started");
      wsHandler.updateState({ mic: "active" });
    });

    micPipeline.on("stopped", () => {
      log(TAG, "mic pipeline stopped");
      wsHandler.updateState({ mic: "muted" });
    });
  }

  // Wire: transcripts → feed buffer + overlay + agent trigger + recorder
  // Per-source dedup: track last 3 transcripts per source
  const recentSystemTranscripts: string[] = [];
  const recentMicTranscripts: string[] = [];

  transcription.on("transcript", (result) => {
    const isSystem = result.audioSource === "system";
    const recentSame = isSystem ? recentSystemTranscripts : recentMicTranscripts;

    // Skip near-duplicate transcripts within same source
    if (isDuplicateTranscript(result.text, recentSame)) {
      log(TAG, `transcript deduped (${result.audioSource}): "${result.text.slice(0, 60)}..."`);
      return;
    }

    // Cross-stream dedup: drop mic transcript if >70% similar to recent system transcript
    // (the mic physically picks up the speakers, duplicating system audio)
    if (!isSystem && recentSystemTranscripts.length > 0) {
      const trimmed = result.text.trim();
      for (const recent of recentSystemTranscripts) {
        if (bigramSimilarity(trimmed, recent) > 0.70) {
          log(TAG, `mic transcript cross-deduped (speakers pickup): "${trimmed.slice(0, 60)}..."`);
          return;
        }
      }
    }

    // Track recent transcripts (ring buffer of 3 per source)
    recentSame.push(result.text.trim());
    if (recentSame.length > 3) recentSame.shift();

    // Speaker emoji for system audio, microphone emoji for mic.
    const emoji = isSystem ? "\ud83d\udd0a" : "\ud83c\udf99";
    const tag = `[${emoji}]`;
    // Apply the configured privacy level before the text enters the buffer.
    const bufferLevel = levelFor("audio_transcript", "local_buffer");
    const bufferText = applyLevel(result.text, bufferLevel, "audio");
    const item = feedBuffer.push(`${tag} ${bufferText}`, "normal", "audio", "stream");
    if (!isSystem) item.audioSource = "mic";
    wsHandler.broadcast(`${tag} ${bufferText}`, "normal");
    recorder.onFeedItem(item); // Collect for recording if active
    agentLoop.onNewContext(); // Trigger debounced analysis
  });

  // ── Screen capture active flag ── (toggled by the overlay, see below)
  let screenActive = true;

  // ── Create HTTP + WS server ──
  const server = createAppServer({
    config,
    feedBuffer,
    senseBuffer,
    wsHandler,
    profiler,
    feedbackStore: feedbackStore ?? undefined,
    isScreenActive: () => screenActive,

    onSenseEvent: (event: SenseEvent) => {
      // Respect toggle_screen — if user disabled screen, ignore sense events
      if (!screenActive) return;

      wsHandler.updateState({ screen: "active" });

      // Track app context for recorder
      recorder.onSenseEvent(event);

      // Broadcast app/window changes to overlay
      if (event.type === "text" && event.ocr && event.ocr.trim().length > 10) {
        const app = shortAppName(event.meta.app || "");
        // First OCR line longer than 5 chars, falling back to the raw first line.
        const firstLine = event.ocr.split("\n").find((l: string) => l.trim().length > 5)?.trim() || event.ocr.split("\n")[0].trim();
        const text = firstLine.slice(0, 80);
        const prefix = app ? `${app}: ` : "";
        wsHandler.broadcast(`[\ud83d\udc41] ${prefix}${text}`, "normal");
      }

      // Trigger debounced agent analysis
      agentLoop.onNewContext();
    },

    onFeedPost: (text: string, priority: string) => {
      const item = feedBuffer.push(text, priority as any, "system", "stream");
      wsHandler.broadcast(text, priority as any);
      agentLoop.onNewContext();
      log(TAG, `[feed] #${item.id}: ${text.slice(0, 80)}`);
    },

    onSenseProfile: (snapshot) => profiler.reportSense(snapshot),

    getHealthPayload: () => {
      const escStats = escalator.getStats();
      const warnings: string[] = [];

      // Compute health warnings from escalation metrics
      const totalAttempts = (escStats.totalDirectResponses as number) + (escStats.totalTimeouts as number);
      const timeoutRate = totalAttempts > 0 ? (escStats.totalTimeouts as number) / totalAttempts : 0;

      // >30% timeouts across at least 5 attempts
      if (totalAttempts >= 5 && timeoutRate > 0.3) {
        warnings.push(`high_timeout_rate: ${Math.round(timeoutRate * 100)}%`);
      }
      if ((escStats.consecutiveTimeouts as number) >= 3) {
        warnings.push(`consecutive_timeouts: ${escStats.consecutiveTimeouts}`);
      }
      // No response seen in the last 5 minutes
      const lastResp = escStats.lastResponseTs as number;
      if (lastResp > 0 && Date.now() - lastResp > 5 * 60 * 1000) {
        warnings.push(`stale_responses: ${Math.round((Date.now() - lastResp) / 60000)}min`);
      }
      if ((escStats.totalSpawnResponses as number) > 5 && (escStats.totalDirectResponses as number) === 0) {
        warnings.push("no_direct_responses");
      }
      if ((escStats.avgResponseMs as number) > 30000) {
        warnings.push(`slow_responses: ${Math.round(escStats.avgResponseMs as number)}ms avg`);
      }

      return {
        warnings,
        agent: agentLoop.getStats(),
        escalation: escStats,
        transcription: transcription.getProfilingStats(),
        situation: { path: config.situationMdPath },
        traces: tracer ? tracer.getMetricsSummary() : null,
        profiling: profiler.getSnapshot(),
      };
    },

    getAgentDigest: () => agentLoop.getDigest(),
    getAgentHistory: (limit) => agentLoop.getHistory(limit),
    getAgentContext: () => agentLoop.getContext(),
    getAgentConfig: () => agentLoop.getConfig(),

    updateAgentConfig: (updates) => {
      // Handle escalation mode updates
      if (updates.escalationMode !== undefined) {
        const mode = String(updates.escalationMode) as EscalationMode;
        if (["focus", "selective", "rich", "off"].includes(mode)) {
          escalator.setMode(mode);
          (agentLoop as any).deps.escalationMode = mode;
        }
      }
      // Cooldown is clamped to a 5s floor; stale to a 0 floor (0 = disabled).
      if (updates.escalationCooldownMs !== undefined) {
        config.escalationConfig.cooldownMs = Math.max(5000, parseInt(String(updates.escalationCooldownMs)));
      }
      if (updates.escalationStaleMs !== undefined) {
        config.escalationConfig.staleMs = Math.max(0, parseInt(String(updates.escalationStaleMs)));
      }
      agentLoop.updateConfig(updates);
      return agentLoop.getConfig();
    },

    getTraces: (after, limit) => tracer ? tracer.getTraces(after, limit) : [],
    reconnectGateway: () => escalator.reconnectGateway(),

    // Bare agent HTTP escalation bridge
    getEscalationPending: () => escalator.getPendingHttp(),
    respondEscalation: (id: string, response: string) => escalator.respondHttp(id, response),
  });

  // ── Wire overlay profiling ──
  wsHandler.onProfiling((msg) => {
    profiler.reportOverlay({ rssMb: msg.rssMb, uptimeS: msg.uptimeS, ts: msg.ts });
  });

  // ── Wire overlay commands ──
  setupCommands({
    wsHandler,
    systemAudioPipeline,
    micPipeline,
    config,
    onUserMessage: async (text) => {
      await escalator.sendDirect(text);
    },
    onToggleScreen: () => {
      screenActive = !screenActive;
      // Drop buffered screen context so disabled capture leaves no residue.
      if (!screenActive) {
        senseBuffer.clear();
      }
      wsHandler.updateState({ screen: screenActive ? "active" : "off" });
      return screenActive;
    },
    onToggleTraits: () => traitEngine.toggle(),
  });

  // Broadcast initial screen state so overlay gets correct status on connect
  wsHandler.updateState({ screen: "active" });

  // ── Start services ──
  try {
    await server.start();
  } catch (err) {
    // Server is mandatory; nothing else can run without it.
    error(TAG, "failed to start server:", err);
    process.exit(1);
  }

  // Start profiler
  profiler.start();
  // Periodically sample buffer gauges
  const bufferGaugeTimer = setInterval(() => {
    profiler.gauge("buffer.feed", feedBuffer.size);
    profiler.gauge("buffer.sense", senseBuffer.size);
    profiler.gauge("buffer.feed.hwm", feedBuffer.hwm);
    profiler.gauge("buffer.sense.hwm", senseBuffer.hwm);
    profiler.gauge("ws.clients", wsHandler.clientCount);
  }, 10_000);

  // Start escalation WS connection
  escalator.start();

  // Start periodic feedback summary (every 30 minutes, offset from startup)
  const feedbackSummaryTimer = config.learningConfig.enabled
    ? setInterval(() => {
      escalator.sendFeedbackSummary().catch(err => {
        warn(TAG, "feedback summary error:", err);
      });
    }, 30 * 60 * 1000)
    : null;

  // Start agent loop
  agentLoop.start();

  // Auto-start system audio if configured
  if (config.audioConfig.autoStart) {
    log(TAG, "auto-starting system audio pipeline...");
    systemAudioPipeline.start();
  } else {
    log(TAG, "system audio pipeline ready (not auto-started \u2014 send toggle_audio or set AUDIO_AUTO_START=true)");
  }

  // Auto-start mic if configured
  if (micPipeline && config.micConfig.autoStart) {
    log(TAG, "auto-starting mic pipeline...");
    micPipeline.start();
  } else if (micPipeline) {
    log(TAG, "mic pipeline ready (not auto-started \u2014 send toggle_mic or set MIC_AUTO_START=true)");
  }

  log(TAG, "\u2713 sinain-core running");
  log(TAG, `  http+ws: http://0.0.0.0:${config.port}`);
  log(TAG, `  audio: ${config.audioConfig.autoStart ? "active" : "standby"} (${config.audioConfig.captureCommand})`);
  log(TAG, `  mic: ${config.micEnabled ? (config.micConfig.autoStart ? "active" : "standby") : "disabled"}`);
  log(TAG, `  agent: ${config.agentConfig.enabled ? "enabled" : "disabled"}`);
  log(TAG, `  escal: ${config.escalationConfig.mode}`);
  log(TAG, `  traits: ${config.traitConfig.enabled ? "enabled" : "disabled"} (${traitRoster.length} traits)`);

  // ── Graceful shutdown ──
  // Order: timers and producers (recorder, agent loop, pipelines) stop first,
  // then consumers/stores are destroyed, then the server last.
  const shutdown = async (signal: string) => {
    log(TAG, `${signal} received, shutting down...`);
    clearInterval(bufferGaugeTimer);
    if (feedbackSummaryTimer) clearInterval(feedbackSummaryTimer);
    profiler.stop();
    recorder.forceStop(); // Stop any active recording
    agentLoop.stop();
    systemAudioPipeline.stop();
    if (micPipeline) micPipeline.stop();
    transcription.destroy();
    escalator.stop();
    signalCollector?.destroy();
    feedbackStore?.destroy();
    traceStore?.destroy();
    await server.destroy();
    log(TAG, "goodbye");
    process.exit(0);
  };

  process.on("SIGINT", () => shutdown("SIGINT"));
  process.on("SIGTERM", () => shutdown("SIGTERM"));

  // Log-only handlers: the process deliberately keeps running after these.
  process.on("uncaughtException", (err) => {
    error(TAG, "uncaught exception:", err);
  });
  process.on("unhandledRejection", (reason) => {
    error(TAG, "unhandled rejection:", reason);
  });
}
main().catch((err) => {
|
|
505
|
+
error(TAG, "fatal:", err);
|
|
506
|
+
process.exit(1);
|
|
507
|
+
});
|