@bryti/agent 0.0.1 → 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/Dockerfile +27 -0
- package/README.md +77 -50
- package/config.example.yml +265 -0
- package/dist/active-hours.d.ts +23 -0
- package/dist/active-hours.d.ts.map +1 -0
- package/dist/active-hours.js +68 -0
- package/dist/active-hours.js.map +1 -0
- package/dist/agent.d.ts +84 -0
- package/dist/agent.d.ts.map +1 -0
- package/dist/agent.js +383 -0
- package/dist/agent.js.map +1 -0
- package/dist/channels/markdown/ir.d.ts +79 -0
- package/dist/channels/markdown/ir.d.ts.map +1 -0
- package/dist/channels/markdown/ir.js +824 -0
- package/dist/channels/markdown/ir.js.map +1 -0
- package/dist/channels/markdown/render.d.ts +35 -0
- package/dist/channels/markdown/render.d.ts.map +1 -0
- package/dist/channels/markdown/render.js +178 -0
- package/dist/channels/markdown/render.js.map +1 -0
- package/dist/channels/telegram-network-errors.d.ts +27 -0
- package/dist/channels/telegram-network-errors.d.ts.map +1 -0
- package/dist/channels/telegram-network-errors.js +156 -0
- package/dist/channels/telegram-network-errors.js.map +1 -0
- package/dist/channels/telegram.d.ts +76 -0
- package/dist/channels/telegram.d.ts.map +1 -0
- package/dist/channels/telegram.js +814 -0
- package/dist/channels/telegram.js.map +1 -0
- package/dist/channels/types.d.ts +59 -0
- package/dist/channels/types.d.ts.map +1 -0
- package/dist/channels/types.js +9 -0
- package/dist/channels/types.js.map +1 -0
- package/dist/channels/whatsapp.d.ts +45 -0
- package/dist/channels/whatsapp.d.ts.map +1 -0
- package/dist/channels/whatsapp.js +310 -0
- package/dist/channels/whatsapp.js.map +1 -0
- package/dist/cli.d.ts +13 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +635 -0
- package/dist/cli.js.map +1 -0
- package/dist/commands.d.ts +35 -0
- package/dist/commands.d.ts.map +1 -0
- package/dist/commands.js +113 -0
- package/dist/commands.js.map +1 -0
- package/dist/compaction/history.d.ts +17 -0
- package/dist/compaction/history.d.ts.map +1 -0
- package/dist/compaction/history.js +35 -0
- package/dist/compaction/history.js.map +1 -0
- package/dist/compaction/index.d.ts +3 -0
- package/dist/compaction/index.d.ts.map +1 -0
- package/dist/compaction/index.js +3 -0
- package/dist/compaction/index.js.map +1 -0
- package/dist/compaction/proactive.d.ts +25 -0
- package/dist/compaction/proactive.d.ts.map +1 -0
- package/dist/compaction/proactive.js +87 -0
- package/dist/compaction/proactive.js.map +1 -0
- package/dist/compaction/transcript-repair.d.ts +55 -0
- package/dist/compaction/transcript-repair.d.ts.map +1 -0
- package/dist/compaction/transcript-repair.js +215 -0
- package/dist/compaction/transcript-repair.js.map +1 -0
- package/dist/config.d.ts +128 -0
- package/dist/config.d.ts.map +1 -0
- package/dist/config.js +317 -0
- package/dist/config.js.map +1 -0
- package/dist/crash-recovery.d.ts +23 -0
- package/dist/crash-recovery.d.ts.map +1 -0
- package/dist/crash-recovery.js +96 -0
- package/dist/crash-recovery.js.map +1 -0
- package/dist/defaults/extensions/EXTENSIONS.md +158 -0
- package/dist/defaults/extensions/documents-hedgedoc.ts +153 -0
- package/dist/history.d.ts +31 -0
- package/dist/history.d.ts.map +1 -0
- package/dist/history.js +49 -0
- package/dist/history.js.map +1 -0
- package/dist/index.d.ts +19 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +673 -0
- package/dist/index.js.map +1 -0
- package/dist/logger.d.ts +39 -0
- package/dist/logger.d.ts.map +1 -0
- package/dist/logger.js +143 -0
- package/dist/logger.js.map +1 -0
- package/dist/memory/conversation-search.d.ts +15 -0
- package/dist/memory/conversation-search.d.ts.map +1 -0
- package/dist/memory/conversation-search.js +60 -0
- package/dist/memory/conversation-search.js.map +1 -0
- package/dist/memory/core-memory.d.ts +28 -0
- package/dist/memory/core-memory.d.ts.map +1 -0
- package/dist/memory/core-memory.js +102 -0
- package/dist/memory/core-memory.js.map +1 -0
- package/dist/memory/embeddings.d.ts +44 -0
- package/dist/memory/embeddings.d.ts.map +1 -0
- package/dist/memory/embeddings.js +139 -0
- package/dist/memory/embeddings.js.map +1 -0
- package/dist/memory/search.d.ts +49 -0
- package/dist/memory/search.d.ts.map +1 -0
- package/dist/memory/search.js +97 -0
- package/dist/memory/search.js.map +1 -0
- package/dist/memory/store.d.ts +32 -0
- package/dist/memory/store.d.ts.map +1 -0
- package/dist/memory/store.js +205 -0
- package/dist/memory/store.js.map +1 -0
- package/dist/message-queue.d.ts +73 -0
- package/dist/message-queue.d.ts.map +1 -0
- package/dist/message-queue.js +188 -0
- package/dist/message-queue.js.map +1 -0
- package/dist/model-infra.d.ts +64 -0
- package/dist/model-infra.d.ts.map +1 -0
- package/dist/model-infra.js +202 -0
- package/dist/model-infra.js.map +1 -0
- package/dist/projection/format.d.ts +10 -0
- package/dist/projection/format.d.ts.map +1 -0
- package/dist/projection/format.js +30 -0
- package/dist/projection/format.js.map +1 -0
- package/dist/projection/index.d.ts +11 -0
- package/dist/projection/index.d.ts.map +1 -0
- package/dist/projection/index.js +9 -0
- package/dist/projection/index.js.map +1 -0
- package/dist/projection/reflection.d.ts +94 -0
- package/dist/projection/reflection.d.ts.map +1 -0
- package/dist/projection/reflection.js +334 -0
- package/dist/projection/reflection.js.map +1 -0
- package/dist/projection/store.d.ts +144 -0
- package/dist/projection/store.d.ts.map +1 -0
- package/dist/projection/store.js +519 -0
- package/dist/projection/store.js.map +1 -0
- package/dist/projection/tools.d.ts +11 -0
- package/dist/projection/tools.d.ts.map +1 -0
- package/dist/projection/tools.js +237 -0
- package/dist/projection/tools.js.map +1 -0
- package/dist/scheduler.d.ts +36 -0
- package/dist/scheduler.d.ts.map +1 -0
- package/dist/scheduler.js +286 -0
- package/dist/scheduler.js.map +1 -0
- package/dist/system-prompt.d.ts +41 -0
- package/dist/system-prompt.d.ts.map +1 -0
- package/dist/system-prompt.js +162 -0
- package/dist/system-prompt.js.map +1 -0
- package/dist/time.d.ts +52 -0
- package/dist/time.d.ts.map +1 -0
- package/dist/time.js +138 -0
- package/dist/time.js.map +1 -0
- package/dist/tools/archival-memory-tool.d.ts +8 -0
- package/dist/tools/archival-memory-tool.d.ts.map +1 -0
- package/dist/tools/archival-memory-tool.js +68 -0
- package/dist/tools/archival-memory-tool.js.map +1 -0
- package/dist/tools/conversation-search-tool.d.ts +6 -0
- package/dist/tools/conversation-search-tool.d.ts.map +1 -0
- package/dist/tools/conversation-search-tool.js +28 -0
- package/dist/tools/conversation-search-tool.js.map +1 -0
- package/dist/tools/core-memory-tool.d.ts +7 -0
- package/dist/tools/core-memory-tool.d.ts.map +1 -0
- package/dist/tools/core-memory-tool.js +59 -0
- package/dist/tools/core-memory-tool.js.map +1 -0
- package/dist/tools/fetch-url.d.ts +15 -0
- package/dist/tools/fetch-url.d.ts.map +1 -0
- package/dist/tools/fetch-url.js +76 -0
- package/dist/tools/fetch-url.js.map +1 -0
- package/dist/tools/files.d.ts +10 -0
- package/dist/tools/files.d.ts.map +1 -0
- package/dist/tools/files.js +127 -0
- package/dist/tools/files.js.map +1 -0
- package/dist/tools/index.d.ts +17 -0
- package/dist/tools/index.d.ts.map +1 -0
- package/dist/tools/index.js +118 -0
- package/dist/tools/index.js.map +1 -0
- package/dist/tools/result.d.ts +21 -0
- package/dist/tools/result.d.ts.map +1 -0
- package/dist/tools/result.js +36 -0
- package/dist/tools/result.js.map +1 -0
- package/dist/tools/skill-install.d.ts +17 -0
- package/dist/tools/skill-install.d.ts.map +1 -0
- package/dist/tools/skill-install.js +148 -0
- package/dist/tools/skill-install.js.map +1 -0
- package/dist/tools/web-search.d.ts +42 -0
- package/dist/tools/web-search.d.ts.map +1 -0
- package/dist/tools/web-search.js +237 -0
- package/dist/tools/web-search.js.map +1 -0
- package/dist/trust/guardrail.d.ts +60 -0
- package/dist/trust/guardrail.d.ts.map +1 -0
- package/dist/trust/guardrail.js +171 -0
- package/dist/trust/guardrail.js.map +1 -0
- package/dist/trust/index.d.ts +12 -0
- package/dist/trust/index.d.ts.map +1 -0
- package/dist/trust/index.js +12 -0
- package/dist/trust/index.js.map +1 -0
- package/dist/trust/store.d.ts +118 -0
- package/dist/trust/store.d.ts.map +1 -0
- package/dist/trust/store.js +209 -0
- package/dist/trust/store.js.map +1 -0
- package/dist/trust/wrapper.d.ts +36 -0
- package/dist/trust/wrapper.d.ts.map +1 -0
- package/dist/trust/wrapper.js +142 -0
- package/dist/trust/wrapper.js.map +1 -0
- package/dist/usage.d.ts +53 -0
- package/dist/usage.d.ts.map +1 -0
- package/dist/usage.js +124 -0
- package/dist/usage.js.map +1 -0
- package/dist/util/math.d.ts +9 -0
- package/dist/util/math.d.ts.map +1 -0
- package/dist/util/math.js +22 -0
- package/dist/util/math.js.map +1 -0
- package/dist/util/ssrf.d.ts +21 -0
- package/dist/util/ssrf.d.ts.map +1 -0
- package/dist/util/ssrf.js +77 -0
- package/dist/util/ssrf.js.map +1 -0
- package/dist/workers/index.d.ts +8 -0
- package/dist/workers/index.d.ts.map +1 -0
- package/dist/workers/index.js +7 -0
- package/dist/workers/index.js.map +1 -0
- package/dist/workers/registry.d.ts +53 -0
- package/dist/workers/registry.d.ts.map +1 -0
- package/dist/workers/registry.js +38 -0
- package/dist/workers/registry.js.map +1 -0
- package/dist/workers/scoped-tools.d.ts +21 -0
- package/dist/workers/scoped-tools.d.ts.map +1 -0
- package/dist/workers/scoped-tools.js +111 -0
- package/dist/workers/scoped-tools.js.map +1 -0
- package/dist/workers/spawn.d.ts +62 -0
- package/dist/workers/spawn.d.ts.map +1 -0
- package/dist/workers/spawn.js +314 -0
- package/dist/workers/spawn.js.map +1 -0
- package/dist/workers/tools.d.ts +26 -0
- package/dist/workers/tools.d.ts.map +1 -0
- package/dist/workers/tools.js +380 -0
- package/dist/workers/tools.js.map +1 -0
- package/docker-compose.yml +72 -0
- package/package.json +16 -1
- package/run.sh +27 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,673 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Bryti entry point.
|
|
3
|
+
*
|
|
4
|
+
* Wires config, persistent pi sessions (one per user), channel bridges
|
|
5
|
+
* (Telegram, WhatsApp), cron scheduler, and the message queue together.
|
|
6
|
+
*
|
|
7
|
+
* Startup: load config, ensure data dirs, warm up embedding model, start
|
|
8
|
+
* bridges, start scheduler, begin processing messages.
|
|
9
|
+
*
|
|
10
|
+
* Each message: load (or reuse) the user's persistent session, run
|
|
11
|
+
* transcript repair, prompt the model with fallback, persist the
|
|
12
|
+
* response to the JSONL audit log.
|
|
13
|
+
*/
|
|
14
|
+
import fs from "node:fs";
|
|
15
|
+
import path from "node:path";
|
|
16
|
+
import { loadConfig, ensureDataDirs, applyIntegrationEnvVars } from "./config.js";
|
|
17
|
+
import { createCoreMemory } from "./memory/core-memory.js";
|
|
18
|
+
import { createHistoryManager } from "./history.js";
|
|
19
|
+
import { warmupEmbeddings, disposeEmbeddings } from "./memory/embeddings.js";
|
|
20
|
+
import { createTools } from "./tools/index.js";
|
|
21
|
+
import { loadUserSession, repairSessionTranscript, refreshSystemPrompt, promptWithFallback, SILENT_REPLY_TOKEN } from "./agent.js";
|
|
22
|
+
import { TelegramBridge } from "./channels/telegram.js";
|
|
23
|
+
import { WhatsAppBridge } from "./channels/whatsapp.js";
|
|
24
|
+
import { createScheduler } from "./scheduler.js";
|
|
25
|
+
import { MessageQueue } from "./message-queue.js";
|
|
26
|
+
import { createTrustStore, checkPendingApproval, isAlwaysApproval, wrapToolsWithTrustChecks, } from "./trust/index.js";
|
|
27
|
+
import { calculateCostUsd, createUsageTracker, resolveModelCost, } from "./usage.js";
|
|
28
|
+
import { createAppLogger, installConsoleFileLogging } from "./logger.js";
|
|
29
|
+
import { handleSlashCommand } from "./commands.js";
|
|
30
|
+
import { writePendingCheckpoint, deletePendingCheckpoint, recoverPendingCheckpoints, } from "./crash-recovery.js";
|
|
31
|
+
import { startProactiveCompaction } from "./compaction/proactive.js";
|
|
32
|
+
// ---------------------------------------------------------------------------
// Restart protocol
//
// Exit code 42 tells run.sh this was intentional, so it loops immediately
// without delay. A marker file records who triggered the restart and which
// channel they're on, so the "Back online" message goes to the right place.
// ---------------------------------------------------------------------------
/**
 * Exit code that signals an intentional restart to the run.sh supervisor loop.
 * The loop checks for this code and restarts immediately without delay.
 */
export const RESTART_EXIT_CODE = 42;
|
|
44
|
+
/**
 * Path of the restart marker file inside the data dir's "pending" folder.
 *
 * @param {string} dataDir - Root data directory.
 * @returns {string} Path to pending/restart.json under dataDir.
 */
function restartMarkerPath(dataDir) {
    const pendingDir = path.join(dataDir, "pending");
    return path.join(pendingDir, "restart.json");
}
|
|
47
|
+
/**
 * Persist a restart marker so the next boot knows who triggered the restart
 * and on which channel (used to route the "Back online" message).
 *
 * Creates the pending/ directory if needed, then writes the marker as JSON.
 *
 * @param {string} dataDir - Root data directory.
 * @param {object} marker - Marker payload ({ userId, channelId, platform, reason }).
 */
function writeRestartMarker(dataDir, marker) {
    const markerFile = restartMarkerPath(dataDir);
    fs.mkdirSync(path.dirname(markerFile), { recursive: true });
    fs.writeFileSync(markerFile, JSON.stringify(marker), "utf8");
}
|
|
51
|
+
/**
 * Read the restart marker left by a previous run and delete it.
 *
 * The marker file is removed whether or not it parses; a corrupt marker is
 * treated as if no restart was pending.
 *
 * @param {string} dataDir - Root data directory.
 * @returns {{ marker: object, configRolledBack: boolean } | null} The parsed
 *   marker wrapped with configRolledBack=false, or null when the file is
 *   missing or unreadable.
 */
function readAndClearRestartMarker(dataDir) {
    const markerFile = restartMarkerPath(dataDir);
    if (!fs.existsSync(markerFile)) {
        return null;
    }
    let result = null;
    try {
        const parsed = JSON.parse(fs.readFileSync(markerFile, "utf8"));
        result = { marker: parsed, configRolledBack: false };
    }
    catch {
        // Corrupt or unreadable marker: treat as absent; still clean up below.
    }
    fs.rmSync(markerFile, { force: true });
    return result;
}
|
|
65
|
+
// ---------------------------------------------------------------------------
|
|
66
|
+
// Config snapshot / rollback
|
|
67
|
+
//
|
|
68
|
+
// Before restarting with a potentially-modified config.yml, we snapshot the
|
|
69
|
+
// current (known-good) file. On the next startup, if loadConfig() fails, we
|
|
70
|
+
// restore the snapshot and retry so the process comes back up even after a
|
|
71
|
+
// bad config edit. On successful startup the snapshot is deleted.
|
|
72
|
+
// ---------------------------------------------------------------------------
|
|
73
|
+
/**
 * Path of the pre-restart config.yml snapshot inside the pending/ folder.
 *
 * @param {string} dataDir - Root data directory.
 * @returns {string} Path to pending/config.yml.pre-restart under dataDir.
 */
function configSnapshotPath(dataDir) {
    const pendingDir = path.join(dataDir, "pending");
    return path.join(pendingDir, "config.yml.pre-restart");
}
|
|
76
|
+
/**
 * Snapshot the current config.yml before triggering a restart.
 * Called only when config.yml exists (successful boot confirms it was valid).
 *
 * NOTE(review): the dataDir parameter is intentionally ignored; the directory
 * is re-resolved from BRYTI_DATA_DIR (same value in practice, per the
 * original author's comment below).
 *
 * @param {string} dataDir - Unused; kept for signature compatibility.
 */
function snapshotConfig(dataDir) {
    // Use the resolved data dir from env, not the one stored in config (same value, but safer).
    const resolvedDataDir = path.resolve(process.env.BRYTI_DATA_DIR || "./data");
    const configFile = path.join(resolvedDataDir, "config.yml");
    if (!fs.existsSync(configFile)) {
        return;
    }
    const snapshotFile = configSnapshotPath(resolvedDataDir);
    fs.mkdirSync(path.dirname(snapshotFile), { recursive: true });
    fs.copyFileSync(configFile, snapshotFile);
    console.log("[config] Snapshotted config.yml for rollback if restart fails.");
}
|
|
91
|
+
/**
 * Load config, rolling back to the pre-restart snapshot on failure.
 *
 * On startup: if loadConfig() throws and a snapshot exists, restore it and
 * retry so the process comes back up even after a bad config edit. On a
 * successful load any leftover snapshot is deleted.
 *
 * @returns {{ config: object, rolledBack: boolean, rollbackReason?: string }}
 * @throws If loadConfig() fails and no snapshot is available, or if the
 *   restored snapshot also fails to load.
 */
function loadConfigWithRollback() {
    const dataDir = path.resolve(process.env.BRYTI_DATA_DIR || "./data");
    // Pure path computation; shared by both the success and failure branches.
    const snapshotFile = configSnapshotPath(dataDir);
    try {
        const config = loadConfig();
        // Success: delete any leftover snapshot (previous good restart).
        if (fs.existsSync(snapshotFile)) {
            fs.rmSync(snapshotFile, { force: true });
            console.log("[config] Deleted config snapshot (current config loaded successfully).");
        }
        return { config, rolledBack: false };
    }
    catch (err) {
        if (!fs.existsSync(snapshotFile)) {
            // No snapshot to fall back on — propagate the error.
            throw err;
        }
        const reason = err.message;
        console.warn(`[config] loadConfig() failed: ${reason}`);
        console.warn("[config] Restoring config.yml from pre-restart snapshot...");
        fs.copyFileSync(snapshotFile, path.join(dataDir, "config.yml"));
        fs.rmSync(snapshotFile, { force: true });
        // Retry with the restored config — if this also fails, propagate.
        const config = loadConfig();
        console.warn("[config] Rollback successful. Running on previous config.");
        return { config, rolledBack: true, rollbackReason: reason };
    }
}
|
|
128
|
+
/**
 * Narrow an arbitrary transcript entry to an assistant message.
 *
 * @param {unknown} message - Candidate transcript entry.
 * @returns {object | undefined} The same object when it is a non-null object
 *   with role === "assistant"; otherwise undefined.
 */
function toAssistantMessage(message) {
    const isAssistant = message !== null &&
        typeof message === "object" &&
        message.role === "assistant";
    return isAssistant ? message : undefined;
}
|
|
138
|
+
/** Extract text content from an assistant message (ignores tool calls, thinking). */
function extractResponseText(msg) {
    if (!msg || !("content" in msg)) {
        return "";
    }
    const { content } = msg;
    if (typeof content === "string") {
        return content;
    }
    if (!Array.isArray(content)) {
        // Unknown content shape — nothing textual to surface.
        return "";
    }
    // Concatenate only the text parts; missing .text becomes "".
    let text = "";
    for (const part of content) {
        if (part.type === "text") {
            text += String(part.text ?? "");
        }
    }
    return text;
}
|
|
153
|
+
/**
 * Format a model identifier for usage logging.
 *
 * @param {string | undefined} provider - Provider name, if known.
 * @param {string | undefined} model - Model name, if known.
 * @param {string} fallback - Used when no model name is available.
 * @returns {string} "provider/model" when both are set, else the model name,
 *   else the fallback.
 */
function modelNameForLog(provider, model, fallback) {
    const hasBoth = Boolean(provider) && Boolean(model);
    return hasBoth ? `${provider}/${model}` : (model || fallback);
}
|
|
159
|
+
/**
 * Find the bridge for a platform, falling back to the first available one.
 *
 * @param {object} state - App state holding state.bridges.
 * @param {string} [platform] - Desired platform (e.g. "telegram").
 * @returns {object} The matching bridge, or bridges[0] when platform is
 *   falsy or no bridge matches.
 */
function getBridge(state, platform) {
    const exact = platform
        ? state.bridges.find((bridge) => bridge.platform === platform)
        : undefined;
    return exact || state.bridges[0];
}
|
|
170
|
+
/**
 * Get or load the persistent session for a user.
 *
 * Returns a cached session from state.sessions when one exists; otherwise
 * builds the user's tool set (wrapped with trust checks), loads the session
 * from disk, and caches it. If loading throws, the session directory is
 * quarantined and a fresh session is created (the user is flagged in
 * state.recoveredSessions so they can be notified on the next message).
 *
 * @param {object} state - App state (config, sessions, bridges, trust store, ...).
 * @param {object} msg - Incoming message; userId/channelId/platform are used.
 * @returns {Promise<object>} The user's session wrapper.
 */
async function getOrLoadSession(state, msg) {
    const { userId, channelId, platform } = msg;
    const existing = state.sessions.get(userId);
    if (existing) {
        return existing;
    }
    const tools = createTools(state.config, state.coreMemory, userId, (triggered) => {
        // Worker completion triggered projections. Inject an immediate message
        // so the agent reads the results and notifies the user without waiting
        // for the 5-minute scheduler tick.
        //
        // The message text is deliberately explicit: the agent has no idea what the
        // worker found (it ran in isolation), and the user hasn't seen anything yet.
        // Without these instructions, the agent tends to assume the user is already
        // aware of the results and skips straight to next steps.
        if (!state.enqueue)
            return;
        // NOTE(review): deliberately shadows the outer channelId — worker
        // triggers are always routed to the first allowed Telegram user
        // (matching the hard-coded platform: "telegram" below).
        const channelId = String(state.config.telegram.allowed_users[0] ?? userId);
        const summaries = triggered.map((p) => `- ${p.summary} (id: ${p.id})`).join("\n");
        state.enqueue({
            channelId,
            userId,
            text: `[Worker completed]\n\nThe following commitment(s) were triggered:\n\n${summaries}\n\n` +
                `IMPORTANT: The user has NOT seen the worker's results yet. You must:\n` +
                `1. Read the worker's result file (file_read)\n` +
                `2. Share the key findings with the user FIRST\n` +
                `3. Only THEN suggest next steps or act on them\n` +
                `Never assume the user knows what the worker found. Always present the findings before drawing conclusions or taking action.`,
            platform: "telegram",
            raw: { type: "worker_trigger" },
        });
    }, async (reason) => {
        // Agent-triggered restart. Send notification then exit 42.
        await triggerRestart(state, { userId, channelId, platform, text: "", raw: null }, reason);
    });
    // Wrap tools with trust checks + LLM guardrail
    const trustContext = {
        config: state.config,
        getLastUserMessage: () => state.lastUserMessages.get(userId),
        // Sends the "Can I use X?" prompt over the user's channel and resolves
        // with their reply.
        onApprovalNeeded: async (prompt, approvalKey) => {
            const bridge = getBridge(state, platform);
            return bridge.sendApprovalRequest(channelId, prompt, approvalKey);
        },
    };
    const wrappedTools = wrapToolsWithTrustChecks(tools, state.trustStore, userId, trustContext);
    const sessDir = path.join(state.config.data_dir, "sessions", userId);
    let userSession;
    try {
        userSession = await loadUserSession(state.config, state.coreMemory, userId, wrappedTools);
    }
    catch (err) {
        console.error(`[session] Failed to load session for user ${userId}, attempting recovery:`, err);
        // Move the broken session aside (timestamped) rather than deleting it,
        // so it can be inspected later.
        const corruptDir = path.join(state.config.data_dir, "sessions", `${userId}-corrupt-${Date.now()}`);
        if (fs.existsSync(sessDir)) {
            try {
                fs.renameSync(sessDir, corruptDir);
                console.log(`[session] Quarantined corrupt session to: ${corruptDir}`);
            }
            catch (renameErr) {
                // Best-effort: proceed to retry even if quarantine failed.
                console.error(`[session] Could not quarantine corrupt session:`, renameErr);
            }
        }
        // Retry with a clean slate — loadUserSession will create a fresh session directory
        userSession = await loadUserSession(state.config, state.coreMemory, userId, wrappedTools);
        state.recoveredSessions.add(userId);
    }
    state.sessions.set(userId, userSession);
    return userSession;
}
|
|
242
|
+
/**
 * Write a restart marker and exit with code 42 (intentional restart).
 *
 * Sequence matters: checkpoint cleanup and config snapshot happen before the
 * marker is written and the farewell message is sent; process.exit never
 * returns.
 *
 * @param {object} state - App state (config, bridges).
 * @param {object} msg - Identifies the requesting user/channel/platform; used
 *   both for the marker contents and to route the confirmation message.
 * @param {string} reason - Human-readable restart reason stored in the marker.
 * @returns {Promise<never>} Never resolves — the process exits.
 */
async function triggerRestart(state, msg, reason) {
    console.log(`[restart] Requested by user ${msg.userId}: ${reason}`);
    // This shutdown is intentional: drop the crash-recovery checkpoint so the
    // next boot doesn't treat it as a crash and notify the user.
    deletePendingCheckpoint(state.config, msg.userId);
    // Snapshot current config.yml before restarting so we can roll back if the
    // agent left it in a broken state.
    snapshotConfig(state.config.data_dir);
    writeRestartMarker(state.config.data_dir, {
        userId: msg.userId,
        channelId: msg.channelId,
        platform: msg.platform,
        reason,
    });
    await getBridge(state, msg.platform).sendMessage(msg.channelId, "Restarting now. Back in a few seconds.");
    process.exit(RESTART_EXIT_CODE);
}
|
|
260
|
+
/**
|
|
261
|
+
* Process an incoming message through the agent.
|
|
262
|
+
*
|
|
263
|
+
* Pipeline (in order):
|
|
264
|
+
* 1. Slash command check — /clear, /memory, /log, /restart handled here, return early
|
|
265
|
+
* 2. Length check — reject messages over 10K chars before they waste context
|
|
266
|
+
* 3. Trust approval check — user may be responding to a pending "Can I use X?" prompt
|
|
267
|
+
* 4. Session load — get or create the persistent session for this user
|
|
268
|
+
* 5. Transcript repair — fix any corrupted tool-call/result pairings from the previous turn
|
|
269
|
+
* 6. System prompt refresh — pick up any core memory changes made last turn
|
|
270
|
+
* 7. Prompt — call the model with fallback chain
|
|
271
|
+
* 8. Usage tracking — log tokens, cost, latency
|
|
272
|
+
* 9. Send — deliver the response text to the channel
|
|
273
|
+
*/
|
|
274
|
+
async function processMessage(state, originalMsg) {
|
|
275
|
+
let msg = originalMsg;
|
|
276
|
+
// Handle slash commands first
|
|
277
|
+
const wasCommand = await handleSlashCommand(msg, {
|
|
278
|
+
config: state.config,
|
|
279
|
+
coreMemory: state.coreMemory,
|
|
280
|
+
historyManager: state.historyManager,
|
|
281
|
+
disposeSession: (userId) => {
|
|
282
|
+
const existing = state.sessions.get(userId);
|
|
283
|
+
if (existing) {
|
|
284
|
+
existing.dispose();
|
|
285
|
+
state.sessions.delete(userId);
|
|
286
|
+
// Delete the session directory so the next message creates a fresh session
|
|
287
|
+
if (fs.existsSync(existing.sessionDir)) {
|
|
288
|
+
fs.rmSync(existing.sessionDir, { recursive: true, force: true });
|
|
289
|
+
}
|
|
290
|
+
}
|
|
291
|
+
},
|
|
292
|
+
sendMessage: (channelId, text) => getBridge(state, msg.platform).sendMessage(channelId, text),
|
|
293
|
+
triggerRestart: (msg, reason) => triggerRestart(state, msg, reason),
|
|
294
|
+
});
|
|
295
|
+
if (wasCommand) {
|
|
296
|
+
return;
|
|
297
|
+
}
|
|
298
|
+
// Input validation: reject excessively long messages before they waste context
|
|
299
|
+
const MAX_MESSAGE_LENGTH = 10_000;
|
|
300
|
+
if (msg.text.length > MAX_MESSAGE_LENGTH) {
|
|
301
|
+
await getBridge(state, msg.platform).sendMessage(msg.channelId, `That message is too long (${msg.text.length.toLocaleString()} characters). ` +
|
|
302
|
+
`Could you break it into smaller pieces? I can handle up to ${MAX_MESSAGE_LENGTH.toLocaleString()} characters at a time.`);
|
|
303
|
+
return;
|
|
304
|
+
}
|
|
305
|
+
// Check for pending trust approvals (user responding to "Can I use X?" prompt)
|
|
306
|
+
const approvedTool = checkPendingApproval(msg.userId, msg.text);
|
|
307
|
+
if (approvedTool) {
|
|
308
|
+
const duration = isAlwaysApproval(msg.text) ? "always" : "once";
|
|
309
|
+
state.trustStore.approve(approvedTool, duration);
|
|
310
|
+
const durLabel = duration === "always" ? "Always allowed" : "Allowed for this time";
|
|
311
|
+
await getBridge(state, msg.platform).sendMessage(msg.channelId, `${durLabel}: ${approvedTool}. Continuing...`);
|
|
312
|
+
// Don't return; let the message flow through so the agent can retry the tool
|
|
313
|
+
}
|
|
314
|
+
// Track last user message for guardrail context
|
|
315
|
+
state.lastUserMessages.set(msg.userId, msg.text);
|
|
316
|
+
// Show typing indicator
|
|
317
|
+
await getBridge(state, msg.platform).sendTyping(msg.channelId);
|
|
318
|
+
try {
|
|
319
|
+
// Load (or reuse) the persistent session for this user
|
|
320
|
+
const userSession = await getOrLoadSession(state, msg);
|
|
321
|
+
if (state.recoveredSessions.has(msg.userId)) {
|
|
322
|
+
state.recoveredSessions.delete(msg.userId);
|
|
323
|
+
await getBridge(state, msg.platform).sendMessage(msg.channelId, "I had to start a fresh conversation due to a technical issue. My memory and reminders are intact, just the recent conversation thread was lost.");
|
|
324
|
+
}
|
|
325
|
+
const { session } = userSession;
|
|
326
|
+
// Track last user message time (scheduler messages have raw.type set)
|
|
327
|
+
const rawObj = msg.raw;
|
|
328
|
+
const schedulerType = rawObj?.type;
|
|
329
|
+
const isSchedulerMessage = schedulerType != null;
|
|
330
|
+
// Daily reviews are context, not urgent. Buffer them so the agent can
|
|
331
|
+
// weave them into the next user-initiated response instead of sending
|
|
332
|
+
// a separate message that feels disconnected.
|
|
333
|
+
if (schedulerType === "projection_daily_review") {
|
|
334
|
+
const pending = state.pendingSchedulerContext.get(msg.userId) ?? [];
|
|
335
|
+
pending.push(msg.text);
|
|
336
|
+
state.pendingSchedulerContext.set(msg.userId, pending);
|
|
337
|
+
console.log(`[scheduler] Buffered daily review for ${msg.userId} (${pending.length} pending)`);
|
|
338
|
+
return;
|
|
339
|
+
}
|
|
340
|
+
if (!isSchedulerMessage) {
|
|
341
|
+
userSession.lastUserMessageAt = Date.now();
|
|
342
|
+
// Prepend any buffered scheduler context (daily reviews, etc.) so the
|
|
343
|
+
// agent can weave them into a single coherent response instead of
|
|
344
|
+
// sending separate messages for each scheduler event.
|
|
345
|
+
const pending = state.pendingSchedulerContext.get(msg.userId);
|
|
346
|
+
if (pending && pending.length > 0) {
|
|
347
|
+
const schedulerBlock = pending.join("\n\n---\n\n");
|
|
348
|
+
msg = {
|
|
349
|
+
...msg,
|
|
350
|
+
text: `${schedulerBlock}\n\n---\n\nUser message:\n${msg.text}`,
|
|
351
|
+
};
|
|
352
|
+
state.pendingSchedulerContext.delete(msg.userId);
|
|
353
|
+
}
|
|
354
|
+
}
|
|
355
|
+
// Repair transcript before prompting
|
|
356
|
+
repairSessionTranscript(session, msg.userId);
|
|
357
|
+
// Reload the system prompt so the agent sees any core memory changes
|
|
358
|
+
// it made during the previous turn (memory_core_append / memory_core_replace)
|
|
359
|
+
await refreshSystemPrompt(session);
|
|
360
|
+
// Append user message to audit log (images logged as placeholder, not base64)
|
|
361
|
+
const imageLogSuffix = msg.images && msg.images.length > 0
|
|
362
|
+
? " " + msg.images.map((img) => {
|
|
363
|
+
const bytes = Math.round(img.data.length * 0.75);
|
|
364
|
+
const kb = Math.round(bytes / 1024);
|
|
365
|
+
return `[image: ${img.mimeType}, ${kb}KB]`;
|
|
366
|
+
}).join(" ")
|
|
367
|
+
: "";
|
|
368
|
+
await state.historyManager.append({
|
|
369
|
+
role: "user",
|
|
370
|
+
content: msg.text + imageLogSuffix,
|
|
371
|
+
});
|
|
372
|
+
// Write a crash-recovery checkpoint before the (potentially long) model call.
|
|
373
|
+
// Deleted after the response is sent. If the process dies in between, the
|
|
374
|
+
// next startup will find this file and notify the user.
|
|
375
|
+
const isUserMessage = !isSchedulerMessage;
|
|
376
|
+
if (isUserMessage) {
|
|
377
|
+
writePendingCheckpoint(state.config, msg);
|
|
378
|
+
}
|
|
379
|
+
// Prompt the agent, with automatic fallback to other models if the primary fails
|
|
380
|
+
const promptStart = Date.now();
|
|
381
|
+
await promptWithFallback(session, msg.text, state.config, userSession.modelRegistry, msg.userId, msg.images);
|
|
382
|
+
const latencyMs = Date.now() - promptStart;
|
|
383
|
+
// Extract the last assistant response
|
|
384
|
+
const lastAssistant = toAssistantMessage(session.messages.filter((m) => m.role === "assistant").pop());
|
|
385
|
+
const inputTokens = lastAssistant?.usage?.input ?? 0;
|
|
386
|
+
const outputTokens = lastAssistant?.usage?.output ?? 0;
|
|
387
|
+
const model = modelNameForLog(lastAssistant?.provider, lastAssistant?.model, state.config.agent.model);
|
|
388
|
+
const costConfig = resolveModelCost(state.config, lastAssistant?.provider, lastAssistant?.model ?? state.config.agent.model);
|
|
389
|
+
const costUsd = costConfig
|
|
390
|
+
? calculateCostUsd(inputTokens, outputTokens, costConfig)
|
|
391
|
+
: (lastAssistant?.usage?.cost?.total ?? 0);
|
|
392
|
+
await state.usageTracker.append({
|
|
393
|
+
user_id: msg.userId,
|
|
394
|
+
model,
|
|
395
|
+
input_tokens: inputTokens,
|
|
396
|
+
output_tokens: outputTokens,
|
|
397
|
+
cost_usd: costUsd,
|
|
398
|
+
latency_ms: latencyMs,
|
|
399
|
+
});
|
|
400
|
+
if (lastAssistant?.stopReason === "error") {
|
|
401
|
+
const errorMsg = String(lastAssistant.errorMessage ?? "Unknown model error");
|
|
402
|
+
console.error("Model error:", errorMsg);
|
|
403
|
+
await getBridge(state, msg.platform).sendMessage(msg.channelId, "Something went wrong while generating a response. Please try again.");
|
|
404
|
+
return;
|
|
405
|
+
}
|
|
406
|
+
const responseText = extractResponseText(lastAssistant);
|
|
407
|
+
if (responseText.trim() === SILENT_REPLY_TOKEN) {
|
|
408
|
+
// Scheduled/proactive turn with nothing to surface — swallow silently
|
|
409
|
+
console.log(`[agent] Silent reply from ${msg.userId}, suppressing message`);
|
|
410
|
+
}
|
|
411
|
+
else if (responseText.trim()) {
|
|
412
|
+
// Append to audit log
|
|
413
|
+
await state.historyManager.append({
|
|
414
|
+
role: "assistant",
|
|
415
|
+
content: responseText,
|
|
416
|
+
});
|
|
417
|
+
await getBridge(state, msg.platform).sendMessage(msg.channelId, responseText);
|
|
418
|
+
}
|
|
419
|
+
else if (!isSchedulerMessage) {
|
|
420
|
+
// Model made tool calls but produced no text in response to a user
|
|
421
|
+
// message. Re-prompt so the user gets a real reply, not silence.
|
|
422
|
+
console.log(`[agent] No text response from ${msg.userId} after user message, re-prompting`);
|
|
423
|
+
await promptWithFallback(session, "You just completed tool calls but didn't reply to the user. Respond now with a brief confirmation of what you did.", state.config, userSession.modelRegistry, msg.userId);
|
|
424
|
+
const followUpMsg = toAssistantMessage(session.messages.filter((m) => m.role === "assistant").pop());
|
|
425
|
+
const followUpText = extractResponseText(followUpMsg);
|
|
426
|
+
if (followUpText.trim() && followUpText.trim() !== SILENT_REPLY_TOKEN) {
|
|
427
|
+
await state.historyManager.append({ role: "assistant", content: followUpText });
|
|
428
|
+
await getBridge(state, msg.platform).sendMessage(msg.channelId, followUpText);
|
|
429
|
+
}
|
|
430
|
+
}
|
|
431
|
+
else {
|
|
432
|
+
// Scheduler/system turn with no text output — normal, suppress silently.
|
|
433
|
+
console.log(`[agent] No text response from ${msg.userId} (scheduler turn), suppressing`);
|
|
434
|
+
}
|
|
435
|
+
}
|
|
436
|
+
catch (error) {
|
|
437
|
+
const err = error;
|
|
438
|
+
console.error("Error processing message:", err);
|
|
439
|
+
await getBridge(state, msg.platform).sendMessage(msg.channelId, "Something went wrong processing your message. Please try again.");
|
|
440
|
+
}
|
|
441
|
+
finally {
|
|
442
|
+
// Always clean up the crash-recovery checkpoint, regardless of outcome.
|
|
443
|
+
// force: true makes this a no-op for scheduler messages (no file was written).
|
|
444
|
+
deletePendingCheckpoint(state.config, msg.userId);
|
|
445
|
+
}
|
|
446
|
+
}
|
|
447
|
+
/**
 * Start one app instance.
 *
 * Wires together config, persistence stores, channel bridges, the message
 * queue, and the scheduler, then performs the startup notifications
 * (crash-recovery checkpoints, restart marker, config rollback warning).
 *
 * @returns {Promise<{stop: () => Promise<void>}>} a handle whose `stop()`
 *   shuts down the scheduler, compaction jobs, bridges, and user sessions;
 *   guarded by a `stopped` flag so calling it more than once is a no-op.
 * @throws {Error} if neither Telegram nor WhatsApp is configured (no bridges).
 */
async function startApp() {
    // ---------------------------------------------------------------------------
    // Infra setup: config, logging, embedding model
    // ---------------------------------------------------------------------------
    // If the last config edit was invalid, loadConfigWithRollback restores the
    // previous working config and reports why via rolledBack/rollbackReason.
    const { config, rolledBack, rollbackReason } = loadConfigWithRollback();
    applyIntegrationEnvVars(config);
    ensureDataDirs(config);
    // From here on, console output is also mirrored to the app log file.
    installConsoleFileLogging(createAppLogger(config.data_dir));
    console.log(`Bryti starting: agent="${config.agent.name}" model="${config.agent.model}"`);
    console.log(`Data directory: ${config.data_dir}`);
    console.log(`Providers: ${config.models.providers.map((p) => p.name).join(", ")}`);
    console.log(`Config cron jobs: ${config.cron.length}`);
    // Pre-load embedding model (may download weights into <data_dir>/.models
    // on first run, hence the warmup before any message is processed).
    const modelsDir = path.join(config.data_dir, ".models");
    console.log("Loading embedding model (downloading on first run)...");
    await warmupEmbeddings(modelsDir);
    console.log("Embedding model ready.");
    // Persistence stores, all rooted in config.data_dir.
    const coreMemory = createCoreMemory(config.data_dir);
    const historyManager = createHistoryManager(config.data_dir);
    const usageTracker = createUsageTracker(config.data_dir);
    const trustStore = createTrustStore(config.data_dir, config.trust.approved_tools);
    // ---------------------------------------------------------------------------
    // Bridge setup: Telegram, WhatsApp
    // ---------------------------------------------------------------------------
    const bridges = [];
    if (config.telegram.token) {
        // An empty allow-list is almost certainly a misconfiguration: the bot
        // would start but silently ignore everyone.
        if (config.telegram.allowed_users.length === 0) {
            console.warn("[telegram] WARNING: allowed_users is empty. No users will be able to interact with the bot. Add Telegram user IDs to config.");
        }
        const telegram = new TelegramBridge(config.telegram.token, config.telegram.allowed_users);
        bridges.push(telegram);
    }
    if (config.whatsapp.enabled) {
        const whatsapp = new WhatsAppBridge(config.data_dir, config.whatsapp.allowed_users);
        bridges.push(whatsapp);
    }
    if (bridges.length === 0) {
        throw new Error("No channel bridges configured. Enable Telegram and/or WhatsApp.");
    }
    // ---------------------------------------------------------------------------
    // State assembly
    // ---------------------------------------------------------------------------
    // Shared mutable app state passed to processMessage and the bridges.
    // scheduler/enqueue start as null and are wired up further below.
    const state = {
        config,
        coreMemory,
        historyManager,
        usageTracker,
        sessions: new Map(),
        bridges,
        scheduler: null,
        enqueue: null,
        trustStore,
        lastUserMessages: new Map(),
        recoveredSessions: new Set(),
        pendingSchedulerContext: new Map(),
    };
    // Second callback is the overflow handler: when the queue is full, the
    // message is rejected and the user is told to retry later.
    const queue = new MessageQueue((msg) => processMessage(state, msg), async (msg) => {
        console.log("Queue full, rejecting message:", msg.text);
        const bridge = getBridge(state, msg.platform);
        await bridge.sendMessage(msg.channelId, "I'm a bit overwhelmed right now. Please wait a moment and try again.");
    });
    // ---------------------------------------------------------------------------
    // Queue / scheduler wiring
    // ---------------------------------------------------------------------------
    // Wire up the enqueue function so worker trigger callbacks can inject messages
    state.enqueue = (msg) => queue.enqueue(msg);
    // Every inbound bridge message goes through the same queue.
    for (const bridge of bridges) {
        bridge.onMessage(async (msg) => {
            queue.enqueue(msg);
        });
    }
    // Start all bridges concurrently
    await Promise.all(bridges.map((b) => b.start()));
    console.log(`Channels: ${bridges.map((b) => b.name).join(", ")}`);
    // Scheduler-generated messages are injected through the same queue as
    // user messages, so processMessage handles both uniformly.
    const scheduler = createScheduler(config, async (msg) => {
        queue.enqueue(msg);
    });
    scheduler.start();
    state.scheduler = scheduler;
    // Start proactive compaction (idle + nightly)
    const compactionJobs = startProactiveCompaction(config, () => state.sessions);
    // ---------------------------------------------------------------------------
    // Startup notifications: crash recovery, restart marker, config rollback
    // ---------------------------------------------------------------------------
    // Recover any pending messages from a previous crash: a checkpoint file
    // left behind means we died mid-message, so apologize and ask for a resend.
    await recoverPendingCheckpoints(config, async (checkpoint, userId) => {
        const bridge = getBridge(state, checkpoint.platform);
        await bridge.sendMessage(checkpoint.channelId, "Sorry, I crashed while working on your last message. Could you resend it?");
    });
    // If this startup was triggered by a restart request, notify the user.
    // If config.yml was rolled back due to a bad edit, include a warning.
    const restartResult = readAndClearRestartMarker(config.data_dir);
    if (restartResult) {
        const { marker } = restartResult;
        console.log(`[restart] Back online after restart requested by ${marker.userId}: ${marker.reason}`);
        // Prefer the bridge the restart was requested from; fall back to the
        // first configured bridge if that platform is no longer available.
        const bridge = bridges.find((b) => b.platform === marker.platform) ?? bridges[0];
        if (rolledBack) {
            console.warn(`[config] Config was rolled back due to: ${rollbackReason}`);
            await bridge.sendMessage(marker.channelId, `Back online, but your config.yml change was invalid and has been rolled back.\n\nError: ${rollbackReason}\n\nThe previous working config is still active.`);
        }
        else {
            await bridge.sendMessage(marker.channelId, "Back online.");
        }
    }
    else if (rolledBack) {
        // Rare: a snapshot existed but there was no restart marker (e.g. previous crash).
        // Log a warning but don't notify — we don't know which channel to use.
        console.warn(`[config] Config rolled back from snapshot (no restart marker). Reason: ${rollbackReason}`);
    }
    console.log("Bryti ready!");
    // Idempotency guard so stop() can be called multiple times safely.
    let stopped = false;
    return {
        async stop() {
            if (stopped) {
                return;
            }
            stopped = true;
            console.log("Shutting down...");
            // Stop producers first (scheduler, compaction), then the channels,
            // then dispose per-user sessions, and finally the embedding model.
            state.scheduler.stop();
            for (const job of compactionJobs)
                job.stop();
            await Promise.all(state.bridges.map((b) => b.stop()));
            for (const [userId, userSession] of state.sessions) {
                console.log(`Disposing session for user ${userId}`);
                userSession.dispose();
            }
            await disposeEmbeddings();
        },
    };
}
|
|
580
|
+
/**
 * Normalize an arbitrary thrown/rejected value into an Error instance.
 *
 * @param {unknown} reason - value caught from a `throw` or promise rejection
 * @returns {Error} `reason` itself when it is already an Error, otherwise a
 *   new Error whose message is the stringified value
 */
function asError(reason) {
    return reason instanceof Error ? reason : new Error(String(reason));
}
|
|
586
|
+
/**
 * Pause asynchronously for roughly `ms` milliseconds.
 *
 * @param {number} ms - delay in milliseconds
 * @returns {Promise<void>} resolves once the timeout fires
 */
function sleep(ms) {
    return new Promise((resolve) => {
        setTimeout(resolve, ms);
    });
}
|
|
589
|
+
/**
 * Top-level supervisor loop. Starts the app, catches fatal errors, and
 * restarts automatically after a delay.
 *
 * State machine: the `resolver` is a promise resolve function that is set
 * when we're waiting for either a SIGINT/SIGTERM (shutdown) or an uncaught
 * exception (restart). When one fires, it resolves the promise with the
 * appropriate outcome string, the app is stopped, and the loop continues
 * or breaks accordingly. The indirection via `resolver` lets signal handlers
 * and error handlers share a single control path.
 *
 * @returns {Promise<void>} resolves after a requested shutdown completes.
 */
async function runWithSupervisor() {
    // Restart back-off; overridable for tests via BRYTI_RESTART_DELAY_MS.
    const restartDelayMs = Number(process.env.BRYTI_RESTART_DELAY_MS ?? 2000);
    let shutdownRequested = false;
    // Resolve function of the currently-awaited outcome promise, or null
    // when nobody is waiting (e.g. during startup).
    let resolver = null;
    // Resolve the pending outcome promise at most once; extra calls (second
    // signal, late error) are ignored because resolver is nulled first.
    const resolveOutcome = (outcome) => {
        if (!resolver) {
            return;
        }
        const current = resolver;
        resolver = null;
        current(outcome);
    };
    const onSignal = () => {
        // Set the flag first so a signal arriving while no resolver is
        // installed (e.g. during startApp) still breaks the loop.
        shutdownRequested = true;
        resolveOutcome("shutdown");
    };
    process.once("SIGINT", onSignal);
    process.once("SIGTERM", onSignal);
    while (!shutdownRequested) {
        let app;
        let fatalError;
        try {
            app = await startApp();
        }
        catch (error) {
            fatalError = asError(error);
        }
        // Startup failed: no app handle to stop, so just back off and retry
        // (unless a shutdown signal arrived in the meantime).
        if (!app) {
            console.error("Fatal startup error:", fatalError);
            if (shutdownRequested) {
                break;
            }
            console.log(`Restarting in ${restartDelayMs}ms...`);
            await sleep(restartDelayMs);
            continue;
        }
        // Runtime fatal-error handlers: record the error and request a restart.
        const onUncaughtException = (error) => {
            fatalError = error;
            resolveOutcome("restart");
        };
        const onUnhandledRejection = (reason) => {
            fatalError = asError(reason);
            resolveOutcome("restart");
        };
        process.once("uncaughtException", onUncaughtException);
        process.once("unhandledRejection", onUnhandledRejection);
        // Park here until a signal or a fatal error fires. The executor
        // re-checks shutdownRequested to close the race where a signal
        // arrived after startApp() returned but before resolver was set.
        const outcome = await new Promise((resolve) => {
            if (shutdownRequested) {
                resolve("shutdown");
                return;
            }
            resolver = resolve;
        });
        // Detach the per-iteration handlers before stopping the app so a
        // stale handler can't fire into the next iteration's state.
        process.removeListener("uncaughtException", onUncaughtException);
        process.removeListener("unhandledRejection", onUnhandledRejection);
        await app.stop();
        if (outcome === "shutdown") {
            break;
        }
        // outcome === "restart": log the fatal error, then back off and loop
        // (unless a shutdown signal slipped in while we were stopping).
        console.error("Fatal runtime error:", fatalError);
        if (shutdownRequested) {
            break;
        }
        console.log(`Restarting in ${restartDelayMs}ms...`);
        await sleep(restartDelayMs);
    }
    process.removeListener("SIGINT", onSignal);
    process.removeListener("SIGTERM", onSignal);
}
|
|
669
|
+
// Process entry point. The supervisor loop normally only returns after a
// requested shutdown; if an error escapes it there is nothing left to
// recover with, so log it and exit with a failure status.
void (async () => {
    try {
        await runWithSupervisor();
    }
    catch (error) {
        console.error("Supervisor fatal error:", error);
        process.exit(1);
    }
})();
|
|
673
|
+
//# sourceMappingURL=index.js.map
|