@iletai/nzb 1.9.1 → 1.10.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cron/research-runner.js +208 -0
- package/dist/cron/scheduler.js +4 -1
- package/dist/cron/task-runner.js +3 -0
- package/dist/daemon.js +5 -0
- package/dist/logger.js +118 -0
- package/dist/paths.js +2 -0
- package/dist/telegram/handlers/commands.js +19 -1
- package/package.json +1 -1
|
@@ -0,0 +1,208 @@
|
|
|
1
|
+
import { config } from "../config.js";
|
|
2
|
+
const FETCH_TIMEOUT_MS = 10_000;
const USER_AGENT = "nzb-research/1.0";
// ── HTTP helper ──────────────────────────────────────────────────────
/**
 * GET a URL and parse the response body as JSON.
 * The request is aborted after FETCH_TIMEOUT_MS; a non-2xx status throws.
 */
async function fetchJson(url) {
    const controller = new AbortController();
    const timeout = setTimeout(() => controller.abort(), FETCH_TIMEOUT_MS);
    try {
        const response = await fetch(url, {
            signal: controller.signal,
            headers: { "User-Agent": USER_AGENT },
        });
        if (!response.ok) {
            throw new Error(`HTTP ${response.status} ${response.statusText}`);
        }
        return await response.json();
    }
    finally {
        // Always disarm the abort timer, success or failure.
        clearTimeout(timeout);
    }
}
|
|
21
|
+
// ── Source fetchers ──────────────────────────────────────────────────
/** Fetch the current top-10 Hacker News stories as a numbered text list. */
async function fetchHackerNews() {
    const storyIds = await fetchJson("https://hacker-news.firebaseio.com/v0/topstories.json");
    const stories = await Promise.all(storyIds
        .slice(0, 10)
        .map((id) => fetchJson(`https://hacker-news.firebaseio.com/v0/item/${id}.json`)));
    const formatted = stories.map((story, idx) => {
        const link = story.url ? `\n ${story.url}` : "";
        return `${idx + 1}. ${story.title ?? "Untitled"} (${story.score ?? 0} pts, ${story.descendants ?? 0} comments)${link}`;
    });
    return formatted.join("\n");
}
|
|
29
|
+
/** Fetch the 5 most-starred GitHub repositories created in the last 7 days. */
async function fetchGitHubTrending() {
    const oneWeekAgo = new Date(Date.now() - 7 * 24 * 60 * 60 * 1000);
    const since = oneWeekAgo.toISOString().split("T")[0];
    const data = await fetchJson(`https://api.github.com/search/repositories?q=created:>${since}&sort=stars&order=desc&per_page=5`);
    const repos = data.items ?? [];
    return repos
        .map((repo, idx) => `${idx + 1}. ${repo.full_name ?? "unknown"} ⭐${repo.stargazers_count ?? 0} [${repo.language ?? "N/A"}]\n ${repo.description ?? "No description"}\n ${repo.html_url ?? ""}`)
        .join("\n");
}
|
|
38
|
+
/** Fetch the 5 hottest posts of a subreddit as a numbered text list. */
async function fetchReddit(subreddit) {
    const data = await fetchJson(`https://www.reddit.com/r/${subreddit}/hot.json?limit=5`);
    const posts = data.data?.children ?? [];
    // Posts without a title (e.g. removed entries) are skipped before numbering.
    const titled = posts.filter((post) => post.data.title);
    return titled
        .map((post, idx) => `${idx + 1}. ${post.data.title} (${post.data.score ?? 0} pts, ${post.data.num_comments ?? 0} comments)`)
        .join("\n");
}
|
|
46
|
+
/**
 * Fetch current precious-metal spot prices in USD per troy ounce.
 * Tries metals.dev first, falls back to goldapi.io, and finally returns a
 * human-readable failure message — this function never throws.
 */
async function fetchGoldPrice() {
    // Use a free metals API. Falls back to a message if unavailable.
    try {
        // Fix: the published build had "api_key=demo¤cy=USD" — the "&curren"
        // of "&currency" had been collapsed into the HTML entity "¤" (U+00A4),
        // silently dropping the currency query parameter.
        const data = await fetchJson("https://api.metals.dev/v1/latest?api_key=demo&currency=USD&unit=toz");
        const metals = data.metals;
        if (metals) {
            const lines = [];
            if (metals.gold)
                lines.push(`Gold: $${metals.gold}/oz`);
            if (metals.silver)
                lines.push(`Silver: $${metals.silver}/oz`);
            if (metals.platinum)
                lines.push(`Platinum: $${metals.platinum}/oz`);
            return lines.join("\n") || "No metal price data available.";
        }
        // Unexpected payload shape — return a truncated dump for debugging.
        return JSON.stringify(data).slice(0, 500);
    }
    catch {
        // Fallback: try alternative free API
        try {
            const data = await fetchJson("https://www.goldapi.io/api/XAU/USD");
            const price = data.price;
            return price ? `Gold: $${price}/oz` : JSON.stringify(data).slice(0, 300);
        }
        catch {
            return "Gold price APIs unavailable. Could not fetch current prices.";
        }
    }
}
|
|
75
|
+
/** Fetch USD prices (with 24h change) for a fixed coin set from CoinGecko. */
async function fetchCryptoPrices() {
    const data = await fetchJson("https://api.coingecko.com/api/v3/simple/price?ids=bitcoin,ethereum,solana,cardano&vs_currencies=usd&include_24hr_change=true");
    const lines = Object.entries(data).map(([coin, info]) => {
        const delta = info.usd_24h_change;
        const change = delta != null ? ` (${delta > 0 ? "+" : ""}${delta.toFixed(2)}%)` : "";
        return `${coin}: $${info.usd ?? "N/A"}${change}`;
    });
    return lines.join("\n") || "No crypto data available.";
}
|
|
84
|
+
// ── Presets ───────────────────────────────────────────────────────────
/** Resolve a preset name to its list of data sources; empty array if unknown. */
function getPresetSources(preset) {
    if (preset === "tech-trends") {
        return [
            { name: "HackerNews Top Stories", fetchData: fetchHackerNews },
            { name: "GitHub Trending Repos (last 7 days)", fetchData: fetchGitHubTrending },
            { name: "Reddit r/programming", fetchData: () => fetchReddit("programming") },
            { name: "Reddit r/MachineLearning", fetchData: () => fetchReddit("MachineLearning") },
        ];
    }
    if (preset === "gold-price") {
        return [
            { name: "Gold & Metals Prices", fetchData: fetchGoldPrice },
        ];
    }
    if (preset === "crypto") {
        return [
            { name: "Cryptocurrency Prices", fetchData: fetchCryptoPrices },
        ];
    }
    return [];
}
|
|
106
|
+
/** Wrap user-supplied {name, url} entries as fetchable research sources. */
function buildCustomSources(sources) {
    return sources.map((source) => {
        const fetchData = async () => {
            const data = await fetchJson(source.url);
            // Non-string payloads are serialized and capped to keep prompts small.
            if (typeof data === "string") {
                return data;
            }
            return JSON.stringify(data).slice(0, 2000);
        };
        return { name: source.name, fetchData };
    });
}
|
|
115
|
+
// ── Fetch all sources in parallel ────────────────────────────────────
/**
 * Fetch every source concurrently and join results into "## name" sections.
 * Per-source failures are captured inline as "[Failed to fetch: …]" so one
 * bad source never hides the others.
 */
async function fetchAllSources(sources) {
    // Each task catches its own error, so Promise.all cannot reject here.
    // (The original used Promise.allSettled, whose "rejected" branch was
    // dead code that would have silently dropped a source.)
    const sections = await Promise.all(sources.map(async (src) => {
        try {
            const data = await src.fetchData();
            return `## ${src.name}\n${data}`;
        }
        catch (err) {
            const msg = err instanceof Error ? err.message : String(err);
            return `## ${src.name}\n[Failed to fetch: ${msg}]`;
        }
    }));
    return sections.join("\n\n");
}
|
|
135
|
+
// ── Main entry point ─────────────────────────────────────────────────
/**
 * Execute a scheduled "research" cron task.
 *
 * Pipeline: resolve sources (preset and/or custom URLs) → fetch real data
 * in parallel → wrap it in a grounding prompt → summarize via the AI
 * orchestrator → optionally push the result to Telegram → return it.
 *
 * @param job     Cron job record; reads job.id (for logging) and job.model
 *                (optional model override — selects the one-off prompt path).
 * @param payload Task payload; reads payload.preset, payload.sources
 *                (array of {name, url}) and payload.prompt.
 * @returns The formatted report string (header + AI summary).
 * @throws  If no sources resolve, every source fails, or summarization fails.
 */
export async function executeResearchTask(job, payload) {
    // 1. Determine sources
    const preset = payload.preset;
    const customSources = payload.sources;
    // NOTE: "||" means an explicit empty-string prompt also falls back to
    // the default instruction — presumably intended.
    const prompt = payload.prompt || "Summarize the following data concisely.";
    let sources = [];
    if (preset) {
        sources = getPresetSources(preset);
        if (sources.length === 0) {
            throw new Error(`Unknown research preset: ${preset}. Available: tech-trends, gold-price, crypto`);
        }
    }
    // Custom sources are additive: they are appended after any preset sources.
    if (customSources && Array.isArray(customSources)) {
        sources = sources.concat(buildCustomSources(customSources));
    }
    if (sources.length === 0) {
        throw new Error("Research task requires 'preset' or 'sources' in payload. Available presets: tech-trends, gold-price, crypto");
    }
    // 2. Fetch real data from all sources
    console.log(`[nzb] Research task '${job.id}': fetching ${sources.length} source(s)...`);
    const fetchedData = await fetchAllSources(sources);
    if (!fetchedData.trim()) {
        throw new Error("All research sources failed to return data.");
    }
    // 3. Build AI prompt with real data — explicitly grounds the model in the
    // fetched text so it does not hallucinate figures from training data.
    const aiPrompt = `[Scheduled research task]

You are given REAL-TIME data fetched from the internet just now. Use ONLY this data to produce your summary — do NOT use your built-in knowledge for facts or figures.

USER INSTRUCTIONS:
${prompt}

--- FETCHED DATA (${new Date().toISOString()}) ---

${fetchedData}

--- END OF DATA ---

Based on the data above, provide your summary following the user's instructions.`;
    // 4. Send to AI for summarization
    console.log(`[nzb] Research task '${job.id}': sending to AI for summarization...`);
    let aiResponse;
    try {
        // Dynamic imports keep the orchestrator out of the module graph until
        // a research job actually runs.
        if (job.model) {
            const { runOneOffPrompt } = await import("../copilot/orchestrator.js");
            aiResponse = await runOneOffPrompt(aiPrompt, job.model);
        }
        else {
            const { sendToOrchestrator } = await import("../copilot/orchestrator.js");
            // Bridge the callback API to a promise. NOTE(review): if the
            // orchestrator never invokes the callback with done=true, this
            // promise never settles — confirm the orchestrator guarantees a
            // terminal callback (e.g. on error/timeout).
            aiResponse = await new Promise((resolve) => {
                sendToOrchestrator(aiPrompt, { type: "background" }, (text, done) => {
                    if (done)
                        resolve(text);
                });
            });
        }
    }
    catch (err) {
        throw new Error(`Research AI summarization failed: ${err instanceof Error ? err.message : String(err)}`);
    }
    // 5. Format and send to Telegram
    const header = preset
        ? `🔬 Research: ${preset}`
        : "🔬 Research Report";
    const formattedMessage = `${header}\n\n${aiResponse}`;
    if (config.telegramEnabled) {
        const { sendProactiveMessage } = await import("../telegram/bot.js");
        await sendProactiveMessage(formattedMessage);
    }
    console.log(`[nzb] Research task '${job.id}': completed successfully.`);
    return formattedMessage;
}
|
|
208
|
+
//# sourceMappingURL=research-runner.js.map
|
package/dist/cron/scheduler.js
CHANGED
|
@@ -107,7 +107,10 @@ async function runJob(jobId) {
|
|
|
107
107
|
}
|
|
108
108
|
}
|
|
109
109
|
console.log(`[nzb] Cron job '${job.id}' completed in ${finishedAt.getTime() - startedAt.getTime()}ms`);
|
|
110
|
-
|
|
110
|
+
// Vocab and research tasks handle their own Telegram notifications
|
|
111
|
+
if (job.taskType !== "vocab" && job.taskType !== "research") {
|
|
112
|
+
await notifyResult(job, `✅ Completed\n${result}`);
|
|
113
|
+
}
|
|
111
114
|
return result;
|
|
112
115
|
}
|
|
113
116
|
catch (err) {
|
package/dist/cron/task-runner.js
CHANGED
|
@@ -4,6 +4,7 @@ import { freemem, totalmem } from "os";
|
|
|
4
4
|
import { join } from "path";
|
|
5
5
|
import { config } from "../config.js";
|
|
6
6
|
import { DB_PATH, NZB_HOME } from "../paths.js";
|
|
7
|
+
import { executeResearchTask } from "./research-runner.js";
|
|
7
8
|
const BACKUPS_DIR = join(NZB_HOME, "backups");
|
|
8
9
|
/** Notify result to Telegram if the job has notifyTelegram enabled. */
|
|
9
10
|
export async function notifyResult(job, message) {
|
|
@@ -33,6 +34,8 @@ export async function executeCronTask(job) {
|
|
|
33
34
|
return await executeWebhookTask(payload, job.timeoutMs);
|
|
34
35
|
case "vocab":
|
|
35
36
|
return await executeVocabTask(job, payload);
|
|
37
|
+
case "research":
|
|
38
|
+
return await executeResearchTask(job, payload);
|
|
36
39
|
default:
|
|
37
40
|
throw new Error(`Unknown task type: ${job.taskType}`);
|
|
38
41
|
}
|
package/dist/daemon.js
CHANGED
|
@@ -4,11 +4,14 @@ import { broadcastToSSE, startApiServer } from "./api/server.js";
|
|
|
4
4
|
import { config } from "./config.js";
|
|
5
5
|
import { getClient, stopClient } from "./copilot/client.js";
|
|
6
6
|
import { getWorkers, initOrchestrator, setMessageLogger, setProactiveNotify, setWorkerNotify, stopHealthCheck, } from "./copilot/orchestrator.js";
|
|
7
|
+
import { closeDaemonLogger, initDaemonLogger } from "./logger.js";
|
|
7
8
|
import { PID_FILE_PATH } from "./paths.js";
|
|
8
9
|
import { closeDb, getDb } from "./store/db.js";
|
|
9
10
|
import { createBot, sendProactiveMessage, sendWorkerNotification, startBot, stopBot } from "./telegram/bot.js";
|
|
10
11
|
import { startCronScheduler, stopCronScheduler } from "./cron/scheduler.js";
|
|
11
12
|
import { checkForUpdate, getDismissedVersion, isAutoUpdateEnabled, scheduleUpdateCheck, shouldCheckUpdate, stopUpdateCheck } from "./update.js";
|
|
13
|
+
// Initialize file logging before anything else
|
|
14
|
+
initDaemonLogger();
|
|
12
15
|
// Log the active CA bundle (injected by cli.ts via re-exec).
|
|
13
16
|
if (process.env.NODE_EXTRA_CA_CERTS) {
|
|
14
17
|
console.log(`[nzb] Using system CA bundle: ${process.env.NODE_EXTRA_CA_CERTS}`);
|
|
@@ -261,6 +264,7 @@ async function shutdown() {
|
|
|
261
264
|
closeDb();
|
|
262
265
|
releasePidLock();
|
|
263
266
|
console.log("[nzb] Goodbye.");
|
|
267
|
+
closeDaemonLogger();
|
|
264
268
|
process.exit(0);
|
|
265
269
|
}
|
|
266
270
|
/** Restart the daemon by spawning a new process and exiting. */
|
|
@@ -294,6 +298,7 @@ export async function restartDaemon() {
|
|
|
294
298
|
}
|
|
295
299
|
closeDb();
|
|
296
300
|
releasePidLock();
|
|
301
|
+
closeDaemonLogger();
|
|
297
302
|
// Spawn a detached replacement process with the same args (include execArgv for tsx/loaders)
|
|
298
303
|
const child = spawn(process.execPath, [...process.execArgv, ...process.argv.slice(1)], {
|
|
299
304
|
detached: true,
|
package/dist/logger.js
ADDED
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
import { createWriteStream, readFileSync, renameSync, statSync } from "fs";
|
|
2
|
+
import { DAEMON_LOG_PATH, ensureNZBHome } from "./paths.js";
|
|
3
|
+
/** Max log file size before rotation (5 MB) */
const MAX_LOG_SIZE = 5 * 1024 * 1024;
// Module-level logger state, mutated by initDaemonLogger/closeDaemonLogger.
let logStream; // append stream to DAEMON_LOG_PATH, created lazily by ensureStream()
let originalLog; // console.log as it was before interception
let originalError; // console.error as it was before interception
let originalWarn; // console.warn as it was before interception
let initialized = false; // guards initDaemonLogger against double interception
|
|
10
|
+
/** Current UTC time formatted as "YYYY-MM-DD HH:MM:SS.mmm". */
function getTimestamp() {
    const iso = new Date().toISOString();
    // "2024-01-02T03:04:05.678Z" → "2024-01-02 03:04:05.678"
    return `${iso.slice(0, 10)} ${iso.slice(11, 23)}`;
}
|
|
13
|
+
/** Rotate daemon.log → daemon.log.1 if it exceeds MAX_LOG_SIZE */
function rotateIfNeeded() {
    try {
        const stats = statSync(DAEMON_LOG_PATH);
        if (stats.size < MAX_LOG_SIZE) {
            return;
        }
        // Close the open stream before renaming so the handle isn't left
        // pointing at the rotated file.
        if (logStream) {
            logStream.end();
            logStream = undefined;
        }
        renameSync(DAEMON_LOG_PATH, DAEMON_LOG_PATH + ".1");
    }
    catch (err) {
        // ENOENT just means there is no log file yet — nothing to rotate.
        // The original swallowed ALL errors under that assumption; real
        // failures (permissions, cross-device rename, ...) are now surfaced
        // on stderr, but logging still never crashes the daemon.
        if (err?.code !== "ENOENT") {
            (originalError ?? console.error)(`[nzb] log rotation failed: ${err instanceof Error ? err.message : String(err)}`);
        }
    }
}
|
|
29
|
+
/** Lazily (re)create the append-mode stream to the daemon log file. */
function ensureStream() {
    if (logStream && !logStream.destroyed) {
        return logStream;
    }
    logStream = createWriteStream(DAEMON_LOG_PATH, { flags: "a" });
    // Swallow write errors — logging must never take the daemon down.
    logStream.on("error", () => { });
    return logStream;
}
|
|
38
|
+
/**
 * Render console arguments to a single line: strings pass through, Errors
 * become "message\nstack", everything else is JSON (or String() if that throws,
 * e.g. on circular structures).
 */
function formatArgs(args) {
    const parts = [];
    for (const value of args) {
        if (typeof value === "string") {
            parts.push(value);
        }
        else if (value instanceof Error) {
            parts.push(`${value.message}\n${value.stack ?? ""}`);
        }
        else {
            try {
                parts.push(JSON.stringify(value));
            }
            catch {
                parts.push(String(value));
            }
        }
    }
    return parts.join(" ");
}
|
|
54
|
+
/**
 * Initialize daemon file logging.
 * Intercepts console.log, console.error, console.warn and mirrors
 * all output to ~/.nzb/daemon.log with timestamps.
 * Call this once at daemon startup before any other logging.
 */
export function initDaemonLogger() {
    if (initialized)
        return;
    initialized = true;
    ensureNZBHome();
    rotateIfNeeded();
    originalLog = console.log.bind(console);
    originalError = console.error.bind(console);
    originalWarn = console.warn.bind(console);
    // One wrapper factory replaces the three byte-identical closures the
    // original duplicated: forward to the real console, then best-effort
    // append a timestamped, tagged line to the log file.
    const mirror = (original, tag) => (...args) => {
        original(...args);
        const line = `${getTimestamp()} [${tag}] ${formatArgs(args)}\n`;
        try {
            ensureStream().write(line);
        }
        catch {
            // Never crash for logging
        }
    };
    console.log = mirror(originalLog, "LOG");
    console.error = mirror(originalError, "ERR");
    console.warn = mirror(originalWarn, "WRN");
}
|
|
100
|
+
/** Close the log stream gracefully (call during shutdown) */
export function closeDaemonLogger() {
    // Guard clause: nothing to do when no live stream exists.
    if (logStream === undefined || logStream.destroyed) {
        return;
    }
    logStream.end();
    logStream = undefined;
}
|
|
107
|
+
/** Read the last N lines from the daemon log file. Returns null if file doesn't exist. */
export function tailDaemonLog(lines = 50) {
    // Robustness: slice(-0) returns the WHOLE array, so a zero/negative or
    // non-integer count would dump the entire file — clamp to the default.
    const count = Number.isInteger(lines) && lines > 0 ? lines : 50;
    try {
        const content = readFileSync(DAEMON_LOG_PATH, "utf-8");
        const allLines = content.trimEnd().split("\n");
        return allLines.slice(-count).join("\n");
    }
    catch {
        // Missing (or unreadable) file — callers treat null as "no log yet".
        return null;
    }
}
|
|
118
|
+
//# sourceMappingURL=logger.js.map
|
package/dist/paths.js
CHANGED
|
@@ -19,6 +19,8 @@ export const TUI_DEBUG_LOG_PATH = join(NZB_HOME, "tui-debug.log");
|
|
|
19
19
|
export const API_TOKEN_PATH = join(NZB_HOME, "api-token");
|
|
20
20
|
/** Path to the PID lock file for single-instance enforcement */
|
|
21
21
|
export const PID_FILE_PATH = join(NZB_HOME, "nzb.pid");
|
|
22
|
+
/** Path to the daemon console log file */
|
|
23
|
+
export const DAEMON_LOG_PATH = join(NZB_HOME, "daemon.log");
|
|
22
24
|
/** Ensure ~/.nzb/ exists */
|
|
23
25
|
export function ensureNZBHome() {
|
|
24
26
|
mkdirSync(NZB_HOME, { recursive: true });
|
|
@@ -2,8 +2,9 @@ import { config, persistEnvVar, persistModel } from "../../config.js";
|
|
|
2
2
|
import { cancelCurrentMessage, compactSession, getQueueSize, getWorkers, resetSession, } from "../../copilot/orchestrator.js";
|
|
3
3
|
import { listSkills } from "../../copilot/skills.js";
|
|
4
4
|
import { restartDaemon } from "../../daemon.js";
|
|
5
|
+
import { tailDaemonLog } from "../../logger.js";
|
|
5
6
|
import { searchMemories } from "../../store/memory.js";
|
|
6
|
-
import { chunkMessage } from "../formatter.js";
|
|
7
|
+
import { chunkMessage, escapeHtml } from "../formatter.js";
|
|
7
8
|
import { buildSettingsText, formatMemoryList } from "../menus.js";
|
|
8
9
|
import { sendCronMenu } from "./cron.js";
|
|
9
10
|
import { getReactionHelpText } from "./reactions.js";
|
|
@@ -31,6 +32,7 @@ export function registerCommandHandlers(bot, deps) {
|
|
|
31
32
|
"/memory — Stored memories\n" +
|
|
32
33
|
"/skills — Installed skills\n" +
|
|
33
34
|
"/workers — Active worker sessions\n" +
|
|
35
|
+
"/logs [N] — Tail daemon log (last N lines)\n" +
|
|
34
36
|
"/cron — Manage cron jobs\n" +
|
|
35
37
|
"/update — Check for updates\n" +
|
|
36
38
|
"/restart — Restart NZB\n\n" +
|
|
@@ -194,6 +196,22 @@ export function registerCommandHandlers(bot, deps) {
|
|
|
194
196
|
});
|
|
195
197
|
}, 500);
|
|
196
198
|
});
|
|
199
|
+
bot.command("logs", async (ctx) => {
|
|
200
|
+
const arg = ctx.match?.trim();
|
|
201
|
+
const lineCount = arg ? parseInt(arg, 10) : 50;
|
|
202
|
+
const count = Number.isNaN(lineCount) || lineCount < 1 ? 50 : Math.min(lineCount, 200);
|
|
203
|
+
const lines = tailDaemonLog(count);
|
|
204
|
+
if (!lines) {
|
|
205
|
+
await ctx.reply("No daemon log found.");
|
|
206
|
+
return;
|
|
207
|
+
}
|
|
208
|
+
const header = `📋 Last ${count} log lines:`;
|
|
209
|
+
const text = `${header}\n\n<pre>${escapeHtml(lines)}</pre>`;
|
|
210
|
+
const chunks = chunkMessage(text);
|
|
211
|
+
for (const chunk of chunks) {
|
|
212
|
+
await ctx.reply(chunk, { parse_mode: "HTML" });
|
|
213
|
+
}
|
|
214
|
+
});
|
|
197
215
|
bot.command("settings", async (ctx) => {
|
|
198
216
|
await ctx.reply(buildSettingsText(getUptimeStr), { reply_markup: settingsMenu });
|
|
199
217
|
});
|