@telepat/snoopy 0.1.13 → 0.1.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +48 -218
- package/README.zh-CN.md +112 -0
- package/dist/src/agent/install.d.ts +18 -0
- package/dist/src/agent/install.js +488 -0
- package/dist/src/cli/index.js +37 -0
- package/dist/src/mcp/helpers.d.ts +43 -0
- package/dist/src/mcp/helpers.js +433 -0
- package/dist/src/mcp/server.d.ts +1 -0
- package/dist/src/mcp/server.js +260 -0
- package/dist/src/mcp/tools.d.ts +77 -0
- package/dist/src/mcp/tools.js +90 -0
- package/package.json +2 -1
|
@@ -0,0 +1,433 @@
|
|
|
1
|
+
import { createRequire } from 'node:module';
|
|
2
|
+
import { spawnSync } from 'node:child_process';
|
|
3
|
+
import { getDb } from '../services/db/sqlite.js';
|
|
4
|
+
import { JobsRepository } from '../services/db/repositories/jobsRepo.js';
|
|
5
|
+
import { RunsRepository } from '../services/db/repositories/runsRepo.js';
|
|
6
|
+
import { ScanItemsRepository } from '../services/db/repositories/scanItemsRepo.js';
|
|
7
|
+
import { SettingsRepository } from '../services/db/repositories/settingsRepo.js';
|
|
8
|
+
import { AnalyticsService } from '../services/analytics/analyticsService.js';
|
|
9
|
+
import { extractErrorEntries, readRunLog } from '../services/logging/logReader.js';
|
|
10
|
+
import { getOpenRouterApiKey, isKeytarAvailable } from '../services/security/secretStore.js';
|
|
11
|
+
import { getStartupStatus } from '../services/startup/index.js';
|
|
12
|
+
import { isDaemonRunning, ensureDaemonRunning, requestDaemonReload } from '../services/daemonControl.js';
|
|
13
|
+
import { ensureAppDirs } from '../utils/paths.js';
|
|
14
|
+
const require = createRequire(import.meta.url);
|
|
15
|
+
/**
 * Resolve the installed @telepat/snoopy version by probing package.json at
 * the two possible depths relative to this compiled module.
 * @returns {string} The package version, or '0.0.0' when it cannot be found.
 */
export function getSnoopyVersion() {
    const candidatePaths = ['../../../package.json', '../../package.json'];
    for (const candidatePath of candidatePaths) {
        let pkg;
        try {
            pkg = require(candidatePath);
        }
        catch {
            continue; // this depth does not resolve; probe the next one
        }
        if (pkg.name === '@telepat/snoopy') {
            return pkg.version ?? '0.0.0';
        }
    }
    return '0.0.0';
}
|
|
26
|
+
/**
 * Convert a thrown value into an MCP tool error payload.
 * @param {unknown} error - Anything thrown or rejected by a tool handler.
 * @returns {{content: Array<{type: string, text: string}>, isError: true}}
 */
function formatToolError(error) {
    let message;
    if (error instanceof Error) {
        message = error.message;
    }
    else {
        message = String(error ?? 'Unknown error');
    }
    return { content: [{ type: 'text', text: message }], isError: true };
}
|
|
33
|
+
/**
 * Wrap structured data as a successful MCP tool result: a pretty-printed
 * JSON text part plus the raw value exposed as structuredContent.
 * @param {unknown} data - The report object produced by a tool helper.
 * @returns {{content: Array<{type: string, text: string}>, structuredContent: unknown}}
 */
function formatToolResult(data) {
    const text = JSON.stringify(data, null, 2);
    return {
        content: [{ type: 'text', text }],
        structuredContent: data,
    };
}
|
|
39
|
+
// Named re-exports so the server module can import both formatters directly.
export { formatToolError, formatToolResult };
|
|
40
|
+
/**
 * Build the full health-check report: database reachability, API key and
 * keytar availability, job counts, daemon/startup state, and any runs from
 * the last 24 hours that failed or logged errors.
 * @returns {Promise<object>} Structured doctor report.
 */
export async function buildDoctorReport() {
    const paths = ensureAppDirs();
    const db = getDb();
    let dbOk = false;
    let dbDetails = `DB file: ${paths.dbPath}`;
    try {
        // Cheapest possible round-trip to prove the database is reachable.
        db.prepare('SELECT 1').get();
        dbOk = true;
        dbDetails = `DB reachable at ${paths.dbPath}`;
    }
    catch (error) {
        dbDetails = `DB error: ${String(error)}`;
    }
    const jobsRepo = new JobsRepository();
    const runsRepo = new RunsRepository();
    const jobs = jobsRepo.list();
    const enabledJobs = jobs.filter((j) => j.enabled).length;
    // These two lookups are independent; resolve them in parallel instead of
    // awaiting sequentially.
    const [apiKey, keytarAvailable] = await Promise.all([
        getOpenRouterApiKey(),
        isKeytarAvailable(),
    ]);
    const startup = getStartupStatus();
    const daemon = isDaemonRunning();
    // Scan the latest runs, keep those created in the last 24h, then keep
    // only the ones that failed or whose log contains error entries.
    const recentProblemRuns = runsRepo
        .latestWithJobNames(20)
        .filter((run) => {
            const timestamp = Date.parse(run.createdAt);
            return !Number.isNaN(timestamp) && timestamp >= Date.now() - 24 * 60 * 60 * 1000;
        })
        .map((run) => {
            const logContent = readRunLog(run.logFilePath);
            const errorEntries = extractErrorEntries(logContent ?? '');
            return { run, errorEntries };
        })
        .filter(({ run, errorEntries }) => run.status === 'failed' || errorEntries.length > 0);
    return {
        platform: process.platform,
        nodeVersion: process.version,
        database: { ok: dbOk, details: dbDetails },
        openRouterApiKey: { configured: Boolean(apiKey), keytarAvailable },
        jobs: { total: jobs.length, enabled: enabledJobs },
        daemon: { running: daemon.running, pid: daemon.pid },
        startup: { enabled: startup.enabled, method: startup.method, detail: startup.detail },
        recentErrors: recentProblemRuns.map(({ run, errorEntries }) => ({
            runId: run.id,
            jobName: run.jobName ?? run.jobId,
            status: run.status,
            message: run.message,
            errorCount: errorEntries.length,
            // Only the first line of the most recent error keeps the report compact.
            latestError: errorEntries.length > 0 ? errorEntries[errorEntries.length - 1]?.split('\n')[0] : null,
        })),
    };
}
|
|
91
|
+
/**
 * Report whether the background daemon is running, and its PID if so.
 * @returns {{running: boolean, pid: number | undefined}}
 */
export function buildDaemonStatusReport() {
    const { running, pid } = isDaemonRunning();
    return { running, pid };
}
|
|
98
|
+
/**
 * Ensure the background daemon is running, starting it if necessary.
 * @returns {{started: boolean, pid: number | undefined}} Whether a new
 *   process was started and the daemon PID.
 */
export function startDaemonReport() {
    const { started, pid } = ensureDaemonRunning();
    return { started, pid };
}
|
|
105
|
+
/**
 * Stop the background daemon by sending SIGTERM to its PID.
 * @returns {object} Result: `{stopped: false, message}` when not running,
 *   `{stopped: true, pid}` on success, or `{stopped: false, pid, error}`
 *   when the signal could not be delivered.
 */
export function stopDaemonReport() {
    const { running, pid } = isDaemonRunning();
    if (!running || !pid) {
        return { stopped: false, message: 'Daemon is not running' };
    }
    try {
        process.kill(pid, 'SIGTERM');
    }
    catch (error) {
        // e.g. the process exited between the status check and the signal
        return { stopped: false, pid, error: String(error) };
    }
    return { stopped: true, pid };
}
|
|
118
|
+
/**
 * Ask the running daemon to hot-reload its job schedules.
 * @returns {{reloaded: boolean, pid: number | undefined}}
 */
export function reloadDaemonReport() {
    const { reloaded, pid } = requestDaemonReload();
    return { reloaded, pid };
}
|
|
125
|
+
/**
 * List every monitoring job with the fields relevant to an MCP client:
 * identity, enabled state, subreddits, cron schedule, and comment monitoring.
 * @returns {{count: number, jobs: object[]}}
 */
export function listJobsReport() {
    const allJobs = new JobsRepository().list();
    const jobs = allJobs.map(({ id, slug, name, enabled, subreddits, scheduleCron, monitorComments }) => ({
        id,
        slug,
        name,
        enabled,
        subreddits,
        scheduleCron,
        monitorComments,
    }));
    return { count: allJobs.length, jobs };
}
|
|
141
|
+
/**
 * List recent run history.
 *
 * With a jobRef: resolves the job (throws if unknown) and returns that job's
 * runs, including token/cost fields. Without one: returns the latest runs
 * across all jobs, annotated with job names.
 * @param {string | undefined} jobRef - Job id or slug; optional.
 * @param {number | undefined} limit - Max runs to return; defaults to 20.
 * @returns {object} `{job, count, runs}` for a single job, or `{count, runs}` globally.
 * @throws {Error} When jobRef is given but no job matches it.
 */
export function listJobRunsReport(jobRef, limit) {
    const runsRepo = new RunsRepository();
    const boundedLimit = limit ?? 20;
    if (jobRef) {
        const jobsRepo = new JobsRepository();
        const job = jobsRepo.getByRef(jobRef);
        if (!job) {
            throw new Error(`Job not found: ${jobRef}`);
        }
        const runs = runsRepo.listByJob(job.id, boundedLimit);
        return {
            job: { id: job.id, slug: job.slug, name: job.name },
            count: runs.length,
            runs: runs.map((r) => ({
                id: r.id,
                status: r.status,
                message: r.message,
                startedAt: r.startedAt,
                finishedAt: r.finishedAt,
                itemsDiscovered: r.itemsDiscovered,
                itemsNew: r.itemsNew,
                itemsQualified: r.itemsQualified,
                // Per-run LLM usage and cost are only surfaced in the single-job view.
                promptTokens: r.promptTokens,
                completionTokens: r.completionTokens,
                estimatedCostUsd: r.estimatedCostUsd,
            })),
        };
    }
    // No jobRef: cross-job view with job names instead of token/cost detail.
    const runs = runsRepo.latestWithJobNames(boundedLimit);
    return {
        count: runs.length,
        runs: runs.map((r) => ({
            id: r.id,
            jobId: r.jobId,
            jobName: r.jobName,
            status: r.status,
            message: r.message,
            createdAt: r.createdAt,
            itemsDiscovered: r.itemsDiscovered,
            itemsNew: r.itemsNew,
            itemsQualified: r.itemsQualified,
        })),
    };
}
|
|
185
|
+
/**
 * Create a new monitoring job from the tool input and return a compact
 * summary of the persisted record.
 * @param {object} input - Validated tool input (name, subreddits, prompt, …).
 * @returns {object} Identity and schedule of the created job.
 */
export function addJobReport(input) {
    const { name, description, subreddits, qualificationPrompt, scheduleCron, enabled, monitorComments } = input;
    const job = new JobsRepository().create({
        name,
        description: description ?? '',
        subreddits,
        qualificationPrompt,
        scheduleCron,
        enabled,
        monitorComments,
    });
    return {
        id: job.id,
        slug: job.slug,
        name: job.name,
        enabled: job.enabled,
        subreddits: job.subreddits,
        scheduleCron: job.scheduleCron,
    };
}
|
|
205
|
+
/**
 * Delete a job by id or slug.
 * @param {string} jobRef - Job id or slug.
 * @returns {{deleted: true, id: *, slug: *, name: *}}
 * @throws {Error} When no job matches jobRef.
 */
export function deleteJobReport(jobRef) {
    const removed = new JobsRepository().removeByRef(jobRef);
    if (!removed) {
        throw new Error(`Job not found: ${jobRef}`);
    }
    const { id, slug, name } = removed;
    return { deleted: true, id, slug, name };
}
|
|
213
|
+
/**
 * Enable scheduling for a job.
 * @param {string} jobRef - Job id or slug.
 * @returns {{id: *, slug: *, name: *, enabled: *}} The updated job summary.
 * @throws {Error} When no job matches jobRef.
 */
export function enableJobReport(jobRef) {
    const updated = new JobsRepository().setEnabledByRef(jobRef, true);
    if (!updated) {
        throw new Error(`Job not found: ${jobRef}`);
    }
    const { id, slug, name, enabled } = updated;
    return { id, slug, name, enabled };
}
|
|
221
|
+
/**
 * Disable scheduling for a job.
 * @param {string} jobRef - Job id or slug.
 * @returns {{id: *, slug: *, name: *, enabled: *}} The updated job summary.
 * @throws {Error} When no job matches jobRef.
 */
export function disableJobReport(jobRef) {
    const updated = new JobsRepository().setEnabledByRef(jobRef, false);
    if (!updated) {
        throw new Error(`Job not found: ${jobRef}`);
    }
    const { id, slug, name, enabled } = updated;
    return { id, slug, name, enabled };
}
|
|
229
|
+
/**
 * Trigger an immediate run of a job by re-invoking the CLI
 * (`snoopy job run <ref>`) as a child process and capturing its output.
 * @param {string} jobRef - Job id or slug.
 * @param {number | undefined} limit - Optional per-run item limit.
 * @returns {object} Job identity, child exit code, truncated stdout/stderr,
 *   and `spawnError` when the child could not be launched or timed out.
 * @throws {Error} When no job matches jobRef.
 */
export function runJobReport(jobRef, limit) {
    const jobsRepo = new JobsRepository();
    const job = jobsRepo.getByRef(jobRef);
    if (!job) {
        throw new Error(`Job not found: ${jobRef}`);
    }
    const args = ['job', 'run', jobRef];
    if (limit) {
        args.push('--limit', String(limit));
    }
    // Re-run the current CLI entry point (process.argv[1]) with the same node.
    const result = spawnSync(process.execPath, [process.argv[1], ...args], {
        encoding: 'utf8',
        timeout: 120_000,
    });
    const report = {
        job: { id: job.id, slug: job.slug, name: job.name },
        exitCode: result.status,
        stdout: result.stdout?.slice(0, 4000) ?? '',
        stderr: result.stderr?.slice(0, 2000) ?? '',
    };
    // spawnSync signals launch failures and timeouts via result.error with a
    // null status; surface that instead of silently returning empty output.
    if (result.error) {
        report.spawnError = String(result.error);
    }
    return report;
}
|
|
250
|
+
/**
 * Fetch analytics for one job (when jobRef is given) or globally.
 * @param {string | undefined} jobRef - Job id or slug; optional.
 * @param {number | undefined} days - Lookback window; defaults to 30.
 * @returns {object} Analytics payload from AnalyticsService.
 * @throws {Error} When jobRef is given but no job matches it.
 */
export function analyticsReport(jobRef, days) {
    const analytics = new AnalyticsService();
    const windowDays = days ?? 30;
    if (!jobRef) {
        return analytics.getGlobalAnalytics({ days: windowDays });
    }
    const job = new JobsRepository().getByRef(jobRef);
    if (!job) {
        throw new Error(`Job not found: ${jobRef}`);
    }
    return analytics.getJobAnalytics(job.id, { days: windowDays });
}
|
|
263
|
+
/**
 * Export qualified scan items, for one job or all jobs.
 * @param {string | undefined} jobRef - Job id or slug; all jobs when absent.
 * @param {string | undefined} format - Output format label; defaults to 'json'.
 * @param {boolean | undefined} lastRun - When truthy, restrict each job's
 *   items to its most recent run (jobs that never ran are skipped).
 * @param {number | undefined} limit - Max rows per job; defaults to 100.
 * @returns {{format: string, jobs: object[], totalRows: number}}
 * @throws {Error} When jobRef is given but no job matches it.
 */
export function exportReport(jobRef, format, lastRun, limit) {
    const jobsRepo = new JobsRepository();
    const scanItemsRepo = new ScanItemsRepository();
    const runsRepo = new RunsRepository();
    const rowLimit = limit ?? 100;
    let jobs;
    if (jobRef) {
        const job = jobsRepo.getByRef(jobRef);
        if (!job) {
            throw new Error(`Job not found: ${jobRef}`);
        }
        jobs = [job];
    }
    else {
        jobs = jobsRepo.list();
    }
    const results = [];
    for (const job of jobs) {
        let latestRunId = null;
        if (lastRun) {
            latestRunId = runsRepo.listByJob(job.id, 1)[0]?.id ?? null;
            if (!latestRunId) {
                continue; // last-run mode: skip jobs that have never run
            }
        }
        const items = latestRunId
            ? scanItemsRepo.listQualifiedByJobRun(job.id, latestRunId, rowLimit)
            : scanItemsRepo.listQualifiedByJob(job.id, rowLimit);
        results.push({
            jobId: job.id,
            jobSlug: job.slug,
            jobName: job.name,
            rowCount: items.length,
            items,
        });
    }
    const totalRows = results.reduce((sum, entry) => sum + entry.rowCount, 0);
    return { format: format ?? 'json', jobs: results, totalRows };
}
|
|
294
|
+
/**
 * List unconsumed qualified items and, unless dryRun is set, mark them consumed.
 * @param {string | undefined} jobRef - Job id or slug; all jobs when absent.
 * @param {number | undefined} limit - Max items to process.
 * @param {boolean | undefined} dryRun - When truthy, only list; do not mark.
 * @returns {object} `{dryRun, count, items}` or `{consumed, items}`.
 * @throws {Error} When jobRef is given but no job matches it.
 */
export function consumeReport(jobRef, limit, dryRun) {
    const jobsRepo = new JobsRepository();
    const scanItemsRepo = new ScanItemsRepository();
    let jobId;
    if (jobRef) {
        const job = jobsRepo.getByRef(jobRef);
        if (!job) {
            throw new Error(`Job not found: ${jobRef}`);
        }
        jobId = job.id;
    }
    const rows = scanItemsRepo.listUnconsumedQualified(jobId, limit);
    if (dryRun) {
        return { dryRun: true, count: rows.length, items: rows };
    }
    const consumed = scanItemsRepo.markConsumed(rows.map(({ id }) => id));
    return { consumed, items: rows };
}
|
|
311
|
+
/**
 * Summarize a job's problem runs within a recent time window.
 *
 * A run counts as a problem when its status is 'failed' or its log file
 * contains error entries (per extractErrorEntries).
 * @param {string} jobRef - Job id or slug.
 * @param {number | undefined} hours - Lookback window; defaults to 24.
 * @returns {object} Job identity, window size, and per-run error summaries.
 * @throws {Error} When no job matches jobRef.
 */
export function errorsReport(jobRef, hours) {
    const jobsRepo = new JobsRepository();
    const runsRepo = new RunsRepository();
    const boundedHours = hours ?? 24;
    const job = jobsRepo.getByRef(jobRef);
    if (!job) {
        throw new Error(`Job not found: ${jobRef}`);
    }
    const cutoff = Date.now() - boundedHours * 60 * 60 * 1000;
    // Pull up to 100 recent runs, then keep only those created inside the window.
    const recentRuns = runsRepo.listByJob(job.id, 100).filter((run) => {
        const ts = Date.parse(run.createdAt);
        return !Number.isNaN(ts) && ts >= cutoff;
    });
    // Read each run's log once and tag it with its error entries.
    const errorRuns = recentRuns
        .map((run) => {
        const logContent = readRunLog(run.logFilePath);
        const errorEntries = extractErrorEntries(logContent ?? '');
        return { run, errorEntries, hasErrors: run.status === 'failed' || errorEntries.length > 0 };
    })
        .filter((entry) => entry.hasErrors);
    return {
        job: { id: job.id, slug: job.slug, name: job.name },
        hours: boundedHours,
        errorCount: errorRuns.length,
        errors: errorRuns.map(({ run, errorEntries }) => ({
            runId: run.id,
            status: run.status,
            message: run.message,
            createdAt: run.createdAt,
            // Only the first line of each entry keeps the payload compact.
            errorEntries: errorEntries.map((e) => e.split('\n')[0]),
        })),
    };
}
|
|
344
|
+
/**
 * Return the log output of one run, truncated to 10,000 characters.
 * @param {*} runId - The run's id.
 * @returns {object} Run identity, status, log path/length, and the log text
 *   (null when the log file could not be read).
 * @throws {Error} When no run matches runId.
 */
export function logsReport(runId) {
    const run = new RunsRepository().getById(runId);
    if (!run) {
        throw new Error(`Run not found: ${runId}`);
    }
    const logContent = readRunLog(run.logFilePath);
    return {
        runId: run.id,
        jobId: run.jobId,
        jobName: run.jobName,
        status: run.status,
        logPath: run.logFilePath,
        logLength: logContent?.length ?? 0,
        log: logContent?.slice(0, 10_000) ?? null,
    };
}
|
|
361
|
+
/**
 * Read the current app settings plus credential status (the API key itself
 * is never returned, only whether one is configured).
 * @returns {Promise<object>} Model settings, schedule, notifications, and
 *   OpenRouter/Reddit credential state.
 */
export async function settingsGetReport() {
    const settingsRepo = new SettingsRepository();
    const appSettings = settingsRepo.getAppSettings();
    // The two credential lookups are independent; resolve them in parallel
    // instead of awaiting sequentially.
    const [apiKey, redditState] = await Promise.all([
        getOpenRouterApiKey(),
        settingsRepo.getRedditCredentialState(),
    ]);
    return {
        model: appSettings.model,
        temperature: appSettings.modelSettings.temperature,
        maxTokens: appSettings.modelSettings.maxTokens,
        topP: appSettings.modelSettings.topP,
        cronIntervalMinutes: appSettings.cronIntervalMinutes,
        jobTimeoutMs: appSettings.jobTimeoutMs,
        notificationsEnabled: appSettings.notificationsEnabled,
        openRouterApiKeyConfigured: Boolean(apiKey),
        reddit: {
            appName: redditState.appName,
            clientId: redditState.clientId,
            hasClientSecret: redditState.hasClientSecret,
        },
    };
}
|
|
382
|
+
/**
 * Update a single app setting by key, validating numeric values before
 * persisting. Unknown keys and invalid values throw before anything is saved.
 * @param {string} key - Setting name (model, temperature, maxTokens, topP,
 *   cronIntervalMinutes, jobTimeoutMs, notificationsEnabled).
 * @param {string} value - Raw value; numeric settings are parsed with Number.
 * @returns {{updated: string, value: string, settings: object}}
 * @throws {Error} On an unknown key or an out-of-range/unparseable value.
 */
export function settingsSetReport(key, value) {
    const settingsRepo = new SettingsRepository();
    const appSettings = settingsRepo.getAppSettings();
    // Shared numeric parser: validate or throw the setting-specific message.
    const toNumber = (raw, isValid, message) => {
        const parsed = Number(raw);
        if (!isValid(parsed)) {
            throw new Error(message);
        }
        return parsed;
    };
    const appliers = {
        model: () => {
            appSettings.model = value;
        },
        temperature: () => {
            appSettings.modelSettings.temperature = toNumber(value, (n) => !Number.isNaN(n) && n >= 0 && n <= 2, 'temperature must be 0.0-2.0');
        },
        maxTokens: () => {
            appSettings.modelSettings.maxTokens = toNumber(value, (n) => Number.isInteger(n) && n >= 1, 'maxTokens must be a positive integer');
        },
        topP: () => {
            appSettings.modelSettings.topP = toNumber(value, (n) => !Number.isNaN(n) && n >= 0 && n <= 1, 'topP must be 0.0-1.0');
        },
        cronIntervalMinutes: () => {
            appSettings.cronIntervalMinutes = toNumber(value, (n) => Number.isInteger(n) && n >= 1, 'cronIntervalMinutes must be a positive integer');
        },
        jobTimeoutMs: () => {
            appSettings.jobTimeoutMs = toNumber(value, (n) => Number.isInteger(n) && n >= 0, 'jobTimeoutMs must be a non-negative integer');
        },
        notificationsEnabled: () => {
            appSettings.notificationsEnabled = value === 'true';
        },
    };
    // Object.hasOwn guards against prototype keys like 'toString'.
    if (!Object.hasOwn(appliers, key)) {
        throw new Error(`Unknown setting: ${key}`);
    }
    appliers[key]();
    settingsRepo.setAppSettings(appSettings);
    return { updated: key, value, settings: appSettings };
}
|
|
433
|
+
//# sourceMappingURL=helpers.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Type declaration for the MCP server entry point; resolves when the
// stdio transport is connected.
export declare function startSnoopyMcpServer(): Promise<void>;
|
|
@@ -0,0 +1,260 @@
|
|
|
1
|
+
import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
|
|
2
|
+
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
|
|
3
|
+
import { snoopyDoctorToolInputSchema, snoopyDaemonStatusToolInputSchema, snoopyDaemonStartToolInputSchema, snoopyDaemonStopToolInputSchema, snoopyDaemonReloadToolInputSchema, snoopyJobListToolInputSchema, snoopyJobRunsToolInputSchema, snoopyJobAddToolInputSchema, snoopyJobDeleteToolInputSchema, snoopyJobEnableToolInputSchema, snoopyJobDisableToolInputSchema, snoopyJobRunToolInputSchema, snoopyAnalyticsToolInputSchema, snoopyExportToolInputSchema, snoopyConsumeToolInputSchema, snoopyErrorsToolInputSchema, snoopyLogsToolInputSchema, snoopySettingsGetToolInputSchema, snoopySettingsSetToolInputSchema, } from './tools.js';
|
|
4
|
+
import { getSnoopyVersion, formatToolError, formatToolResult, buildDoctorReport, buildDaemonStatusReport, startDaemonReport, stopDaemonReport, reloadDaemonReport, listJobsReport, listJobRunsReport, addJobReport, deleteJobReport, enableJobReport, disableJobReport, runJobReport, analyticsReport, exportReport, consumeReport, errorsReport, logsReport, settingsGetReport, settingsSetReport, } from './helpers.js';
|
|
5
|
+
/**
 * Start the Snoopy MCP server over stdio.
 *
 * Every snoopy_* tool is declared in a single table pairing its metadata
 * with the report helper that produces its payload; one shared wrapper
 * formats successful reports via formatToolResult and converts thrown
 * errors via formatToolError. Registration order follows the table.
 */
export async function startSnoopyMcpServer() {
    const version = getSnoopyVersion();
    const server = new McpServer({ name: 'snoopy', version });
    const toolDefinitions = [
        {
            name: 'snoopy_doctor',
            title: 'Health Check',
            description: 'Run full system health check: database, API key, daemon, jobs, startup, recent errors.',
            inputSchema: snoopyDoctorToolInputSchema,
            run: () => buildDoctorReport(),
        },
        {
            name: 'snoopy_daemon_status',
            title: 'Daemon Status',
            description: 'Show whether the Snoopy daemon is running and its PID.',
            inputSchema: snoopyDaemonStatusToolInputSchema,
            run: () => buildDaemonStatusReport(),
        },
        {
            name: 'snoopy_daemon_start',
            title: 'Start Daemon',
            description: 'Start the Snoopy background daemon.',
            inputSchema: snoopyDaemonStartToolInputSchema,
            run: () => startDaemonReport(),
        },
        {
            name: 'snoopy_daemon_stop',
            title: 'Stop Daemon',
            description: 'Stop the Snoopy background daemon.',
            inputSchema: snoopyDaemonStopToolInputSchema,
            run: () => stopDaemonReport(),
        },
        {
            name: 'snoopy_daemon_reload',
            title: 'Reload Daemon',
            description: 'Hot-reload daemon job schedules without restart.',
            inputSchema: snoopyDaemonReloadToolInputSchema,
            run: () => reloadDaemonReport(),
        },
        {
            name: 'snoopy_job_list',
            title: 'List Jobs',
            description: 'List all monitoring jobs with their state, subreddits, and schedule.',
            inputSchema: snoopyJobListToolInputSchema,
            run: () => listJobsReport(),
        },
        {
            name: 'snoopy_job_runs',
            title: 'Job Run History',
            description: 'List recent run history for a job or all jobs.',
            inputSchema: snoopyJobRunsToolInputSchema,
            run: (input) => listJobRunsReport(input.jobRef, input.limit),
        },
        {
            name: 'snoopy_job_add',
            title: 'Add Job',
            description: 'Create a new monitoring job with subreddits and qualification prompt.',
            inputSchema: snoopyJobAddToolInputSchema,
            run: (input) => addJobReport(input),
        },
        {
            name: 'snoopy_job_delete',
            title: 'Delete Job',
            description: 'Delete a job and all its runs, scan items, and log files.',
            inputSchema: snoopyJobDeleteToolInputSchema,
            run: (input) => deleteJobReport(input.jobRef),
        },
        {
            name: 'snoopy_job_enable',
            title: 'Enable Job',
            description: 'Enable scheduling for a monitoring job.',
            inputSchema: snoopyJobEnableToolInputSchema,
            run: (input) => enableJobReport(input.jobRef),
        },
        {
            name: 'snoopy_job_disable',
            title: 'Disable Job',
            description: 'Disable scheduling for a monitoring job.',
            inputSchema: snoopyJobDisableToolInputSchema,
            run: (input) => disableJobReport(input.jobRef),
        },
        {
            name: 'snoopy_job_run',
            title: 'Run Job Now',
            description: 'Trigger an immediate run for a monitoring job.',
            inputSchema: snoopyJobRunToolInputSchema,
            run: (input) => runJobReport(input.jobRef, input.limit),
        },
        {
            name: 'snoopy_analytics',
            title: 'Analytics',
            description: 'Show analytics for all jobs or a single job (tokens, cost, posts, comments).',
            inputSchema: snoopyAnalyticsToolInputSchema,
            run: (input) => analyticsReport(input.jobRef, input.days),
        },
        {
            name: 'snoopy_export',
            title: 'Export Results',
            description: 'Export qualified scan items as JSON or CSV for downstream processing.',
            inputSchema: snoopyExportToolInputSchema,
            run: (input) => exportReport(input.jobRef, input.format, input.lastRun, input.limit),
        },
        {
            name: 'snoopy_consume',
            title: 'Consume Results',
            description: 'List and mark unconsumed qualified results as consumed.',
            inputSchema: snoopyConsumeToolInputSchema,
            run: (input) => consumeReport(input.jobRef, input.limit, input.dryRun),
        },
        {
            name: 'snoopy_errors',
            title: 'Recent Errors',
            description: 'Show recent failed or errored runs for a job.',
            inputSchema: snoopyErrorsToolInputSchema,
            run: (input) => errorsReport(input.jobRef, input.hours),
        },
        {
            name: 'snoopy_logs',
            title: 'Run Logs',
            description: 'View the log output for a specific run.',
            inputSchema: snoopyLogsToolInputSchema,
            run: (input) => logsReport(input.runId),
        },
        {
            name: 'snoopy_settings_get',
            title: 'Get Settings',
            description: 'Read current Snoopy settings (model, API key status, schedule, notifications).',
            inputSchema: snoopySettingsGetToolInputSchema,
            run: () => settingsGetReport(),
        },
        {
            name: 'snoopy_settings_set',
            title: 'Update Setting',
            description: 'Update a single Snoopy setting.',
            inputSchema: snoopySettingsSetToolInputSchema,
            run: (input) => settingsSetReport(input.key, input.value),
        },
    ];
    for (const { name, title, description, inputSchema, run } of toolDefinitions) {
        server.registerTool(name, { title, description, inputSchema }, async (input) => {
            try {
                // `await` handles both sync and async report helpers uniformly.
                return formatToolResult(await run(input));
            }
            catch (error) {
                return formatToolError(error);
            }
        });
    }
    const transport = new StdioServerTransport();
    await server.connect(transport);
}
|
|
260
|
+
//# sourceMappingURL=server.js.map
|