@hasna/logs 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/biome.json +13 -0
- package/dist/cli/index.js +2272 -0
- package/dist/mcp/index.js +28507 -0
- package/dist/server/index.js +1862 -0
- package/package.json +43 -0
- package/sdk/package.json +21 -0
- package/sdk/src/index.ts +143 -0
- package/src/cli/index.ts +193 -0
- package/src/db/index.test.ts +33 -0
- package/src/db/index.ts +153 -0
- package/src/lib/browser-script.test.ts +35 -0
- package/src/lib/browser-script.ts +31 -0
- package/src/lib/github.ts +38 -0
- package/src/lib/ingest.test.ts +57 -0
- package/src/lib/ingest.ts +51 -0
- package/src/lib/jobs.test.ts +69 -0
- package/src/lib/jobs.ts +63 -0
- package/src/lib/lighthouse.ts +65 -0
- package/src/lib/perf.test.ts +45 -0
- package/src/lib/perf.ts +46 -0
- package/src/lib/projects.test.ts +73 -0
- package/src/lib/projects.ts +59 -0
- package/src/lib/query.test.ts +104 -0
- package/src/lib/query.ts +56 -0
- package/src/lib/rotate.test.ts +37 -0
- package/src/lib/rotate.ts +27 -0
- package/src/lib/scanner.ts +112 -0
- package/src/lib/scheduler.ts +57 -0
- package/src/lib/summarize.test.ts +38 -0
- package/src/lib/summarize.ts +21 -0
- package/src/mcp/index.ts +165 -0
- package/src/server/index.ts +42 -0
- package/src/server/routes/jobs.ts +32 -0
- package/src/server/routes/logs.ts +65 -0
- package/src/server/routes/perf.ts +23 -0
- package/src/server/routes/projects.ts +42 -0
- package/src/server/server.test.ts +194 -0
- package/src/types/index.ts +119 -0
- package/tsconfig.json +22 -0
package/package.json
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@hasna/logs",
|
|
3
|
+
"version": "0.0.1",
|
|
4
|
+
"description": "Log aggregation + browser script + headless page scanner + performance monitoring for AI agents",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"main": "./dist/index.js",
|
|
7
|
+
"types": "./dist/index.d.ts",
|
|
8
|
+
"bin": {
|
|
9
|
+
"logs": "./dist/cli/index.js",
|
|
10
|
+
"logs-mcp": "./dist/mcp/index.js",
|
|
11
|
+
"logs-serve": "./dist/server/index.js"
|
|
12
|
+
},
|
|
13
|
+
"scripts": {
|
|
14
|
+
"build": "bun build src/cli/index.ts src/mcp/index.ts src/server/index.ts --outdir dist --target bun --splitting --external playwright --external playwright-core --external electron --external chromium-bidi --external lighthouse",
|
|
15
|
+
"dev": "bun run src/server/index.ts",
|
|
16
|
+
"test": "bun test",
|
|
17
|
+
"test:coverage": "bun test --coverage",
|
|
18
|
+
"lint": "biome check src/"
|
|
19
|
+
},
|
|
20
|
+
"publishConfig": {
|
|
21
|
+
"access": "restricted",
|
|
22
|
+
"registry": "https://registry.npmjs.org/"
|
|
23
|
+
},
|
|
24
|
+
"keywords": ["logs", "monitoring", "mcp", "ai-agents", "sentry", "performance", "lighthouse"],
|
|
25
|
+
"author": "Andrei Hasna <andrei@hasna.com>",
|
|
26
|
+
"license": "MIT",
|
|
27
|
+
"dependencies": {
|
|
28
|
+
"@modelcontextprotocol/sdk": "^1.12.1",
|
|
29
|
+
"commander": "^14.0.0",
|
|
30
|
+
"hono": "^4.7.11",
|
|
31
|
+
"ink": "^5.1.0",
|
|
32
|
+
"node-cron": "^3.0.3",
|
|
33
|
+
"playwright": "^1.52.0",
|
|
34
|
+
"react": "^19.1.0"
|
|
35
|
+
},
|
|
36
|
+
"devDependencies": {
|
|
37
|
+
"@biomejs/biome": "^1.9.4",
|
|
38
|
+
"@types/bun": "latest",
|
|
39
|
+
"@types/node-cron": "^3.0.11",
|
|
40
|
+
"@types/react": "^19.1.4",
|
|
41
|
+
"typescript": "^5.9.3"
|
|
42
|
+
}
|
|
43
|
+
}
|
package/sdk/package.json
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@hasna/logs-sdk",
|
|
3
|
+
"version": "0.0.1",
|
|
4
|
+
"description": "Zero-dependency fetch client for @hasna/logs",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"main": "./dist/index.js",
|
|
7
|
+
"types": "./dist/index.d.ts",
|
|
8
|
+
"exports": {
|
|
9
|
+
".": "./dist/index.js",
|
|
10
|
+
"./browser": "./dist/index.js"
|
|
11
|
+
},
|
|
12
|
+
"publishConfig": {
|
|
13
|
+
"access": "restricted",
|
|
14
|
+
"registry": "https://registry.npmjs.org/"
|
|
15
|
+
},
|
|
16
|
+
"scripts": {
|
|
17
|
+
"build": "bun build src/index.ts --outdir dist --target browser"
|
|
18
|
+
},
|
|
19
|
+
"author": "Andrei Hasna <andrei@hasna.com>",
|
|
20
|
+
"license": "MIT"
|
|
21
|
+
}
|
package/sdk/src/index.ts
ADDED
|
@@ -0,0 +1,143 @@
|
|
|
1
|
+
import type { LogEntry, LogLevel, LogQuery, LogRow, LogSummary, Page, PerformanceSnapshot, Project, ScanJob } from "../../src/types/index.ts"
|
|
2
|
+
|
|
3
|
+
export type { LogEntry, LogLevel, LogQuery, LogRow, LogSummary, Page, PerformanceSnapshot, Project, ScanJob }
|
|
4
|
+
|
|
5
|
+
export interface LogsClientOptions {
|
|
6
|
+
url?: string
|
|
7
|
+
projectId?: string
|
|
8
|
+
apiKey?: string
|
|
9
|
+
}
|
|
10
|
+
|
|
11
|
+
const DEFAULT_URL = "http://localhost:3460"
|
|
12
|
+
|
|
13
|
+
export class LogsClient {
|
|
14
|
+
private url: string
|
|
15
|
+
private projectId?: string
|
|
16
|
+
private headers: Record<string, string>
|
|
17
|
+
|
|
18
|
+
constructor(opts: LogsClientOptions = {}) {
|
|
19
|
+
this.url = (opts.url ?? DEFAULT_URL).replace(/\/$/, "")
|
|
20
|
+
this.projectId = opts.projectId
|
|
21
|
+
this.headers = { "Content-Type": "application/json" }
|
|
22
|
+
if (opts.apiKey) this.headers["Authorization"] = `Bearer ${opts.apiKey}`
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
async push(entry: LogEntry): Promise<LogRow> {
|
|
26
|
+
const res = await fetch(`${this.url}/api/logs`, {
|
|
27
|
+
method: "POST",
|
|
28
|
+
headers: this.headers,
|
|
29
|
+
body: JSON.stringify({ project_id: this.projectId, ...entry }),
|
|
30
|
+
})
|
|
31
|
+
return res.json() as Promise<LogRow>
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
async pushBatch(entries: LogEntry[]): Promise<{ inserted: number }> {
|
|
35
|
+
const res = await fetch(`${this.url}/api/logs`, {
|
|
36
|
+
method: "POST",
|
|
37
|
+
headers: this.headers,
|
|
38
|
+
body: JSON.stringify(entries.map(e => ({ project_id: this.projectId, ...e }))),
|
|
39
|
+
})
|
|
40
|
+
return res.json() as Promise<{ inserted: number }>
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
async search(query: LogQuery = {}): Promise<LogRow[]> {
|
|
44
|
+
const params = new URLSearchParams()
|
|
45
|
+
if (query.project_id ?? this.projectId) params.set("project_id", query.project_id ?? this.projectId!)
|
|
46
|
+
if (query.page_id) params.set("page_id", query.page_id)
|
|
47
|
+
if (query.level) params.set("level", Array.isArray(query.level) ? query.level.join(",") : query.level)
|
|
48
|
+
if (query.service) params.set("service", query.service)
|
|
49
|
+
if (query.since) params.set("since", query.since)
|
|
50
|
+
if (query.until) params.set("until", query.until)
|
|
51
|
+
if (query.text) params.set("text", query.text)
|
|
52
|
+
if (query.limit) params.set("limit", String(query.limit))
|
|
53
|
+
if (query.offset) params.set("offset", String(query.offset))
|
|
54
|
+
if (query.fields) params.set("fields", query.fields.join(","))
|
|
55
|
+
const res = await fetch(`${this.url}/api/logs?${params}`, { headers: this.headers })
|
|
56
|
+
return res.json() as Promise<LogRow[]>
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
async tail(projectId?: string, n = 50): Promise<LogRow[]> {
|
|
60
|
+
const params = new URLSearchParams({ n: String(n) })
|
|
61
|
+
const pid = projectId ?? this.projectId
|
|
62
|
+
if (pid) params.set("project_id", pid)
|
|
63
|
+
const res = await fetch(`${this.url}/api/logs/tail?${params}`, { headers: this.headers })
|
|
64
|
+
return res.json() as Promise<LogRow[]>
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
async summary(projectId?: string, since?: string): Promise<LogSummary[]> {
|
|
68
|
+
const params = new URLSearchParams()
|
|
69
|
+
const pid = projectId ?? this.projectId
|
|
70
|
+
if (pid) params.set("project_id", pid)
|
|
71
|
+
if (since) params.set("since", since)
|
|
72
|
+
const res = await fetch(`${this.url}/api/logs/summary?${params}`, { headers: this.headers })
|
|
73
|
+
return res.json() as Promise<LogSummary[]>
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
async context(traceId: string): Promise<LogRow[]> {
|
|
77
|
+
const res = await fetch(`${this.url}/api/logs/${traceId}/context`, { headers: this.headers })
|
|
78
|
+
return res.json() as Promise<LogRow[]>
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
async registerProject(name: string, githubRepo?: string, baseUrl?: string): Promise<Project> {
|
|
82
|
+
const res = await fetch(`${this.url}/api/projects`, {
|
|
83
|
+
method: "POST",
|
|
84
|
+
headers: this.headers,
|
|
85
|
+
body: JSON.stringify({ name, github_repo: githubRepo, base_url: baseUrl }),
|
|
86
|
+
})
|
|
87
|
+
return res.json() as Promise<Project>
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
async registerPage(projectId: string, url: string, path?: string, name?: string): Promise<Page> {
|
|
91
|
+
const res = await fetch(`${this.url}/api/projects/${projectId}/pages`, {
|
|
92
|
+
method: "POST",
|
|
93
|
+
headers: this.headers,
|
|
94
|
+
body: JSON.stringify({ url, path, name }),
|
|
95
|
+
})
|
|
96
|
+
return res.json() as Promise<Page>
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
async createScanJob(projectId: string, schedule: string, pageId?: string): Promise<ScanJob> {
|
|
100
|
+
const res = await fetch(`${this.url}/api/jobs`, {
|
|
101
|
+
method: "POST",
|
|
102
|
+
headers: this.headers,
|
|
103
|
+
body: JSON.stringify({ project_id: projectId, schedule, page_id: pageId }),
|
|
104
|
+
})
|
|
105
|
+
return res.json() as Promise<ScanJob>
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
async perfSnapshot(projectId: string, pageId?: string): Promise<PerformanceSnapshot | null> {
|
|
109
|
+
const params = new URLSearchParams({ project_id: projectId })
|
|
110
|
+
if (pageId) params.set("page_id", pageId)
|
|
111
|
+
const res = await fetch(`${this.url}/api/perf?${params}`, { headers: this.headers })
|
|
112
|
+
return res.json() as Promise<PerformanceSnapshot | null>
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
async perfTrend(projectId: string, pageId?: string, since?: string, limit?: number): Promise<PerformanceSnapshot[]> {
|
|
116
|
+
const params = new URLSearchParams({ project_id: projectId })
|
|
117
|
+
if (pageId) params.set("page_id", pageId)
|
|
118
|
+
if (since) params.set("since", since)
|
|
119
|
+
if (limit) params.set("limit", String(limit))
|
|
120
|
+
const res = await fetch(`${this.url}/api/perf/trend?${params}`, { headers: this.headers })
|
|
121
|
+
return res.json() as Promise<PerformanceSnapshot[]>
|
|
122
|
+
}
|
|
123
|
+
}
|
|
124
|
+
|
|
125
|
+
// Browser auto-capture init
|
|
126
|
+
export function initLogs(opts: { projectId: string; url?: string }): void {
|
|
127
|
+
if (typeof window === "undefined") return
|
|
128
|
+
const serverUrl = (opts.url ?? DEFAULT_URL).replace(/\/$/, "")
|
|
129
|
+
const client = new LogsClient({ url: serverUrl, projectId: opts.projectId })
|
|
130
|
+
const q: LogEntry[] = []
|
|
131
|
+
const flush = () => { if (q.length) client.pushBatch(q.splice(0)).catch(() => {}) }
|
|
132
|
+
setInterval(flush, 2000)
|
|
133
|
+
|
|
134
|
+
const _ce = console.error.bind(console)
|
|
135
|
+
console.error = (...args: unknown[]) => { _ce(...args); q.push({ level: "error", message: args.map(String).join(" "), source: "script", url: location.href }) }
|
|
136
|
+
|
|
137
|
+
const _cw = console.warn.bind(console)
|
|
138
|
+
console.warn = (...args: unknown[]) => { _cw(...args); q.push({ level: "warn", message: args.map(String).join(" "), source: "script", url: location.href }) }
|
|
139
|
+
|
|
140
|
+
window.addEventListener("error", (e) => { q.push({ level: "error", message: e.message, stack_trace: e.error?.stack, source: "script", url: location.href }) })
|
|
141
|
+
window.addEventListener("unhandledrejection", (e) => { q.push({ level: "error", message: `Unhandled: ${e.reason?.message ?? e.reason}`, stack_trace: e.reason?.stack, source: "script", url: location.href }) })
|
|
142
|
+
window.addEventListener("beforeunload", flush)
|
|
143
|
+
}
|
package/src/cli/index.ts
ADDED
|
@@ -0,0 +1,193 @@
|
|
|
1
|
+
#!/usr/bin/env bun
|
|
2
|
+
import { Command } from "commander"
|
|
3
|
+
import { getDb } from "../db/index.ts"
|
|
4
|
+
import { ingestLog } from "../lib/ingest.ts"
|
|
5
|
+
import { searchLogs, tailLogs } from "../lib/query.ts"
|
|
6
|
+
import { summarizeLogs } from "../lib/summarize.ts"
|
|
7
|
+
import { createJob, listJobs } from "../lib/jobs.ts"
|
|
8
|
+
import { createPage, createProject, listPages, listProjects } from "../lib/projects.ts"
|
|
9
|
+
import { runJob } from "../lib/scheduler.ts"
|
|
10
|
+
import type { LogLevel } from "../types/index.ts"
|
|
11
|
+
|
|
12
|
+
// CLI entry point. Subcommands operate directly on the local SQLite database
// via getDb() (no HTTP round-trip), except `mcp`/`serve`, which dynamically
// import the long-running server modules.
const program = new Command()
  .name("logs")
  .description("@hasna/logs — log aggregation and monitoring")
  .version("0.0.1")

// ── logs list ──────────────────────────────────────────────
// Filtered log search. Output formats: table (default), json, compact.
program.command("list")
  .description("Search and list logs")
  .option("--project <id>", "Filter by project ID")
  .option("--page <id>", "Filter by page ID")
  .option("--level <levels>", "Comma-separated levels (error,warn,info,debug,fatal)")
  .option("--service <name>", "Filter by service")
  .option("--since <iso>", "Since timestamp or relative (1h, 24h, 7d)")
  .option("--text <query>", "Full-text search")
  .option("--limit <n>", "Max results", "100")
  .option("--format <fmt>", "Output format: table|json|compact", "table")
  .action((opts) => {
    const db = getDb()
    // --since accepts either an absolute ISO timestamp or a relative "1h"/"7d" form
    const since = parseRelativeTime(opts.since)
    const rows = searchLogs(db, {
      project_id: opts.project,
      page_id: opts.page,
      level: opts.level ? (opts.level.split(",") as LogLevel[]) : undefined,
      service: opts.service,
      since,
      text: opts.text,
      limit: Number(opts.limit),
    })
    if (opts.format === "json") { console.log(JSON.stringify(rows, null, 2)); return }
    if (opts.format === "compact") {
      for (const r of rows) console.log(`${r.timestamp} [${r.level.toUpperCase()}] ${r.service ?? "-"} ${r.message}`)
      return
    }
    // table format: fixed-width level/service columns, metadata appended raw
    for (const r of rows) {
      const meta = r.metadata ? ` ${r.metadata}` : ""
      console.log(`${r.timestamp} ${pad(r.level.toUpperCase(), 5)} ${pad(r.service ?? "-", 12)} ${r.message}${meta}`)
    }
    console.log(`\n${rows.length} log(s)`)
  })

// ── logs tail ──────────────────────────────────────────────
// Most recent N logs, optionally scoped to a project.
program.command("tail")
  .description("Show most recent logs")
  .option("--project <id>")
  .option("--n <count>", "Number of logs", "50")
  .action((opts) => {
    const rows = tailLogs(getDb(), opts.project, Number(opts.n))
    for (const r of rows) console.log(`${r.timestamp} ${pad(r.level.toUpperCase(), 5)} ${r.message}`)
  })

// ── logs summary ──────────────────────────────────────────
// Aggregated error/warn counts per service within a relative time window.
program.command("summary")
  .description("Error/warn summary by service")
  .option("--project <id>")
  .option("--since <time>", "Relative time (1h, 24h, 7d)", "24h")
  .action((opts) => {
    const summary = summarizeLogs(getDb(), opts.project, parseRelativeTime(opts.since))
    if (!summary.length) { console.log("No errors/warnings in this window."); return }
    for (const s of summary) console.log(`${pad(s.level.toUpperCase(), 5)} ${pad(s.service ?? "-", 15)} count=${s.count} latest=${s.latest}`)
  })

// ── logs push ─────────────────────────────────────────────
// Insert a single log row directly into the local database.
program.command("push <message>")
  .description("Push a log entry")
  .option("--level <level>", "Log level", "info")
  .option("--service <name>")
  .option("--project <id>")
  .option("--trace <id>", "Trace ID")
  .action((message, opts) => {
    const row = ingestLog(getDb(), { level: opts.level as LogLevel, message, service: opts.service, project_id: opts.project, trace_id: opts.trace })
    console.log(`Logged: ${row.id}`)
  })

// ── logs project ──────────────────────────────────────────
const projectCmd = program.command("project").description("Manage projects")

projectCmd.command("create")
  .option("--name <name>", "Project name")
  .option("--repo <url>", "GitHub repo")
  .option("--url <url>", "Base URL")
  .action((opts) => {
    if (!opts.name) { console.error("--name is required"); process.exit(1) }
    const p = createProject(getDb(), { name: opts.name, github_repo: opts.repo, base_url: opts.url })
    console.log(`Created project: ${p.id} — ${p.name}`)
  })

projectCmd.command("list").action(() => {
  const projects = listProjects(getDb())
  for (const p of projects) console.log(`${p.id} ${p.name} ${p.base_url ?? ""} ${p.github_repo ?? ""}`)
})

// ── logs page ─────────────────────────────────────────────
const pageCmd = program.command("page").description("Manage pages")

pageCmd.command("add")
  .option("--project <id>")
  .option("--url <url>")
  .option("--name <name>")
  .action((opts) => {
    if (!opts.project || !opts.url) { console.error("--project and --url required"); process.exit(1) }
    const p = createPage(getDb(), { project_id: opts.project, url: opts.url, name: opts.name })
    console.log(`Page registered: ${p.id} — ${p.url}`)
  })

pageCmd.command("list").option("--project <id>").action((opts) => {
  if (!opts.project) { console.error("--project required"); process.exit(1) }
  const pages = listPages(getDb(), opts.project)
  for (const p of pages) console.log(`${p.id} ${p.url} last=${p.last_scanned_at ?? "never"}`)
})

// ── logs job ──────────────────────────────────────────────
const jobCmd = program.command("job").description("Manage scan jobs")

jobCmd.command("create")
  .option("--project <id>")
  .option("--schedule <cron>", "Cron expression", "*/30 * * * *")
  .action((opts) => {
    if (!opts.project) { console.error("--project required"); process.exit(1) }
    const j = createJob(getDb(), { project_id: opts.project, schedule: opts.schedule })
    console.log(`Job created: ${j.id} — ${j.schedule}`)
  })

jobCmd.command("list").option("--project <id>").action((opts) => {
  const jobs = listJobs(getDb(), opts.project)
  for (const j of jobs) console.log(`${j.id} ${j.schedule} enabled=${j.enabled} last=${j.last_run_at ?? "never"}`)
})

// ── logs scan ─────────────────────────────────────────────
// One-off scan run for an existing job (bypasses the cron scheduler).
program.command("scan")
  .description("Run an immediate scan for a job")
  .option("--job <id>")
  .option("--project <id>")
  .action(async (opts) => {
    if (!opts.job) { console.error("--job required"); process.exit(1) }
    const db = getDb()
    // dynamic import keeps jobs.ts out of the startup path for other commands
    const job = (await import("../lib/jobs.ts")).getJob(db, opts.job)
    if (!job) { console.error("Job not found"); process.exit(1) }
    console.log("Running scan...")
    await runJob(db, job.id, job.project_id, job.page_id ?? undefined)
    console.log("Scan complete.")
  })

// ── logs mcp / logs serve ─────────────────────────────────
program.command("mcp")
  .description("Start the MCP server")
  .option("--claude", "Install into Claude Code")
  .option("--codex", "Install into Codex")
  .option("--gemini", "Install into Gemini")
  .action(async (opts) => {
    if (opts.claude || opts.codex || opts.gemini) {
      const bin = process.execPath
      const script = new URL(import.meta.url).pathname
      // NOTE(review): only --claude has an install branch; --codex/--gemini
      // are accepted but silently do nothing — confirm whether intended.
      if (opts.claude) {
        const { execSync } = await import("node:child_process")
        execSync(`claude mcp add --transport stdio --scope user logs -- ${bin} ${script} mcp`, { stdio: "inherit" })
      }
      return
    }
    // importing the module starts the stdio MCP server as a side effect
    await import("../mcp/index.ts")
  })

program.command("serve")
  .description("Start the REST API server")
  .option("--port <n>", "Port", "3460")
  .action(async (opts) => {
    // the server module reads LOGS_PORT at import time
    process.env.LOGS_PORT = opts.port
    await import("../server/index.ts")
  })
|
|
180
|
+
|
|
181
|
+
// ── helpers ───────────────────────────────────────────────
|
|
182
|
+
// Left-justify `s` in a field of width `n`; strings already >= n pass through unchanged.
function pad(s: string, n: number): string {
  return s.length >= n ? s : s + " ".repeat(n - s.length)
}
|
|
183
|
+
|
|
184
|
+
function parseRelativeTime(val?: string): string | undefined {
|
|
185
|
+
if (!val) return undefined
|
|
186
|
+
const m = val.match(/^(\d+)(h|d|m)$/)
|
|
187
|
+
if (!m) return val
|
|
188
|
+
const [, n, unit] = m
|
|
189
|
+
const ms = Number(n) * (unit === "h" ? 3600 : unit === "d" ? 86400 : 60) * 1000
|
|
190
|
+
return new Date(Date.now() - ms).toISOString()
|
|
191
|
+
}
|
|
192
|
+
|
|
193
|
+
program.parse()
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
import { describe, expect, it } from "bun:test"
|
|
2
|
+
import { createTestDb } from "./index.ts"
|
|
3
|
+
|
|
4
|
+
// Schema smoke tests: each case gets a fresh in-memory DB (createTestDb runs
// migrate()) and inspects sqlite_master directly.
describe("db migrations", () => {
  // Every table the query/ingest/job code touches must exist after migrate().
  it("creates all tables", () => {
    const db = createTestDb()
    const tables = db.prepare("SELECT name FROM sqlite_master WHERE type='table'").all() as { name: string }[]
    const names = tables.map(t => t.name)
    expect(names).toContain("projects")
    expect(names).toContain("pages")
    expect(names).toContain("logs")
    expect(names).toContain("scan_jobs")
    expect(names).toContain("scan_runs")
    expect(names).toContain("performance_snapshots")
    expect(names).toContain("logs_fts")
  })

  it("creates indexes", () => {
    const db = createTestDb()
    const indexes = db.prepare("SELECT name FROM sqlite_master WHERE type='index'").all() as { name: string }[]
    const names = indexes.map(i => i.name)
    expect(names).toContain("idx_logs_project_level_ts")
    expect(names).toContain("idx_logs_trace")
    expect(names).toContain("idx_logs_service")
  })

  // NOTE(review): this only replays one CREATE TABLE IF NOT EXISTS statement,
  // not migrate() itself — a second migrate(db) call would be a stronger
  // idempotency check.
  it("is idempotent (migrate twice)", () => {
    const db = createTestDb()
    expect(() => {
      db.run("CREATE TABLE IF NOT EXISTS projects (id TEXT PRIMARY KEY, name TEXT NOT NULL UNIQUE, github_repo TEXT, base_url TEXT, description TEXT, github_description TEXT, github_branch TEXT, github_sha TEXT, last_synced_at TEXT, created_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ','now')))")
    }).not.toThrow()
  })
})
|
package/src/db/index.ts
ADDED
|
@@ -0,0 +1,153 @@
|
|
|
1
|
+
import { Database } from "bun:sqlite"
import { existsSync, mkdirSync } from "node:fs"
import { homedir } from "node:os"
import { join } from "node:path"
|
|
4
|
+
|
|
5
|
+
const DATA_DIR = process.env.LOGS_DATA_DIR ?? join(process.env.HOME ?? "~", ".logs")
|
|
6
|
+
const DB_PATH = process.env.LOGS_DB_PATH ?? join(DATA_DIR, "logs.db")
|
|
7
|
+
|
|
8
|
+
let _db: Database | null = null
|
|
9
|
+
|
|
10
|
+
export function getDb(): Database {
|
|
11
|
+
if (_db) return _db
|
|
12
|
+
if (!existsSync(DATA_DIR)) mkdirSync(DATA_DIR, { recursive: true })
|
|
13
|
+
_db = new Database(DB_PATH)
|
|
14
|
+
_db.run("PRAGMA journal_mode=WAL")
|
|
15
|
+
_db.run("PRAGMA foreign_keys=ON")
|
|
16
|
+
migrate(_db)
|
|
17
|
+
return _db
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
export function closeDb(): void {
|
|
21
|
+
_db?.close()
|
|
22
|
+
_db = null
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
export function createTestDb(): Database {
|
|
26
|
+
const db = new Database(":memory:")
|
|
27
|
+
db.run("PRAGMA journal_mode=WAL")
|
|
28
|
+
db.run("PRAGMA foreign_keys=ON")
|
|
29
|
+
migrate(db)
|
|
30
|
+
return db
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
/**
 * Create the full schema. Idempotent: every statement uses IF NOT EXISTS, so
 * running it against an already-migrated database is a no-op.
 *
 * Conventions: random-hex TEXT primary keys (8 bytes, 16 for logs) and
 * ISO-8601 UTC timestamps generated via strftime.
 */
function migrate(db: Database): void {
  // Top-level grouping for logs/pages/jobs; name is unique.
  db.run(`
    CREATE TABLE IF NOT EXISTS projects (
      id TEXT PRIMARY KEY DEFAULT (lower(hex(randomblob(8)))),
      name TEXT NOT NULL UNIQUE,
      github_repo TEXT,
      base_url TEXT,
      description TEXT,
      github_description TEXT,
      github_branch TEXT,
      github_sha TEXT,
      last_synced_at TEXT,
      created_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ','now'))
    )
  `)

  // URLs to scan, one row per (project, url) pair.
  db.run(`
    CREATE TABLE IF NOT EXISTS pages (
      id TEXT PRIMARY KEY DEFAULT (lower(hex(randomblob(8)))),
      project_id TEXT NOT NULL REFERENCES projects(id) ON DELETE CASCADE,
      url TEXT NOT NULL,
      path TEXT NOT NULL DEFAULT '/',
      name TEXT,
      last_scanned_at TEXT,
      created_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ','now')),
      UNIQUE(project_id, url)
    )
  `)

  // Log rows. project/page links are nullable and survive parent deletion
  // (ON DELETE SET NULL) so logs are never cascaded away.
  db.run(`
    CREATE TABLE IF NOT EXISTS logs (
      id TEXT PRIMARY KEY DEFAULT (lower(hex(randomblob(16)))),
      timestamp TEXT NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ','now')),
      project_id TEXT REFERENCES projects(id) ON DELETE SET NULL,
      page_id TEXT REFERENCES pages(id) ON DELETE SET NULL,
      level TEXT NOT NULL CHECK(level IN ('debug','info','warn','error','fatal')),
      source TEXT NOT NULL DEFAULT 'sdk' CHECK(source IN ('sdk','script','scanner')),
      service TEXT,
      message TEXT NOT NULL,
      trace_id TEXT,
      session_id TEXT,
      agent TEXT,
      url TEXT,
      stack_trace TEXT,
      metadata TEXT
    )
  `)

  // Covering indexes for the query paths used by search/tail/summary/context.
  db.run(`CREATE INDEX IF NOT EXISTS idx_logs_project_level_ts ON logs(project_id, level, timestamp DESC)`)
  db.run(`CREATE INDEX IF NOT EXISTS idx_logs_trace ON logs(trace_id)`)
  db.run(`CREATE INDEX IF NOT EXISTS idx_logs_service ON logs(service)`)
  db.run(`CREATE INDEX IF NOT EXISTS idx_logs_page ON logs(page_id)`)
  db.run(`CREATE INDEX IF NOT EXISTS idx_logs_timestamp ON logs(timestamp DESC)`)

  // External-content FTS5 index over message/service/stack_trace, kept in
  // sync by the two triggers below.
  db.run(`
    CREATE VIRTUAL TABLE IF NOT EXISTS logs_fts USING fts5(
      message, service, stack_trace,
      content=logs, content_rowid=rowid
    )
  `)

  db.run(`
    CREATE TRIGGER IF NOT EXISTS logs_fts_insert AFTER INSERT ON logs BEGIN
      INSERT INTO logs_fts(rowid, message, service, stack_trace)
      VALUES (new.rowid, new.message, new.service, new.stack_trace);
    END
  `)

  // FTS5 external-content delete uses the special 'delete' command row.
  // NOTE(review): there is no AFTER UPDATE trigger — fine only if log rows
  // are immutable after insert; confirm no code path updates logs.
  db.run(`
    CREATE TRIGGER IF NOT EXISTS logs_fts_delete AFTER DELETE ON logs BEGIN
      INSERT INTO logs_fts(logs_fts, rowid, message, service, stack_trace)
      VALUES ('delete', old.rowid, old.message, old.service, old.stack_trace);
    END
  `)

  // Recurring scan jobs (cron expressions, default every 30 minutes).
  db.run(`
    CREATE TABLE IF NOT EXISTS scan_jobs (
      id TEXT PRIMARY KEY DEFAULT (lower(hex(randomblob(8)))),
      project_id TEXT NOT NULL REFERENCES projects(id) ON DELETE CASCADE,
      page_id TEXT REFERENCES pages(id) ON DELETE SET NULL,
      schedule TEXT NOT NULL DEFAULT '*/30 * * * *',
      enabled INTEGER NOT NULL DEFAULT 1,
      last_run_at TEXT,
      created_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ','now'))
    )
  `)

  // One row per job execution, with outcome counters.
  db.run(`
    CREATE TABLE IF NOT EXISTS scan_runs (
      id TEXT PRIMARY KEY DEFAULT (lower(hex(randomblob(8)))),
      job_id TEXT NOT NULL REFERENCES scan_jobs(id) ON DELETE CASCADE,
      page_id TEXT REFERENCES pages(id) ON DELETE SET NULL,
      started_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ','now')),
      finished_at TEXT,
      status TEXT NOT NULL DEFAULT 'running' CHECK(status IN ('running','completed','failed')),
      logs_collected INTEGER NOT NULL DEFAULT 0,
      errors_found INTEGER NOT NULL DEFAULT 0,
      perf_score REAL
    )
  `)

  // Web-vitals measurements per page (lcp/fcp/cls/tti/ttfb + overall score).
  db.run(`
    CREATE TABLE IF NOT EXISTS performance_snapshots (
      id TEXT PRIMARY KEY DEFAULT (lower(hex(randomblob(8)))),
      timestamp TEXT NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ','now')),
      project_id TEXT NOT NULL REFERENCES projects(id) ON DELETE CASCADE,
      page_id TEXT REFERENCES pages(id) ON DELETE SET NULL,
      url TEXT NOT NULL,
      lcp REAL,
      fcp REAL,
      cls REAL,
      tti REAL,
      ttfb REAL,
      score REAL,
      raw_audit TEXT
    )
  `)

  db.run(`CREATE INDEX IF NOT EXISTS idx_perf_project_ts ON performance_snapshots(project_id, timestamp DESC)`)
  db.run(`CREATE INDEX IF NOT EXISTS idx_perf_page ON performance_snapshots(page_id)`)
}
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import { describe, expect, it } from "bun:test"
|
|
2
|
+
import { getBrowserScript } from "./browser-script.ts"
|
|
3
|
+
|
|
4
|
+
// String-level assertions on the generated tracking script: these verify the
// emitted JavaScript contains the expected hooks/endpoints without executing
// it in a real browser.
describe("getBrowserScript", () => {
  it("returns a non-empty string", () => {
    const script = getBrowserScript("http://localhost:3460")
    expect(typeof script).toBe("string")
    expect(script.length).toBeGreaterThan(100)
  })

  it("embeds the server URL", () => {
    const script = getBrowserScript("http://localhost:3460")
    expect(script).toContain("http://localhost:3460")
  })

  it("hooks console.error", () => {
    const script = getBrowserScript("http://localhost:3460")
    expect(script).toContain("console.error")
  })

  it("hooks window.onerror / unhandledrejection", () => {
    const script = getBrowserScript("http://localhost:3460")
    expect(script).toContain("unhandledrejection")
  })

  it("pushes to /api/logs", () => {
    const script = getBrowserScript("http://localhost:3460")
    expect(script).toContain("/api/logs")
  })

  it("uses data-project attribute", () => {
    const script = getBrowserScript("http://localhost:3460")
    expect(script).toContain("data-project")
  })
})
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
/**
 * Returns the minified browser tracking script served at GET /script.js.
 *
 * The emitted script batches console.error/console.warn calls plus window
 * "error" and "unhandledrejection" events into POSTs against
 * `${serverUrl}/api/logs`, flushing every 2 seconds, whenever 10 entries are
 * queued, and on beforeunload (the fetch uses keepalive so the final flush
 * survives page unload). The project ID is read from the embedding tag's
 * data-project attribute. window.__logs = { push, flush, config } is exposed
 * for manual use.
 *
 * NOTE: the template literal below is runtime output shipped to browsers —
 * it is deliberately ES5-style and must stay byte-stable.
 */
export function getBrowserScript(serverUrl: string): string {
  return `(function(){
var cfg={url:'${serverUrl}',projectId:null};
var el=document.currentScript;
if(el){cfg.projectId=el.getAttribute('data-project')||null;}
var q=[];
function flush(){if(!q.length)return;var b=q.splice(0);fetch(cfg.url+'/api/logs',{method:'POST',headers:{'Content-Type':'application/json'},body:JSON.stringify(b),keepalive:true}).catch(function(){});}
setInterval(flush,2000);
function push(level,msg,extra){
q.push(Object.assign({level:level,message:String(msg),source:'script',url:location.href,timestamp:new Date().toISOString()},cfg.projectId?{project_id:cfg.projectId}:{},extra||{}));
if(q.length>=10)flush();
}
var _ce=console.error.bind(console);
console.error=function(){_ce.apply(console,arguments);push('error',Array.from(arguments).join(' '));};
var _cw=console.warn.bind(console);
console.warn=function(){_cw.apply(console,arguments);push('warn',Array.from(arguments).join(' '));};
window.addEventListener('error',function(e){push('error',e.message,{stack_trace:e.error?e.error.stack:null,url:e.filename});});
window.addEventListener('unhandledrejection',function(e){push('error','Unhandled promise rejection: '+(e.reason&&e.reason.message||String(e.reason)),{stack_trace:e.reason&&e.reason.stack||null});});
window.addEventListener('beforeunload',flush);
window.__logs={push:push,flush:flush,config:cfg};
})();`
}
|
|
24
|
+
|
|
25
|
+
export function initLogsScript(config: { projectId: string; url: string }): void {
|
|
26
|
+
if (typeof window === "undefined") return
|
|
27
|
+
const script = document.createElement("script")
|
|
28
|
+
script.src = `${config.url}/script.js`
|
|
29
|
+
script.setAttribute("data-project", config.projectId)
|
|
30
|
+
document.head.appendChild(script)
|
|
31
|
+
}
|