@hasna/logs 0.0.1 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dashboard/README.md +73 -0
- package/dashboard/bun.lock +526 -0
- package/dashboard/eslint.config.js +23 -0
- package/dashboard/index.html +13 -0
- package/dashboard/package.json +32 -0
- package/dashboard/public/favicon.svg +1 -0
- package/dashboard/public/icons.svg +24 -0
- package/dashboard/src/App.css +184 -0
- package/dashboard/src/App.tsx +49 -0
- package/dashboard/src/api.ts +33 -0
- package/dashboard/src/assets/hero.png +0 -0
- package/dashboard/src/assets/react.svg +1 -0
- package/dashboard/src/assets/vite.svg +1 -0
- package/dashboard/src/index.css +111 -0
- package/dashboard/src/main.tsx +10 -0
- package/dashboard/src/pages/Alerts.tsx +69 -0
- package/dashboard/src/pages/Issues.tsx +50 -0
- package/dashboard/src/pages/Perf.tsx +75 -0
- package/dashboard/src/pages/Projects.tsx +67 -0
- package/dashboard/src/pages/Summary.tsx +67 -0
- package/dashboard/src/pages/Tail.tsx +65 -0
- package/dashboard/tsconfig.app.json +28 -0
- package/dashboard/tsconfig.json +7 -0
- package/dashboard/tsconfig.node.json +26 -0
- package/dashboard/vite.config.ts +14 -0
- package/dist/cli/index.js +116 -12
- package/dist/mcp/index.js +306 -100
- package/dist/server/index.js +592 -7
- package/package.json +12 -2
- package/sdk/package.json +3 -2
- package/sdk/src/index.ts +1 -1
- package/sdk/src/types.ts +56 -0
- package/src/cli/index.ts +114 -4
- package/src/db/index.ts +10 -0
- package/src/db/migrations/001_alert_rules.ts +21 -0
- package/src/db/migrations/002_issues.ts +21 -0
- package/src/db/migrations/003_retention.ts +15 -0
- package/src/db/migrations/004_page_auth.ts +13 -0
- package/src/lib/alerts.test.ts +67 -0
- package/src/lib/alerts.ts +117 -0
- package/src/lib/compare.test.ts +52 -0
- package/src/lib/compare.ts +85 -0
- package/src/lib/diagnose.test.ts +55 -0
- package/src/lib/diagnose.ts +76 -0
- package/src/lib/export.test.ts +66 -0
- package/src/lib/export.ts +65 -0
- package/src/lib/health.test.ts +48 -0
- package/src/lib/health.ts +51 -0
- package/src/lib/ingest.ts +25 -2
- package/src/lib/issues.test.ts +79 -0
- package/src/lib/issues.ts +70 -0
- package/src/lib/page-auth.test.ts +54 -0
- package/src/lib/page-auth.ts +48 -0
- package/src/lib/retention.test.ts +42 -0
- package/src/lib/retention.ts +62 -0
- package/src/lib/scanner.ts +21 -2
- package/src/lib/scheduler.ts +6 -0
- package/src/lib/session-context.ts +28 -0
- package/src/mcp/index.ts +133 -89
- package/src/server/index.ts +12 -1
- package/src/server/routes/alerts.ts +32 -0
- package/src/server/routes/issues.ts +43 -0
- package/src/server/routes/logs.ts +21 -0
- package/src/server/routes/projects.ts +25 -0
- package/src/server/routes/stream.ts +43 -0
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
import { describe, expect, it } from "bun:test"
|
|
2
|
+
import { createTestDb } from "../db/index.ts"
|
|
3
|
+
import { ingestBatch } from "./ingest.ts"
|
|
4
|
+
import { diagnose } from "./diagnose.ts"
|
|
5
|
+
|
|
6
|
+
function seedProject(db: ReturnType<typeof createTestDb>) {
|
|
7
|
+
return db.prepare("INSERT INTO projects (name) VALUES ('app') RETURNING id").get() as { id: string }
|
|
8
|
+
}
|
|
9
|
+
|
|
10
|
+
describe("diagnose", () => {
|
|
11
|
+
it("returns empty diagnosis for project with no logs", () => {
|
|
12
|
+
const db = createTestDb()
|
|
13
|
+
const p = seedProject(db)
|
|
14
|
+
const result = diagnose(db, p.id)
|
|
15
|
+
expect(result.project_id).toBe(p.id)
|
|
16
|
+
expect(result.top_errors).toHaveLength(0)
|
|
17
|
+
expect(result.summary).toContain("No errors")
|
|
18
|
+
})
|
|
19
|
+
|
|
20
|
+
it("surfaces top errors", () => {
|
|
21
|
+
const db = createTestDb()
|
|
22
|
+
const p = seedProject(db)
|
|
23
|
+
ingestBatch(db, [
|
|
24
|
+
{ level: "error", message: "DB timeout", service: "api", project_id: p.id },
|
|
25
|
+
{ level: "error", message: "DB timeout", service: "api", project_id: p.id },
|
|
26
|
+
{ level: "error", message: "Auth failed", service: "auth", project_id: p.id },
|
|
27
|
+
])
|
|
28
|
+
const result = diagnose(db, p.id)
|
|
29
|
+
expect(result.top_errors.length).toBeGreaterThan(0)
|
|
30
|
+
expect(result.top_errors[0]!.message).toBe("DB timeout")
|
|
31
|
+
expect(result.top_errors[0]!.count).toBe(2)
|
|
32
|
+
})
|
|
33
|
+
|
|
34
|
+
it("populates summary with error info", () => {
|
|
35
|
+
const db = createTestDb()
|
|
36
|
+
const p = seedProject(db)
|
|
37
|
+
ingestBatch(db, [{ level: "error", message: "boom", service: "api", project_id: p.id }])
|
|
38
|
+
const result = diagnose(db, p.id)
|
|
39
|
+
expect(result.summary).toContain("error")
|
|
40
|
+
})
|
|
41
|
+
|
|
42
|
+
it("groups error_rate_by_service", () => {
|
|
43
|
+
const db = createTestDb()
|
|
44
|
+
const p = seedProject(db)
|
|
45
|
+
ingestBatch(db, [
|
|
46
|
+
{ level: "error", message: "e1", service: "api", project_id: p.id },
|
|
47
|
+
{ level: "info", message: "i1", service: "api", project_id: p.id },
|
|
48
|
+
{ level: "warn", message: "w1", service: "db", project_id: p.id },
|
|
49
|
+
])
|
|
50
|
+
const result = diagnose(db, p.id)
|
|
51
|
+
const api = result.error_rate_by_service.find(s => s.service === "api")
|
|
52
|
+
expect(api?.errors).toBe(1)
|
|
53
|
+
expect(api?.total).toBe(2)
|
|
54
|
+
})
|
|
55
|
+
})
|
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
import type { Database } from "bun:sqlite"

/**
 * Aggregated diagnosis of a project's recent logs and performance
 * snapshots, as produced by `diagnose` below.
 */
export interface DiagnosisResult {
  // Project this report was computed for.
  project_id: string
  // ISO-8601 lower bound of the time window that was analysed.
  window: string
  // Up to 10 most frequent error/fatal message groups within the window.
  top_errors: { message: string; count: number; service: string | null; last_seen: string }[]
  // Per-service error/warn counts versus total log volume in the window.
  error_rate_by_service: { service: string | null; errors: number; warns: number; total: number }[]
  // Up to 10 pages ranked by error/fatal log count within the window.
  failing_pages: { page_id: string; url: string; error_count: number }[]
  // Pages whose latest performance snapshot score dropped versus the
  // previous one (score fields are null when there is no prior snapshot).
  perf_regressions: { page_id: string; url: string; score_now: number | null; score_prev: number | null; delta: number | null }[]
  // Human-readable one-line digest of the findings above.
  summary: string
}
|
|
12
|
+
|
|
13
|
+
export function diagnose(db: Database, projectId: string, since?: string): DiagnosisResult {
|
|
14
|
+
const window = since ?? new Date(Date.now() - 24 * 3600 * 1000).toISOString()
|
|
15
|
+
|
|
16
|
+
// Top errors by message
|
|
17
|
+
const top_errors = db.prepare(`
|
|
18
|
+
SELECT message, COUNT(*) as count, service, MAX(timestamp) as last_seen
|
|
19
|
+
FROM logs
|
|
20
|
+
WHERE project_id = $p AND level IN ('error','fatal') AND timestamp >= $since
|
|
21
|
+
GROUP BY message, service
|
|
22
|
+
ORDER BY count DESC
|
|
23
|
+
LIMIT 10
|
|
24
|
+
`).all({ $p: projectId, $since: window }) as DiagnosisResult["top_errors"]
|
|
25
|
+
|
|
26
|
+
// Error rate by service
|
|
27
|
+
const error_rate_by_service = db.prepare(`
|
|
28
|
+
SELECT service,
|
|
29
|
+
SUM(CASE WHEN level IN ('error','fatal') THEN 1 ELSE 0 END) as errors,
|
|
30
|
+
SUM(CASE WHEN level = 'warn' THEN 1 ELSE 0 END) as warns,
|
|
31
|
+
COUNT(*) as total
|
|
32
|
+
FROM logs
|
|
33
|
+
WHERE project_id = $p AND timestamp >= $since
|
|
34
|
+
GROUP BY service
|
|
35
|
+
ORDER BY errors DESC
|
|
36
|
+
`).all({ $p: projectId, $since: window }) as DiagnosisResult["error_rate_by_service"]
|
|
37
|
+
|
|
38
|
+
// Failing pages (most errors)
|
|
39
|
+
const failing_pages = db.prepare(`
|
|
40
|
+
SELECT l.page_id, p.url, COUNT(*) as error_count
|
|
41
|
+
FROM logs l
|
|
42
|
+
JOIN pages p ON p.id = l.page_id
|
|
43
|
+
WHERE l.project_id = $p AND l.level IN ('error','fatal') AND l.timestamp >= $since AND l.page_id IS NOT NULL
|
|
44
|
+
GROUP BY l.page_id, p.url
|
|
45
|
+
ORDER BY error_count DESC
|
|
46
|
+
LIMIT 10
|
|
47
|
+
`).all({ $p: projectId, $since: window }) as DiagnosisResult["failing_pages"]
|
|
48
|
+
|
|
49
|
+
// Perf regressions: compare latest vs previous snapshot per page
|
|
50
|
+
const perf_regressions = db.prepare(`
|
|
51
|
+
SELECT * FROM (
|
|
52
|
+
SELECT
|
|
53
|
+
cur.page_id,
|
|
54
|
+
p.url,
|
|
55
|
+
cur.score as score_now,
|
|
56
|
+
prev.score as score_prev,
|
|
57
|
+
(cur.score - prev.score) as delta
|
|
58
|
+
FROM performance_snapshots cur
|
|
59
|
+
JOIN pages p ON p.id = cur.page_id
|
|
60
|
+
LEFT JOIN performance_snapshots prev ON prev.page_id = cur.page_id AND prev.id != cur.id
|
|
61
|
+
WHERE cur.project_id = $p
|
|
62
|
+
AND cur.timestamp = (SELECT MAX(timestamp) FROM performance_snapshots WHERE page_id = cur.page_id)
|
|
63
|
+
AND (prev.timestamp = (SELECT MAX(timestamp) FROM performance_snapshots WHERE page_id = cur.page_id AND id != cur.id) OR prev.id IS NULL)
|
|
64
|
+
) WHERE delta < -5 OR delta IS NULL
|
|
65
|
+
ORDER BY delta ASC
|
|
66
|
+
LIMIT 10
|
|
67
|
+
`).all({ $p: projectId }) as DiagnosisResult["perf_regressions"]
|
|
68
|
+
|
|
69
|
+
const totalErrors = top_errors.reduce((s, e) => s + e.count, 0)
|
|
70
|
+
const topService = error_rate_by_service[0]
|
|
71
|
+
const summary = totalErrors === 0
|
|
72
|
+
? "No errors in this window. All looks good."
|
|
73
|
+
: `${totalErrors} error(s) detected. Worst service: ${topService?.service ?? "unknown"} (${topService?.errors ?? 0} errors). ${failing_pages.length} page(s) with errors. ${perf_regressions.length} perf regression(s).`
|
|
74
|
+
|
|
75
|
+
return { project_id: projectId, window, top_errors, error_rate_by_service, failing_pages, perf_regressions, summary }
|
|
76
|
+
}
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
import { describe, expect, it } from "bun:test"
|
|
2
|
+
import { createTestDb } from "../db/index.ts"
|
|
3
|
+
import { ingestBatch } from "./ingest.ts"
|
|
4
|
+
import { exportToCsv, exportToJson } from "./export.ts"
|
|
5
|
+
|
|
6
|
+
function seed(db: ReturnType<typeof createTestDb>) {
|
|
7
|
+
ingestBatch(db, [
|
|
8
|
+
{ level: "error", message: "boom", service: "api" },
|
|
9
|
+
{ level: "info", message: "ok", service: "web" },
|
|
10
|
+
{ level: "warn", message: 'has "quotes"', service: "db" },
|
|
11
|
+
])
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
describe("exportToJson", () => {
|
|
15
|
+
it("exports all logs as JSON array", () => {
|
|
16
|
+
const db = createTestDb()
|
|
17
|
+
seed(db)
|
|
18
|
+
const chunks: string[] = []
|
|
19
|
+
const count = exportToJson(db, {}, s => chunks.push(s))
|
|
20
|
+
expect(count).toBe(3)
|
|
21
|
+
const parsed = JSON.parse(chunks.join(""))
|
|
22
|
+
expect(Array.isArray(parsed)).toBe(true)
|
|
23
|
+
expect(parsed).toHaveLength(3)
|
|
24
|
+
})
|
|
25
|
+
|
|
26
|
+
it("filters by level", () => {
|
|
27
|
+
const db = createTestDb()
|
|
28
|
+
seed(db)
|
|
29
|
+
const chunks: string[] = []
|
|
30
|
+
const count = exportToJson(db, { level: "error" }, s => chunks.push(s))
|
|
31
|
+
expect(count).toBe(1)
|
|
32
|
+
const parsed = JSON.parse(chunks.join(""))
|
|
33
|
+
expect(parsed[0].level).toBe("error")
|
|
34
|
+
})
|
|
35
|
+
})
|
|
36
|
+
|
|
37
|
+
describe("exportToCsv", () => {
|
|
38
|
+
it("exports CSV with header", () => {
|
|
39
|
+
const db = createTestDb()
|
|
40
|
+
seed(db)
|
|
41
|
+
const chunks: string[] = []
|
|
42
|
+
const count = exportToCsv(db, {}, s => chunks.push(s))
|
|
43
|
+
expect(count).toBe(3)
|
|
44
|
+
const csv = chunks.join("")
|
|
45
|
+
expect(csv).toContain("id,timestamp,level")
|
|
46
|
+
expect(csv).toContain("error")
|
|
47
|
+
expect(csv).toContain("boom")
|
|
48
|
+
})
|
|
49
|
+
|
|
50
|
+
it("escapes CSV quotes", () => {
|
|
51
|
+
const db = createTestDb()
|
|
52
|
+
seed(db)
|
|
53
|
+
const chunks: string[] = []
|
|
54
|
+
exportToCsv(db, { level: "warn" }, s => chunks.push(s))
|
|
55
|
+
const csv = chunks.join("")
|
|
56
|
+
expect(csv).toContain('"has ""quotes"""')
|
|
57
|
+
})
|
|
58
|
+
|
|
59
|
+
it("filters by service", () => {
|
|
60
|
+
const db = createTestDb()
|
|
61
|
+
seed(db)
|
|
62
|
+
const chunks: string[] = []
|
|
63
|
+
const count = exportToCsv(db, { service: "api" }, s => chunks.push(s))
|
|
64
|
+
expect(count).toBe(1)
|
|
65
|
+
})
|
|
66
|
+
})
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
import type { Database } from "bun:sqlite"
import type { LogRow } from "../types/index.ts"

/** Filters applied when exporting logs; every field is optional. */
export interface ExportOptions {
  // Restrict to a single project.
  project_id?: string
  // Inclusive ISO-8601 timestamp lower bound.
  since?: string
  // Inclusive ISO-8601 timestamp upper bound.
  until?: string
  // Exact log level to match (e.g. "error").
  level?: string
  // Exact service name to match.
  service?: string
  // Maximum number of rows to export (iterLogs defaults this to 100,000).
  limit?: number
}
|
|
12
|
+
|
|
13
|
+
function* iterLogs(db: Database, opts: ExportOptions): Generator<LogRow> {
|
|
14
|
+
const conditions: string[] = []
|
|
15
|
+
const params: Record<string, unknown> = {}
|
|
16
|
+
if (opts.project_id) { conditions.push("project_id = $p"); params.$p = opts.project_id }
|
|
17
|
+
if (opts.since) { conditions.push("timestamp >= $since"); params.$since = opts.since }
|
|
18
|
+
if (opts.until) { conditions.push("timestamp <= $until"); params.$until = opts.until }
|
|
19
|
+
if (opts.level) { conditions.push("level = $level"); params.$level = opts.level }
|
|
20
|
+
if (opts.service) { conditions.push("service = $service"); params.$service = opts.service }
|
|
21
|
+
const where = conditions.length ? `WHERE ${conditions.join(" AND ")}` : ""
|
|
22
|
+
const limit = opts.limit ?? 100_000
|
|
23
|
+
|
|
24
|
+
// Batch in pages of 1000 to avoid memory issues
|
|
25
|
+
let offset = 0
|
|
26
|
+
while (offset < limit) {
|
|
27
|
+
const batch = db.prepare(`SELECT * FROM logs ${where} ORDER BY timestamp ASC LIMIT 1000 OFFSET $offset`)
|
|
28
|
+
.all({ ...params, $offset: offset }) as LogRow[]
|
|
29
|
+
if (!batch.length) break
|
|
30
|
+
yield* batch
|
|
31
|
+
offset += batch.length
|
|
32
|
+
if (batch.length < 1000) break
|
|
33
|
+
}
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
export function exportToJson(db: Database, opts: ExportOptions, writeLine: (s: string) => void): number {
|
|
37
|
+
writeLine("[")
|
|
38
|
+
let count = 0
|
|
39
|
+
for (const row of iterLogs(db, opts)) {
|
|
40
|
+
writeLine((count > 0 ? "," : "") + JSON.stringify(row))
|
|
41
|
+
count++
|
|
42
|
+
}
|
|
43
|
+
writeLine("]")
|
|
44
|
+
return count
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
const CSV_HEADER = "id,timestamp,level,service,message,trace_id,url\n"
|
|
48
|
+
|
|
49
|
+
export function exportToCsv(db: Database, opts: ExportOptions, writeLine: (s: string) => void): number {
|
|
50
|
+
writeLine(CSV_HEADER)
|
|
51
|
+
let count = 0
|
|
52
|
+
for (const row of iterLogs(db, opts)) {
|
|
53
|
+
const fields = [row.id, row.timestamp, row.level, row.service ?? "", escapeCSV(row.message), row.trace_id ?? "", row.url ?? ""]
|
|
54
|
+
writeLine(fields.join(",") + "\n")
|
|
55
|
+
count++
|
|
56
|
+
}
|
|
57
|
+
return count
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
function escapeCSV(s: string): string {
|
|
61
|
+
if (s.includes(",") || s.includes('"') || s.includes("\n")) {
|
|
62
|
+
return `"${s.replace(/"/g, '""')}"`
|
|
63
|
+
}
|
|
64
|
+
return s
|
|
65
|
+
}
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
import { describe, expect, it } from "bun:test"
|
|
2
|
+
import { createTestDb } from "../db/index.ts"
|
|
3
|
+
import { ingestBatch } from "./ingest.ts"
|
|
4
|
+
import { getHealth } from "./health.ts"
|
|
5
|
+
|
|
6
|
+
describe("getHealth", () => {
|
|
7
|
+
it("returns status ok", () => {
|
|
8
|
+
const db = createTestDb()
|
|
9
|
+
const h = getHealth(db)
|
|
10
|
+
expect(h.status).toBe("ok")
|
|
11
|
+
})
|
|
12
|
+
|
|
13
|
+
it("counts total logs", () => {
|
|
14
|
+
const db = createTestDb()
|
|
15
|
+
ingestBatch(db, [{ level: "info", message: "a" }, { level: "error", message: "b" }])
|
|
16
|
+
const h = getHealth(db)
|
|
17
|
+
expect(h.total_logs).toBe(2)
|
|
18
|
+
})
|
|
19
|
+
|
|
20
|
+
it("returns logs_by_level breakdown", () => {
|
|
21
|
+
const db = createTestDb()
|
|
22
|
+
ingestBatch(db, [{ level: "info", message: "a" }, { level: "error", message: "b" }, { level: "error", message: "c" }])
|
|
23
|
+
const h = getHealth(db)
|
|
24
|
+
expect(h.logs_by_level["error"]).toBe(2)
|
|
25
|
+
expect(h.logs_by_level["info"]).toBe(1)
|
|
26
|
+
})
|
|
27
|
+
|
|
28
|
+
it("counts projects", () => {
|
|
29
|
+
const db = createTestDb()
|
|
30
|
+
db.prepare("INSERT INTO projects (name) VALUES ('p1')").run()
|
|
31
|
+
db.prepare("INSERT INTO projects (name) VALUES ('p2')").run()
|
|
32
|
+
const h = getHealth(db)
|
|
33
|
+
expect(h.projects).toBe(2)
|
|
34
|
+
})
|
|
35
|
+
|
|
36
|
+
it("returns uptime_seconds >= 0", () => {
|
|
37
|
+
const h = getHealth(createTestDb())
|
|
38
|
+
expect(h.uptime_seconds).toBeGreaterThanOrEqual(0)
|
|
39
|
+
})
|
|
40
|
+
|
|
41
|
+
it("returns newest and oldest log timestamps", () => {
|
|
42
|
+
const db = createTestDb()
|
|
43
|
+
ingestBatch(db, [{ level: "info", message: "first" }, { level: "warn", message: "last" }])
|
|
44
|
+
const h = getHealth(db)
|
|
45
|
+
expect(h.oldest_log).toBeTruthy()
|
|
46
|
+
expect(h.newest_log).toBeTruthy()
|
|
47
|
+
})
|
|
48
|
+
})
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
import type { Database } from "bun:sqlite"

// Captured at module load; getHealth reports uptime relative to this,
// i.e. seconds since this module was first imported by the process.
const startTime = Date.now()

/** Snapshot of server and database health returned by `getHealth`. */
export interface HealthResult {
  status: "ok"
  // Seconds since this module was loaded.
  uptime_seconds: number
  // Size of the DB file on disk; null for in-memory DBs or when the path
  // is unknown/unreadable.
  db_size_bytes: number | null
  // Total rows in `projects`.
  projects: number
  // Total rows in `logs`.
  total_logs: number
  // Log count grouped by level, e.g. { error: 2, info: 1 }.
  logs_by_level: Record<string, number>
  // MIN/MAX log timestamps; null when the logs table is empty.
  oldest_log: string | null
  newest_log: string | null
  // Enabled rows in `scan_jobs`.
  scheduler_jobs: number
  // Issues currently in status 'open'.
  open_issues: number
}
|
|
17
|
+
|
|
18
|
+
export function getHealth(db: Database): HealthResult {
|
|
19
|
+
const projects = (db.prepare("SELECT COUNT(*) as c FROM projects").get() as { c: number }).c
|
|
20
|
+
const total_logs = (db.prepare("SELECT COUNT(*) as c FROM logs").get() as { c: number }).c
|
|
21
|
+
const scheduler_jobs = (db.prepare("SELECT COUNT(*) as c FROM scan_jobs WHERE enabled = 1").get() as { c: number }).c
|
|
22
|
+
const open_issues = (db.prepare("SELECT COUNT(*) as c FROM issues WHERE status = 'open'").get() as { c: number }).c
|
|
23
|
+
|
|
24
|
+
const levelRows = db.prepare("SELECT level, COUNT(*) as c FROM logs GROUP BY level").all() as { level: string; c: number }[]
|
|
25
|
+
const logs_by_level = Object.fromEntries(levelRows.map(r => [r.level, r.c]))
|
|
26
|
+
|
|
27
|
+
const oldest = db.prepare("SELECT MIN(timestamp) as t FROM logs").get() as { t: string | null }
|
|
28
|
+
const newest = db.prepare("SELECT MAX(timestamp) as t FROM logs").get() as { t: string | null }
|
|
29
|
+
|
|
30
|
+
let db_size_bytes: number | null = null
|
|
31
|
+
try {
|
|
32
|
+
const dbPath = process.env.LOGS_DB_PATH
|
|
33
|
+
if (dbPath) {
|
|
34
|
+
const { statSync } = require("node:fs")
|
|
35
|
+
db_size_bytes = statSync(dbPath).size
|
|
36
|
+
}
|
|
37
|
+
} catch { /* in-memory or not accessible */ }
|
|
38
|
+
|
|
39
|
+
return {
|
|
40
|
+
status: "ok",
|
|
41
|
+
uptime_seconds: Math.floor((Date.now() - startTime) / 1000),
|
|
42
|
+
db_size_bytes,
|
|
43
|
+
projects,
|
|
44
|
+
total_logs,
|
|
45
|
+
logs_by_level,
|
|
46
|
+
oldest_log: oldest.t,
|
|
47
|
+
newest_log: newest.t,
|
|
48
|
+
scheduler_jobs,
|
|
49
|
+
open_issues,
|
|
50
|
+
}
|
|
51
|
+
}
|
package/src/lib/ingest.ts
CHANGED
|
@@ -1,5 +1,9 @@
|
|
|
1
1
|
import type { Database } from "bun:sqlite"
|
|
2
2
|
import type { LogEntry, LogRow } from "../types/index.ts"
|
|
3
|
+
import { upsertIssue } from "./issues.ts"
|
|
4
|
+
import { evaluateAlerts } from "./alerts.ts"
|
|
5
|
+
|
|
6
|
+
const ERROR_LEVELS = new Set(["warn", "error", "fatal"])
|
|
3
7
|
|
|
4
8
|
export function ingestLog(db: Database, entry: LogEntry): LogRow {
|
|
5
9
|
const stmt = db.prepare(`
|
|
@@ -7,7 +11,7 @@ export function ingestLog(db: Database, entry: LogEntry): LogRow {
|
|
|
7
11
|
VALUES ($project_id, $page_id, $level, $source, $service, $message, $trace_id, $session_id, $agent, $url, $stack_trace, $metadata)
|
|
8
12
|
RETURNING *
|
|
9
13
|
`)
|
|
10
|
-
|
|
14
|
+
const row = stmt.get({
|
|
11
15
|
$project_id: entry.project_id ?? null,
|
|
12
16
|
$page_id: entry.page_id ?? null,
|
|
13
17
|
$level: entry.level,
|
|
@@ -21,6 +25,16 @@ export function ingestLog(db: Database, entry: LogEntry): LogRow {
|
|
|
21
25
|
$stack_trace: entry.stack_trace ?? null,
|
|
22
26
|
$metadata: entry.metadata ? JSON.stringify(entry.metadata) : null,
|
|
23
27
|
}) as LogRow
|
|
28
|
+
|
|
29
|
+
// Side effects: issue grouping + alert evaluation (fire-and-forget)
|
|
30
|
+
if (ERROR_LEVELS.has(entry.level)) {
|
|
31
|
+
if (entry.project_id) {
|
|
32
|
+
upsertIssue(db, { project_id: entry.project_id, level: entry.level, service: entry.service, message: entry.message, stack_trace: entry.stack_trace })
|
|
33
|
+
evaluateAlerts(db, entry.project_id, entry.service ?? null, entry.level).catch(() => {})
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
return row
|
|
24
38
|
}
|
|
25
39
|
|
|
26
40
|
export function ingestBatch(db: Database, entries: LogEntry[]): LogRow[] {
|
|
@@ -47,5 +61,14 @@ export function ingestBatch(db: Database, entries: LogEntry[]): LogRow[] {
|
|
|
47
61
|
}) as LogRow
|
|
48
62
|
)
|
|
49
63
|
)
|
|
50
|
-
|
|
64
|
+
const rows = tx(entries)
|
|
65
|
+
|
|
66
|
+
// Issue grouping for error-level entries (outside transaction for perf)
|
|
67
|
+
for (const entry of entries) {
|
|
68
|
+
if (ERROR_LEVELS.has(entry.level) && entry.project_id) {
|
|
69
|
+
upsertIssue(db, { project_id: entry.project_id, level: entry.level, service: entry.service, message: entry.message, stack_trace: entry.stack_trace })
|
|
70
|
+
}
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
return rows
|
|
51
74
|
}
|
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
import { describe, expect, it } from "bun:test"
|
|
2
|
+
import { createTestDb } from "../db/index.ts"
|
|
3
|
+
import { computeFingerprint, getIssue, listIssues, updateIssueStatus, upsertIssue } from "./issues.ts"
|
|
4
|
+
|
|
5
|
+
function seedProject(db: ReturnType<typeof createTestDb>) {
|
|
6
|
+
return db.prepare("INSERT INTO projects (name) VALUES ('app') RETURNING id").get() as { id: string }
|
|
7
|
+
}
|
|
8
|
+
|
|
9
|
+
describe("computeFingerprint", () => {
|
|
10
|
+
it("returns consistent hash for same input", () => {
|
|
11
|
+
const a = computeFingerprint("error", "api", "DB connection failed")
|
|
12
|
+
const b = computeFingerprint("error", "api", "DB connection failed")
|
|
13
|
+
expect(a).toBe(b)
|
|
14
|
+
})
|
|
15
|
+
|
|
16
|
+
it("returns different hash for different messages", () => {
|
|
17
|
+
const a = computeFingerprint("error", "api", "timeout")
|
|
18
|
+
const b = computeFingerprint("error", "api", "DB error")
|
|
19
|
+
expect(a).not.toBe(b)
|
|
20
|
+
})
|
|
21
|
+
|
|
22
|
+
it("normalizes hex IDs in messages", () => {
|
|
23
|
+
const a = computeFingerprint("error", "api", "Error for id abc123def456")
|
|
24
|
+
const b = computeFingerprint("error", "api", "Error for id 000fffaabbcc")
|
|
25
|
+
expect(a).toBe(b)
|
|
26
|
+
})
|
|
27
|
+
})
|
|
28
|
+
|
|
29
|
+
describe("upsertIssue", () => {
|
|
30
|
+
it("creates a new issue", () => {
|
|
31
|
+
const db = createTestDb()
|
|
32
|
+
const p = seedProject(db)
|
|
33
|
+
const issue = upsertIssue(db, { project_id: p.id, level: "error", service: "api", message: "DB timeout" })
|
|
34
|
+
expect(issue.id).toBeTruthy()
|
|
35
|
+
expect(issue.count).toBe(1)
|
|
36
|
+
expect(issue.status).toBe("open")
|
|
37
|
+
})
|
|
38
|
+
|
|
39
|
+
it("increments count on duplicate", () => {
|
|
40
|
+
const db = createTestDb()
|
|
41
|
+
const p = seedProject(db)
|
|
42
|
+
upsertIssue(db, { project_id: p.id, level: "error", message: "Same error" })
|
|
43
|
+
upsertIssue(db, { project_id: p.id, level: "error", message: "Same error" })
|
|
44
|
+
const issue = upsertIssue(db, { project_id: p.id, level: "error", message: "Same error" })
|
|
45
|
+
expect(issue.count).toBe(3)
|
|
46
|
+
})
|
|
47
|
+
|
|
48
|
+
it("reopens resolved issues", () => {
|
|
49
|
+
const db = createTestDb()
|
|
50
|
+
const p = seedProject(db)
|
|
51
|
+
const issue = upsertIssue(db, { project_id: p.id, level: "error", message: "err" })
|
|
52
|
+
updateIssueStatus(db, issue.id, "resolved")
|
|
53
|
+
const reopened = upsertIssue(db, { project_id: p.id, level: "error", message: "err" })
|
|
54
|
+
expect(reopened.status).toBe("open")
|
|
55
|
+
})
|
|
56
|
+
})
|
|
57
|
+
|
|
58
|
+
describe("listIssues", () => {
|
|
59
|
+
it("filters by project and status", () => {
|
|
60
|
+
const db = createTestDb()
|
|
61
|
+
const p = seedProject(db)
|
|
62
|
+
const issue = upsertIssue(db, { project_id: p.id, level: "error", message: "database connection timed out" })
|
|
63
|
+
updateIssueStatus(db, issue.id, "resolved")
|
|
64
|
+
upsertIssue(db, { project_id: p.id, level: "error", message: "authentication service unavailable" })
|
|
65
|
+
expect(listIssues(db, p.id, "open")).toHaveLength(1)
|
|
66
|
+
expect(listIssues(db, p.id, "resolved")).toHaveLength(1)
|
|
67
|
+
expect(listIssues(db, p.id)).toHaveLength(2)
|
|
68
|
+
})
|
|
69
|
+
})
|
|
70
|
+
|
|
71
|
+
describe("updateIssueStatus", () => {
|
|
72
|
+
it("updates status", () => {
|
|
73
|
+
const db = createTestDb()
|
|
74
|
+
const p = seedProject(db)
|
|
75
|
+
const issue = upsertIssue(db, { project_id: p.id, level: "error", message: "x" })
|
|
76
|
+
const updated = updateIssueStatus(db, issue.id, "ignored")
|
|
77
|
+
expect(updated?.status).toBe("ignored")
|
|
78
|
+
})
|
|
79
|
+
})
|
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
import type { Database } from "bun:sqlite"
import { createHash } from "node:crypto"

/** A deduplicated group of similar error logs, keyed by fingerprint. */
export interface Issue {
  id: string
  project_id: string | null
  // Stable 16-hex-char grouping hash produced by computeFingerprint.
  fingerprint: string
  level: string
  service: string | null
  // Message for this group, truncated to 500 chars on insert.
  message_template: string
  first_seen: string
  last_seen: string
  // Number of events folded into this issue.
  count: number
  status: "open" | "resolved" | "ignored"
}
|
|
16
|
+
|
|
17
|
+
export function computeFingerprint(level: string, service: string | null, message: string, stackTrace?: string | null): string {
|
|
18
|
+
// Normalize message: strip hex IDs, numbers, timestamps
|
|
19
|
+
const normalized = message
|
|
20
|
+
.replace(/[0-9a-f]{8,}/gi, "<id>")
|
|
21
|
+
.replace(/\d+/g, "<n>")
|
|
22
|
+
.replace(/https?:\/\/[^\s]+/g, "<url>")
|
|
23
|
+
.trim()
|
|
24
|
+
const stackFrame = stackTrace ? stackTrace.split("\n").slice(0, 3).join("|") : ""
|
|
25
|
+
const raw = `${level}|${service ?? ""}|${normalized}|${stackFrame}`
|
|
26
|
+
return createHash("sha256").update(raw).digest("hex").slice(0, 16)
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
export function upsertIssue(db: Database, data: {
|
|
30
|
+
project_id?: string
|
|
31
|
+
level: string
|
|
32
|
+
service?: string | null
|
|
33
|
+
message: string
|
|
34
|
+
stack_trace?: string | null
|
|
35
|
+
}): Issue {
|
|
36
|
+
const fingerprint = computeFingerprint(data.level, data.service ?? null, data.message, data.stack_trace)
|
|
37
|
+
return db.prepare(`
|
|
38
|
+
INSERT INTO issues (project_id, fingerprint, level, service, message_template)
|
|
39
|
+
VALUES ($project_id, $fingerprint, $level, $service, $message_template)
|
|
40
|
+
ON CONFLICT(project_id, fingerprint) DO UPDATE SET
|
|
41
|
+
count = count + 1,
|
|
42
|
+
last_seen = strftime('%Y-%m-%dT%H:%M:%fZ','now'),
|
|
43
|
+
status = CASE WHEN status = 'resolved' THEN 'open' ELSE status END
|
|
44
|
+
RETURNING *
|
|
45
|
+
`).get({
|
|
46
|
+
$project_id: data.project_id ?? null,
|
|
47
|
+
$fingerprint: fingerprint,
|
|
48
|
+
$level: data.level,
|
|
49
|
+
$service: data.service ?? null,
|
|
50
|
+
$message_template: data.message.slice(0, 500),
|
|
51
|
+
}) as Issue
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
export function listIssues(db: Database, projectId?: string, status?: string, limit = 50): Issue[] {
|
|
55
|
+
const conditions: string[] = []
|
|
56
|
+
const params: Record<string, unknown> = { $limit: limit }
|
|
57
|
+
if (projectId) { conditions.push("project_id = $p"); params.$p = projectId }
|
|
58
|
+
if (status) { conditions.push("status = $status"); params.$status = status }
|
|
59
|
+
const where = conditions.length ? `WHERE ${conditions.join(" AND ")}` : ""
|
|
60
|
+
return db.prepare(`SELECT * FROM issues ${where} ORDER BY last_seen DESC LIMIT $limit`).all(params) as Issue[]
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
export function getIssue(db: Database, id: string): Issue | null {
|
|
64
|
+
return db.prepare("SELECT * FROM issues WHERE id = $id").get({ $id: id }) as Issue | null
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
export function updateIssueStatus(db: Database, id: string, status: "open" | "resolved" | "ignored"): Issue | null {
|
|
68
|
+
return db.prepare("UPDATE issues SET status = $status WHERE id = $id RETURNING *")
|
|
69
|
+
.get({ $id: id, $status: status }) as Issue | null
|
|
70
|
+
}
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
import { describe, expect, it } from "bun:test"
|
|
2
|
+
import { createTestDb } from "../db/index.ts"
|
|
3
|
+
import { deletePageAuth, getPageAuth, setPageAuth } from "./page-auth.ts"
|
|
4
|
+
|
|
5
|
+
function seedPage(db: ReturnType<typeof createTestDb>) {
|
|
6
|
+
const p = db.prepare("INSERT INTO projects (name) VALUES ('app') RETURNING id").get() as { id: string }
|
|
7
|
+
const page = db.prepare("INSERT INTO pages (project_id, url) VALUES (?, 'https://app.com') RETURNING id").get(p.id) as { id: string }
|
|
8
|
+
return { projectId: p.id, pageId: page.id }
|
|
9
|
+
}
|
|
10
|
+
|
|
11
|
+
describe("page auth", () => {
|
|
12
|
+
it("sets and retrieves bearer auth", () => {
|
|
13
|
+
const db = createTestDb()
|
|
14
|
+
const { pageId } = seedPage(db)
|
|
15
|
+
setPageAuth(db, pageId, "bearer", "my-token-123")
|
|
16
|
+
const auth = getPageAuth(db, pageId)
|
|
17
|
+
expect(auth?.type).toBe("bearer")
|
|
18
|
+
expect(auth?.credentials).toBe("my-token-123")
|
|
19
|
+
})
|
|
20
|
+
|
|
21
|
+
it("credentials are encrypted at rest", () => {
|
|
22
|
+
const db = createTestDb()
|
|
23
|
+
const { pageId } = seedPage(db)
|
|
24
|
+
setPageAuth(db, pageId, "bearer", "secret-token")
|
|
25
|
+
const raw = db.prepare("SELECT credentials FROM page_auth WHERE page_id = ?").get(pageId) as { credentials: string }
|
|
26
|
+
// Raw value should NOT be the plaintext token
|
|
27
|
+
expect(raw.credentials).not.toBe("secret-token")
|
|
28
|
+
expect(raw.credentials).toContain(":") // IV:encrypted format
|
|
29
|
+
})
|
|
30
|
+
|
|
31
|
+
it("upserts on duplicate page_id", () => {
|
|
32
|
+
const db = createTestDb()
|
|
33
|
+
const { pageId } = seedPage(db)
|
|
34
|
+
setPageAuth(db, pageId, "bearer", "token-v1")
|
|
35
|
+
setPageAuth(db, pageId, "bearer", "token-v2")
|
|
36
|
+
const auth = getPageAuth(db, pageId)
|
|
37
|
+
expect(auth?.credentials).toBe("token-v2")
|
|
38
|
+
const { c } = db.prepare("SELECT COUNT(*) as c FROM page_auth WHERE page_id = ?").get(pageId) as { c: number }
|
|
39
|
+
expect(c).toBe(1)
|
|
40
|
+
})
|
|
41
|
+
|
|
42
|
+
it("returns null for unknown page", () => {
|
|
43
|
+
const db = createTestDb()
|
|
44
|
+
expect(getPageAuth(db, "nope")).toBeNull()
|
|
45
|
+
})
|
|
46
|
+
|
|
47
|
+
it("deletes auth", () => {
|
|
48
|
+
const db = createTestDb()
|
|
49
|
+
const { pageId } = seedPage(db)
|
|
50
|
+
setPageAuth(db, pageId, "basic", "user:pass")
|
|
51
|
+
deletePageAuth(db, pageId)
|
|
52
|
+
expect(getPageAuth(db, pageId)).toBeNull()
|
|
53
|
+
})
|
|
54
|
+
})
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
import type { Database } from "bun:sqlite"
|
|
2
|
+
import { createCipheriv, createDecipheriv, randomBytes } from "node:crypto"
|
|
3
|
+
|
|
4
|
+
const SECRET_KEY = Buffer.from((process.env.LOGS_SECRET_KEY ?? "open-logs-default-key-32bytesXXX").padEnd(32).slice(0, 32))
|
|
5
|
+
|
|
6
|
+
export interface PageAuth {
|
|
7
|
+
id: string
|
|
8
|
+
page_id: string
|
|
9
|
+
type: "cookie" | "bearer" | "basic"
|
|
10
|
+
credentials: string
|
|
11
|
+
created_at: string
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
function encrypt(text: string): string {
|
|
15
|
+
const iv = randomBytes(16)
|
|
16
|
+
const cipher = createCipheriv("aes-256-cbc", SECRET_KEY, iv)
|
|
17
|
+
const encrypted = Buffer.concat([cipher.update(text, "utf8"), cipher.final()])
|
|
18
|
+
return iv.toString("hex") + ":" + encrypted.toString("hex")
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
function decrypt(text: string): string {
|
|
22
|
+
const [ivHex, encHex] = text.split(":")
|
|
23
|
+
if (!ivHex || !encHex) return text
|
|
24
|
+
const iv = Buffer.from(ivHex, "hex")
|
|
25
|
+
const enc = Buffer.from(encHex, "hex")
|
|
26
|
+
const decipher = createDecipheriv("aes-256-cbc", SECRET_KEY, iv)
|
|
27
|
+
return Buffer.concat([decipher.update(enc), decipher.final()]).toString("utf8")
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
export function setPageAuth(db: Database, pageId: string, type: PageAuth["type"], credentials: string): PageAuth {
|
|
31
|
+
const encrypted = encrypt(credentials)
|
|
32
|
+
return db.prepare(`
|
|
33
|
+
INSERT INTO page_auth (page_id, type, credentials)
|
|
34
|
+
VALUES ($page_id, $type, $credentials)
|
|
35
|
+
ON CONFLICT(page_id) DO UPDATE SET type = excluded.type, credentials = excluded.credentials
|
|
36
|
+
RETURNING *
|
|
37
|
+
`).get({ $page_id: pageId, $type: type, $credentials: encrypted }) as PageAuth
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
export function getPageAuth(db: Database, pageId: string): { type: PageAuth["type"]; credentials: string } | null {
|
|
41
|
+
const row = db.prepare("SELECT * FROM page_auth WHERE page_id = $id").get({ $id: pageId }) as PageAuth | null
|
|
42
|
+
if (!row) return null
|
|
43
|
+
return { type: row.type, credentials: decrypt(row.credentials) }
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
export function deletePageAuth(db: Database, pageId: string): void {
|
|
47
|
+
db.run("DELETE FROM page_auth WHERE page_id = $id", { $id: pageId })
|
|
48
|
+
}
|