@hasna/logs 0.0.1 → 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,117 @@
1
+ import type { Database } from "bun:sqlite"
2
+
3
// A per-project alerting rule: when at least `threshold_count` logs of
// `level` (optionally scoped to one `service`) arrive within the last
// `window_seconds`, the rule fires via `action` (webhook POST or console log).
export interface AlertRule {
  id: string
  project_id: string
  name: string
  service: string | null        // null = match logs from any service
  level: string
  threshold_count: number       // minimum matching logs required to fire
  window_seconds: number        // sliding evaluation window, in seconds
  action: "webhook" | "log"
  webhook_url: string | null    // consulted at fire time when action = "webhook"
  enabled: number               // SQLite boolean: 1 = active, 0 = disabled
  last_fired_at: string | null  // ISO timestamp of the last firing; used for debounce
  created_at: string
}
17
+
18
+ export function createAlertRule(db: Database, data: {
19
+ project_id: string
20
+ name: string
21
+ service?: string
22
+ level?: string
23
+ threshold_count?: number
24
+ window_seconds?: number
25
+ action?: "webhook" | "log"
26
+ webhook_url?: string
27
+ }): AlertRule {
28
+ return db.prepare(`
29
+ INSERT INTO alert_rules (project_id, name, service, level, threshold_count, window_seconds, action, webhook_url)
30
+ VALUES ($project_id, $name, $service, $level, $threshold_count, $window_seconds, $action, $webhook_url)
31
+ RETURNING *
32
+ `).get({
33
+ $project_id: data.project_id,
34
+ $name: data.name,
35
+ $service: data.service ?? null,
36
+ $level: data.level ?? "error",
37
+ $threshold_count: data.threshold_count ?? 10,
38
+ $window_seconds: data.window_seconds ?? 60,
39
+ $action: data.action ?? "webhook",
40
+ $webhook_url: data.webhook_url ?? null,
41
+ }) as AlertRule
42
+ }
43
+
44
+ export function listAlertRules(db: Database, projectId?: string): AlertRule[] {
45
+ if (projectId) {
46
+ return db.prepare("SELECT * FROM alert_rules WHERE project_id = $p ORDER BY created_at DESC").all({ $p: projectId }) as AlertRule[]
47
+ }
48
+ return db.prepare("SELECT * FROM alert_rules ORDER BY created_at DESC").all() as AlertRule[]
49
+ }
50
+
51
+ export function updateAlertRule(db: Database, id: string, data: Partial<Pick<AlertRule, "enabled" | "threshold_count" | "window_seconds" | "webhook_url">>): AlertRule | null {
52
+ const fields = Object.keys(data).map(k => `${k} = $${k}`).join(", ")
53
+ if (!fields) return db.prepare("SELECT * FROM alert_rules WHERE id = $id").get({ $id: id }) as AlertRule | null
54
+ const params = Object.fromEntries(Object.entries(data).map(([k, v]) => [`$${k}`, v]))
55
+ params.$id = id
56
+ return db.prepare(`UPDATE alert_rules SET ${fields} WHERE id = $id RETURNING *`).get(params) as AlertRule | null
57
+ }
58
+
59
+ export function deleteAlertRule(db: Database, id: string): void {
60
+ db.run("DELETE FROM alert_rules WHERE id = $id", { $id: id })
61
+ }
62
+
63
/**
 * Evaluates every enabled alert rule matching a just-ingested log's
 * project/level/service, firing those whose threshold is met.
 *
 * Level matching is exact (a "fatal" log does not trigger "error" rules).
 * Service matching: rules with a NULL service match any service, and a
 * NULL incoming service matches every candidate rule.
 */
export async function evaluateAlerts(db: Database, projectId: string, service: string | null, level: string): Promise<void> {
  const rules = db.prepare(`
    SELECT * FROM alert_rules
    WHERE project_id = $p AND level = $level AND enabled = 1
    AND ($service IS NULL OR service IS NULL OR service = $service)
  `).all({ $p: projectId, $level: level, $service: service }) as AlertRule[]

  for (const rule of rules) {
    // Count matching logs inside this rule's sliding window.
    const since = new Date(Date.now() - rule.window_seconds * 1000).toISOString()
    const conditions = ["project_id = $p", "level = $level", "timestamp >= $since"]
    const params: Record<string, unknown> = { $p: projectId, $level: rule.level, $since: since }
    // Service-scoped rules narrow the count; NULL-service rules count all.
    if (rule.service) { conditions.push("service = $service"); params.$service = rule.service }

    const { count } = db.prepare(`SELECT COUNT(*) as count FROM logs WHERE ${conditions.join(" AND ")}`).get(params) as { count: number }

    if (count >= rule.threshold_count) {
      // Sequential on purpose; fireAlert debounces per rule internally.
      await fireAlert(db, rule, count)
    }
  }
}
83
+
84
/**
 * Fires a single alert: stamps last_fired_at, then either POSTs a JSON
 * payload to the rule's webhook or logs a console warning. Webhook
 * failures are logged and swallowed so they never break the ingest path.
 */
async function fireAlert(db: Database, rule: AlertRule, count: number): Promise<void> {
  // Debounce: don't fire more than once per window
  if (rule.last_fired_at) {
    const lastFired = new Date(rule.last_fired_at).getTime()
    if (Date.now() - lastFired < rule.window_seconds * 1000) return
  }

  // Stamp the firing time before any network I/O so a slow or failing
  // webhook still counts toward the debounce window.
  db.run("UPDATE alert_rules SET last_fired_at = strftime('%Y-%m-%dT%H:%M:%fZ','now') WHERE id = $id", { $id: rule.id })

  const payload = {
    alert: rule.name,
    project_id: rule.project_id,
    level: rule.level,
    service: rule.service,
    count,
    threshold: rule.threshold_count,
    window_seconds: rule.window_seconds,
    fired_at: new Date().toISOString(),
  }

  if (rule.action === "webhook" && rule.webhook_url) {
    try {
      await fetch(rule.webhook_url, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify(payload),
      })
    } catch (err) {
      console.error(`Alert webhook failed for rule ${rule.id}:`, err)
    }
  } else {
    // Covers action === "log" and misconfigured webhook rules with no URL.
    console.warn(`[ALERT] ${rule.name}:`, JSON.stringify(payload))
  }
}
@@ -0,0 +1,52 @@
1
+ import { describe, expect, it } from "bun:test"
2
+ import { createTestDb } from "../db/index.ts"
3
+ import { ingestBatch } from "./ingest.ts"
4
+ import { compare } from "./compare.ts"
5
+
6
+ function seedProject(db: ReturnType<typeof createTestDb>) {
7
+ return db.prepare("INSERT INTO projects (name) VALUES ('app') RETURNING id").get() as { id: string }
8
+ }
9
+
10
describe("compare", () => {
  it("detects new errors in window B", () => {
    const db = createTestDb()
    const p = seedProject(db)
    // Window A spans [48h ago, 24h ago]; window B spans [24h ago, now].
    // (Variable names are off by 2x: dayAgo is actually 48h, halfDayAgo 24h.)
    const dayAgo = new Date(Date.now() - 48 * 3600 * 1000).toISOString()
    const halfDayAgo = new Date(Date.now() - 24 * 3600 * 1000).toISOString()
    const now = new Date().toISOString()

    // Window A: old error
    db.prepare("INSERT INTO logs (project_id, level, message, service, timestamp) VALUES (?, 'error', 'old bug', 'api', ?)").run(p.id, dayAgo)
    // Window B: new error
    db.prepare("INSERT INTO logs (project_id, level, message, service, timestamp) VALUES (?, 'error', 'new bug', 'api', ?)").run(p.id, now)

    const result = compare(db, p.id, dayAgo, halfDayAgo, halfDayAgo, now)
    expect(result.new_errors.some(e => e.message === "new bug")).toBe(true)
    expect(result.resolved_errors.some(e => e.message === "old bug")).toBe(true)
  })

  it("returns empty diff when no changes", () => {
    const db = createTestDb()
    const p = seedProject(db)
    const since = new Date(Date.now() - 48 * 3600 * 1000).toISOString()
    const mid = new Date(Date.now() - 24 * 3600 * 1000).toISOString()
    const now = new Date().toISOString()
    // No logs at all → both windows are empty → empty diff.
    const result = compare(db, p.id, since, mid, mid, now)
    expect(result.new_errors).toHaveLength(0)
    expect(result.resolved_errors).toHaveLength(0)
  })

  it("has correct structure", () => {
    const db = createTestDb()
    const p = seedProject(db)
    const since = new Date(Date.now() - 48 * 3600 * 1000).toISOString()
    const mid = new Date(Date.now() - 24 * 3600 * 1000).toISOString()
    const now = new Date().toISOString()
    const result = compare(db, p.id, since, mid, mid, now)
    // Shape-only assertions: every top-level key of CompareResult exists.
    expect(result).toHaveProperty("project_id")
    expect(result).toHaveProperty("new_errors")
    expect(result).toHaveProperty("resolved_errors")
    expect(result).toHaveProperty("error_delta_by_service")
    expect(result).toHaveProperty("summary")
  })
})
@@ -0,0 +1,85 @@
1
+ import type { Database } from "bun:sqlite"
2
+
3
// Result of diffing two time windows (A = baseline, B = comparison) of a
// project's logs and performance snapshots.
export interface CompareResult {
  project_id: string
  window_a: { since: string; until: string }  // baseline window (inclusive ISO bounds)
  window_b: { since: string; until: string }  // comparison window
  new_errors: { message: string; service: string | null; count: number }[]       // (service, message) pairs seen in B but not A
  resolved_errors: { message: string; service: string | null; count: number }[]  // seen in A but not B
  error_delta_by_service: { service: string | null; errors_a: number; errors_b: number; delta: number }[]  // sorted by |delta| desc
  perf_delta_by_page: { page_id: string; url: string; score_a: number | null; score_b: number | null; delta: number | null }[]
  summary: string  // one-line human-readable digest
}
13
+
14
+ function getErrorsByMessage(db: Database, projectId: string, since: string, until: string) {
15
+ return db.prepare(`
16
+ SELECT message, service, COUNT(*) as count
17
+ FROM logs
18
+ WHERE project_id = $p AND level IN ('error','fatal') AND timestamp >= $since AND timestamp <= $until
19
+ GROUP BY message, service
20
+ `).all({ $p: projectId, $since: since, $until: until }) as { message: string; service: string | null; count: number }[]
21
+ }
22
+
23
+ function getErrorsByService(db: Database, projectId: string, since: string, until: string) {
24
+ return db.prepare(`
25
+ SELECT service, COUNT(*) as errors
26
+ FROM logs
27
+ WHERE project_id = $p AND level IN ('error','fatal') AND timestamp >= $since AND timestamp <= $until
28
+ GROUP BY service
29
+ `).all({ $p: projectId, $since: since, $until: until }) as { service: string | null; errors: number }[]
30
+ }
31
+
32
/**
 * Diffs two time windows of a project's logs and performance snapshots.
 * Window A is the baseline, window B the comparison; all bounds are
 * inclusive ISO-8601 timestamps.
 */
export function compare(
  db: Database,
  projectId: string,
  aSince: string, aUntil: string,
  bSince: string, bUntil: string,
): CompareResult {
  const errorsA = getErrorsByMessage(db, projectId, aSince, aUntil)
  const errorsB = getErrorsByMessage(db, projectId, bSince, bUntil)

  // An error type's identity is the (service, message) pair. A null
  // service stringifies to "null" on both sides, so keys stay consistent.
  const keyA = new Set(errorsA.map(e => `${e.service}|${e.message}`))
  const keyB = new Set(errorsB.map(e => `${e.service}|${e.message}`))

  const new_errors = errorsB.filter(e => !keyA.has(`${e.service}|${e.message}`))
  const resolved_errors = errorsA.filter(e => !keyB.has(`${e.service}|${e.message}`))

  // Service-level delta
  const svcA = getErrorsByService(db, projectId, aSince, aUntil)
  const svcB = getErrorsByService(db, projectId, bSince, bUntil)
  const svcMapA = new Map(svcA.map(s => [s.service, s.errors]))
  const svcMapB = new Map(svcB.map(s => [s.service, s.errors]))
  const allSvcs = new Set([...svcMapA.keys(), ...svcMapB.keys()])
  const error_delta_by_service = [...allSvcs].map(svc => ({
    service: svc,
    errors_a: svcMapA.get(svc) ?? 0,
    errors_b: svcMapB.get(svc) ?? 0,
    delta: (svcMapB.get(svc) ?? 0) - (svcMapA.get(svc) ?? 0),
  })).sort((a, b) => Math.abs(b.delta) - Math.abs(a.delta))  // biggest movers first

  // Perf delta per page: average score within each window, joined per page.
  // NOTE(review): the window-A subquery (pa) drives the join, so pages with
  // snapshots only in window B never appear in this list — confirm intended.
  // Pages missing from window B get score_b/delta = NULL via the LEFT JOIN.
  const perf_delta_by_page = db.prepare(`
    SELECT
      pa.page_id, pg.url,
      pa.score as score_a,
      pb.score as score_b,
      (pb.score - pa.score) as delta
    FROM
      (SELECT page_id, AVG(score) as score FROM performance_snapshots WHERE project_id = $p AND timestamp >= $as AND timestamp <= $au GROUP BY page_id) pa
    JOIN pages pg ON pg.id = pa.page_id
    LEFT JOIN (SELECT page_id, AVG(score) as score FROM performance_snapshots WHERE project_id = $p AND timestamp >= $bs AND timestamp <= $bu GROUP BY page_id) pb ON pb.page_id = pa.page_id
    ORDER BY delta ASC
  `).all({ $p: projectId, $as: aSince, $au: aUntil, $bs: bSince, $bu: bUntil }) as CompareResult["perf_delta_by_page"]

  const summary = [
    `${new_errors.length} new error type(s), ${resolved_errors.length} resolved.`,
    error_delta_by_service.filter(s => s.delta > 0).map(s => `${s.service ?? "unknown"}: +${s.delta}`).join(", ") || "No error increases.",
  ].join(" ")

  return {
    project_id: projectId,
    window_a: { since: aSince, until: aUntil },
    window_b: { since: bSince, until: bUntil },
    new_errors, resolved_errors, error_delta_by_service, perf_delta_by_page, summary,
  }
}
@@ -0,0 +1,55 @@
1
+ import { describe, expect, it } from "bun:test"
2
+ import { createTestDb } from "../db/index.ts"
3
+ import { ingestBatch } from "./ingest.ts"
4
+ import { diagnose } from "./diagnose.ts"
5
+
6
+ function seedProject(db: ReturnType<typeof createTestDb>) {
7
+ return db.prepare("INSERT INTO projects (name) VALUES ('app') RETURNING id").get() as { id: string }
8
+ }
9
+
10
describe("diagnose", () => {
  it("returns empty diagnosis for project with no logs", () => {
    const db = createTestDb()
    const p = seedProject(db)
    const result = diagnose(db, p.id)
    expect(result.project_id).toBe(p.id)
    expect(result.top_errors).toHaveLength(0)
    expect(result.summary).toContain("No errors")
  })

  it("surfaces top errors", () => {
    const db = createTestDb()
    const p = seedProject(db)
    // Two identical errors must outrank the single distinct one.
    ingestBatch(db, [
      { level: "error", message: "DB timeout", service: "api", project_id: p.id },
      { level: "error", message: "DB timeout", service: "api", project_id: p.id },
      { level: "error", message: "Auth failed", service: "auth", project_id: p.id },
    ])
    const result = diagnose(db, p.id)
    expect(result.top_errors.length).toBeGreaterThan(0)
    expect(result.top_errors[0]!.message).toBe("DB timeout")
    expect(result.top_errors[0]!.count).toBe(2)
  })

  it("populates summary with error info", () => {
    const db = createTestDb()
    const p = seedProject(db)
    ingestBatch(db, [{ level: "error", message: "boom", service: "api", project_id: p.id }])
    const result = diagnose(db, p.id)
    expect(result.summary).toContain("error")
  })

  it("groups error_rate_by_service", () => {
    const db = createTestDb()
    const p = seedProject(db)
    // "api" gets one error out of two logs; "db" only a warn.
    ingestBatch(db, [
      { level: "error", message: "e1", service: "api", project_id: p.id },
      { level: "info", message: "i1", service: "api", project_id: p.id },
      { level: "warn", message: "w1", service: "db", project_id: p.id },
    ])
    const result = diagnose(db, p.id)
    const api = result.error_rate_by_service.find(s => s.service === "api")
    expect(api?.errors).toBe(1)
    expect(api?.total).toBe(2)
  })
})
@@ -0,0 +1,76 @@
1
+ import type { Database } from "bun:sqlite"
2
+
3
// Health diagnosis of one project over a time window.
export interface DiagnosisResult {
  project_id: string
  window: string  // inclusive ISO lower bound of the analyzed window
  top_errors: { message: string; count: number; service: string | null; last_seen: string }[]  // at most 10, by count desc
  error_rate_by_service: { service: string | null; errors: number; warns: number; total: number }[]
  failing_pages: { page_id: string; url: string; error_count: number }[]  // at most 10, by error count desc
  perf_regressions: { page_id: string; url: string; score_now: number | null; score_prev: number | null; delta: number | null }[]
  summary: string  // one-line human-readable digest
}
12
+
13
+ export function diagnose(db: Database, projectId: string, since?: string): DiagnosisResult {
14
+ const window = since ?? new Date(Date.now() - 24 * 3600 * 1000).toISOString()
15
+
16
+ // Top errors by message
17
+ const top_errors = db.prepare(`
18
+ SELECT message, COUNT(*) as count, service, MAX(timestamp) as last_seen
19
+ FROM logs
20
+ WHERE project_id = $p AND level IN ('error','fatal') AND timestamp >= $since
21
+ GROUP BY message, service
22
+ ORDER BY count DESC
23
+ LIMIT 10
24
+ `).all({ $p: projectId, $since: window }) as DiagnosisResult["top_errors"]
25
+
26
+ // Error rate by service
27
+ const error_rate_by_service = db.prepare(`
28
+ SELECT service,
29
+ SUM(CASE WHEN level IN ('error','fatal') THEN 1 ELSE 0 END) as errors,
30
+ SUM(CASE WHEN level = 'warn' THEN 1 ELSE 0 END) as warns,
31
+ COUNT(*) as total
32
+ FROM logs
33
+ WHERE project_id = $p AND timestamp >= $since
34
+ GROUP BY service
35
+ ORDER BY errors DESC
36
+ `).all({ $p: projectId, $since: window }) as DiagnosisResult["error_rate_by_service"]
37
+
38
+ // Failing pages (most errors)
39
+ const failing_pages = db.prepare(`
40
+ SELECT l.page_id, p.url, COUNT(*) as error_count
41
+ FROM logs l
42
+ JOIN pages p ON p.id = l.page_id
43
+ WHERE l.project_id = $p AND l.level IN ('error','fatal') AND l.timestamp >= $since AND l.page_id IS NOT NULL
44
+ GROUP BY l.page_id, p.url
45
+ ORDER BY error_count DESC
46
+ LIMIT 10
47
+ `).all({ $p: projectId, $since: window }) as DiagnosisResult["failing_pages"]
48
+
49
+ // Perf regressions: compare latest vs previous snapshot per page
50
+ const perf_regressions = db.prepare(`
51
+ SELECT * FROM (
52
+ SELECT
53
+ cur.page_id,
54
+ p.url,
55
+ cur.score as score_now,
56
+ prev.score as score_prev,
57
+ (cur.score - prev.score) as delta
58
+ FROM performance_snapshots cur
59
+ JOIN pages p ON p.id = cur.page_id
60
+ LEFT JOIN performance_snapshots prev ON prev.page_id = cur.page_id AND prev.id != cur.id
61
+ WHERE cur.project_id = $p
62
+ AND cur.timestamp = (SELECT MAX(timestamp) FROM performance_snapshots WHERE page_id = cur.page_id)
63
+ AND (prev.timestamp = (SELECT MAX(timestamp) FROM performance_snapshots WHERE page_id = cur.page_id AND id != cur.id) OR prev.id IS NULL)
64
+ ) WHERE delta < -5 OR delta IS NULL
65
+ ORDER BY delta ASC
66
+ LIMIT 10
67
+ `).all({ $p: projectId }) as DiagnosisResult["perf_regressions"]
68
+
69
+ const totalErrors = top_errors.reduce((s, e) => s + e.count, 0)
70
+ const topService = error_rate_by_service[0]
71
+ const summary = totalErrors === 0
72
+ ? "No errors in this window. All looks good."
73
+ : `${totalErrors} error(s) detected. Worst service: ${topService?.service ?? "unknown"} (${topService?.errors ?? 0} errors). ${failing_pages.length} page(s) with errors. ${perf_regressions.length} perf regression(s).`
74
+
75
+ return { project_id: projectId, window, top_errors, error_rate_by_service, failing_pages, perf_regressions, summary }
76
+ }
@@ -0,0 +1,66 @@
1
+ import { describe, expect, it } from "bun:test"
2
+ import { createTestDb } from "../db/index.ts"
3
+ import { ingestBatch } from "./ingest.ts"
4
+ import { exportToCsv, exportToJson } from "./export.ts"
5
+
6
+ function seed(db: ReturnType<typeof createTestDb>) {
7
+ ingestBatch(db, [
8
+ { level: "error", message: "boom", service: "api" },
9
+ { level: "info", message: "ok", service: "web" },
10
+ { level: "warn", message: 'has "quotes"', service: "db" },
11
+ ])
12
+ }
13
+
14
describe("exportToJson", () => {
  it("exports all logs as JSON array", () => {
    const db = createTestDb()
    seed(db)
    // Collect the streamed chunks and re-parse to validate the JSON shape.
    const chunks: string[] = []
    const count = exportToJson(db, {}, s => chunks.push(s))
    expect(count).toBe(3)
    const parsed = JSON.parse(chunks.join(""))
    expect(Array.isArray(parsed)).toBe(true)
    expect(parsed).toHaveLength(3)
  })

  it("filters by level", () => {
    const db = createTestDb()
    seed(db)
    const chunks: string[] = []
    const count = exportToJson(db, { level: "error" }, s => chunks.push(s))
    expect(count).toBe(1)
    const parsed = JSON.parse(chunks.join(""))
    expect(parsed[0].level).toBe("error")
  })
})
36
+
37
describe("exportToCsv", () => {
  it("exports CSV with header", () => {
    const db = createTestDb()
    seed(db)
    const chunks: string[] = []
    const count = exportToCsv(db, {}, s => chunks.push(s))
    // count is data rows only; the header line is not counted.
    expect(count).toBe(3)
    const csv = chunks.join("")
    expect(csv).toContain("id,timestamp,level")
    expect(csv).toContain("error")
    expect(csv).toContain("boom")
  })

  it("escapes CSV quotes", () => {
    const db = createTestDb()
    seed(db)
    const chunks: string[] = []
    exportToCsv(db, { level: "warn" }, s => chunks.push(s))
    const csv = chunks.join("")
    // RFC 4180 style: the field is quoted and inner quotes are doubled.
    expect(csv).toContain('"has ""quotes"""')
  })

  it("filters by service", () => {
    const db = createTestDb()
    seed(db)
    const chunks: string[] = []
    const count = exportToCsv(db, { service: "api" }, s => chunks.push(s))
    expect(count).toBe(1)
  })
})
@@ -0,0 +1,65 @@
1
+ import type { Database } from "bun:sqlite"
2
+ import type { LogRow } from "../types/index.ts"
3
+
4
// Filters for a log export. All fields are optional; omitted filters
// match everything.
export interface ExportOptions {
  project_id?: string
  since?: string   // inclusive ISO lower bound on timestamp
  until?: string   // inclusive ISO upper bound on timestamp
  level?: string
  service?: string
  limit?: number   // max rows to export; defaults to 100_000
}
12
+
13
+ function* iterLogs(db: Database, opts: ExportOptions): Generator<LogRow> {
14
+ const conditions: string[] = []
15
+ const params: Record<string, unknown> = {}
16
+ if (opts.project_id) { conditions.push("project_id = $p"); params.$p = opts.project_id }
17
+ if (opts.since) { conditions.push("timestamp >= $since"); params.$since = opts.since }
18
+ if (opts.until) { conditions.push("timestamp <= $until"); params.$until = opts.until }
19
+ if (opts.level) { conditions.push("level = $level"); params.$level = opts.level }
20
+ if (opts.service) { conditions.push("service = $service"); params.$service = opts.service }
21
+ const where = conditions.length ? `WHERE ${conditions.join(" AND ")}` : ""
22
+ const limit = opts.limit ?? 100_000
23
+
24
+ // Batch in pages of 1000 to avoid memory issues
25
+ let offset = 0
26
+ while (offset < limit) {
27
+ const batch = db.prepare(`SELECT * FROM logs ${where} ORDER BY timestamp ASC LIMIT 1000 OFFSET $offset`)
28
+ .all({ ...params, $offset: offset }) as LogRow[]
29
+ if (!batch.length) break
30
+ yield* batch
31
+ offset += batch.length
32
+ if (batch.length < 1000) break
33
+ }
34
+ }
35
+
36
+ export function exportToJson(db: Database, opts: ExportOptions, writeLine: (s: string) => void): number {
37
+ writeLine("[")
38
+ let count = 0
39
+ for (const row of iterLogs(db, opts)) {
40
+ writeLine((count > 0 ? "," : "") + JSON.stringify(row))
41
+ count++
42
+ }
43
+ writeLine("]")
44
+ return count
45
+ }
46
+
47
+ const CSV_HEADER = "id,timestamp,level,service,message,trace_id,url\n"
48
+
49
+ export function exportToCsv(db: Database, opts: ExportOptions, writeLine: (s: string) => void): number {
50
+ writeLine(CSV_HEADER)
51
+ let count = 0
52
+ for (const row of iterLogs(db, opts)) {
53
+ const fields = [row.id, row.timestamp, row.level, row.service ?? "", escapeCSV(row.message), row.trace_id ?? "", row.url ?? ""]
54
+ writeLine(fields.join(",") + "\n")
55
+ count++
56
+ }
57
+ return count
58
+ }
59
+
60
+ function escapeCSV(s: string): string {
61
+ if (s.includes(",") || s.includes('"') || s.includes("\n")) {
62
+ return `"${s.replace(/"/g, '""')}"`
63
+ }
64
+ return s
65
+ }
@@ -0,0 +1,48 @@
1
+ import { describe, expect, it } from "bun:test"
2
+ import { createTestDb } from "../db/index.ts"
3
+ import { ingestBatch } from "./ingest.ts"
4
+ import { getHealth } from "./health.ts"
5
+
6
describe("getHealth", () => {
  it("returns status ok", () => {
    const db = createTestDb()
    const h = getHealth(db)
    expect(h.status).toBe("ok")
  })

  it("counts total logs", () => {
    const db = createTestDb()
    ingestBatch(db, [{ level: "info", message: "a" }, { level: "error", message: "b" }])
    const h = getHealth(db)
    expect(h.total_logs).toBe(2)
  })

  it("returns logs_by_level breakdown", () => {
    const db = createTestDb()
    // Two errors and one info → per-level histogram.
    ingestBatch(db, [{ level: "info", message: "a" }, { level: "error", message: "b" }, { level: "error", message: "c" }])
    const h = getHealth(db)
    expect(h.logs_by_level["error"]).toBe(2)
    expect(h.logs_by_level["info"]).toBe(1)
  })

  it("counts projects", () => {
    const db = createTestDb()
    db.prepare("INSERT INTO projects (name) VALUES ('p1')").run()
    db.prepare("INSERT INTO projects (name) VALUES ('p2')").run()
    const h = getHealth(db)
    expect(h.projects).toBe(2)
  })

  it("returns uptime_seconds >= 0", () => {
    const h = getHealth(createTestDb())
    expect(h.uptime_seconds).toBeGreaterThanOrEqual(0)
  })

  it("returns newest and oldest log timestamps", () => {
    const db = createTestDb()
    ingestBatch(db, [{ level: "info", message: "first" }, { level: "warn", message: "last" }])
    const h = getHealth(db)
    // Only checks presence; both entries may share the same timestamp.
    expect(h.oldest_log).toBeTruthy()
    expect(h.newest_log).toBeTruthy()
  })
})
@@ -0,0 +1,51 @@
1
+ import type { Database } from "bun:sqlite"
2
+
3
// Module-load time, used to report process uptime.
const startTime = Date.now()

// Snapshot of server/database health returned by getHealth().
export interface HealthResult {
  status: "ok"
  uptime_seconds: number
  db_size_bytes: number | null  // null when the DB is in-memory or unreadable
  projects: number
  total_logs: number
  logs_by_level: Record<string, number>  // per-level histogram, e.g. { error: 2 }
  oldest_log: string | null     // MIN(timestamp) across logs; null when empty
  newest_log: string | null     // MAX(timestamp) across logs; null when empty
  scheduler_jobs: number        // count of enabled scan_jobs rows
  open_issues: number           // count of issues with status = 'open'
}
17
+
18
/**
 * Collects aggregate counts and metadata describing the current state of
 * the log store — cheap enough to back a /health endpoint.
 */
export function getHealth(db: Database): HealthResult {
  const projects = (db.prepare("SELECT COUNT(*) as c FROM projects").get() as { c: number }).c
  const total_logs = (db.prepare("SELECT COUNT(*) as c FROM logs").get() as { c: number }).c
  const scheduler_jobs = (db.prepare("SELECT COUNT(*) as c FROM scan_jobs WHERE enabled = 1").get() as { c: number }).c
  const open_issues = (db.prepare("SELECT COUNT(*) as c FROM issues WHERE status = 'open'").get() as { c: number }).c

  // Per-level histogram, e.g. { error: 2, info: 1 }.
  const levelRows = db.prepare("SELECT level, COUNT(*) as c FROM logs GROUP BY level").all() as { level: string; c: number }[]
  const logs_by_level = Object.fromEntries(levelRows.map(r => [r.level, r.c]))

  const oldest = db.prepare("SELECT MIN(timestamp) as t FROM logs").get() as { t: string | null }
  const newest = db.prepare("SELECT MAX(timestamp) as t FROM logs").get() as { t: string | null }

  // Best-effort on-disk size; stays null for in-memory databases.
  // NOTE(review): relies on CommonJS `require` existing at runtime (true
  // under Bun); in a pure-ESM context it would throw and be swallowed by
  // the catch, silently yielding null — confirm the target runtime.
  let db_size_bytes: number | null = null
  try {
    const dbPath = process.env.LOGS_DB_PATH
    if (dbPath) {
      const { statSync } = require("node:fs")
      db_size_bytes = statSync(dbPath).size
    }
  } catch { /* in-memory or not accessible */ }

  return {
    status: "ok",
    uptime_seconds: Math.floor((Date.now() - startTime) / 1000),
    db_size_bytes,
    projects,
    total_logs,
    logs_by_level,
    oldest_log: oldest.t,
    newest_log: newest.t,
    scheduler_jobs,
    open_issues,
  }
}
package/src/lib/ingest.ts CHANGED
@@ -1,5 +1,9 @@
1
1
  import type { Database } from "bun:sqlite"
2
2
  import type { LogEntry, LogRow } from "../types/index.ts"
3
+ import { upsertIssue } from "./issues.ts"
4
+ import { evaluateAlerts } from "./alerts.ts"
5
+
6
+ const ERROR_LEVELS = new Set(["warn", "error", "fatal"])
3
7
 
4
8
  export function ingestLog(db: Database, entry: LogEntry): LogRow {
5
9
  const stmt = db.prepare(`
@@ -7,7 +11,7 @@ export function ingestLog(db: Database, entry: LogEntry): LogRow {
7
11
  VALUES ($project_id, $page_id, $level, $source, $service, $message, $trace_id, $session_id, $agent, $url, $stack_trace, $metadata)
8
12
  RETURNING *
9
13
  `)
10
- return stmt.get({
14
+ const row = stmt.get({
11
15
  $project_id: entry.project_id ?? null,
12
16
  $page_id: entry.page_id ?? null,
13
17
  $level: entry.level,
@@ -21,6 +25,16 @@ export function ingestLog(db: Database, entry: LogEntry): LogRow {
21
25
  $stack_trace: entry.stack_trace ?? null,
22
26
  $metadata: entry.metadata ? JSON.stringify(entry.metadata) : null,
23
27
  }) as LogRow
28
+
29
+ // Side effects: issue grouping + alert evaluation (fire-and-forget)
30
+ if (ERROR_LEVELS.has(entry.level)) {
31
+ if (entry.project_id) {
32
+ upsertIssue(db, { project_id: entry.project_id, level: entry.level, service: entry.service, message: entry.message, stack_trace: entry.stack_trace })
33
+ evaluateAlerts(db, entry.project_id, entry.service ?? null, entry.level).catch(() => {})
34
+ }
35
+ }
36
+
37
+ return row
24
38
  }
25
39
 
26
40
  export function ingestBatch(db: Database, entries: LogEntry[]): LogRow[] {
@@ -47,5 +61,14 @@ export function ingestBatch(db: Database, entries: LogEntry[]): LogRow[] {
47
61
  }) as LogRow
48
62
  )
49
63
  )
50
- return tx(entries)
64
+ const rows = tx(entries)
65
+
66
+ // Issue grouping for error-level entries (outside transaction for perf)
67
+ for (const entry of entries) {
68
+ if (ERROR_LEVELS.has(entry.level) && entry.project_id) {
69
+ upsertIssue(db, { project_id: entry.project_id, level: entry.level, service: entry.service, message: entry.message, stack_trace: entry.stack_trace })
70
+ }
71
+ }
72
+
73
+ return rows
51
74
  }