@hasna/logs 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,56 @@
1
+ import type { Database } from "bun:sqlite"
2
+ import type { LogQuery, LogRow } from "../types/index.ts"
3
+
4
+ export function searchLogs(db: Database, q: LogQuery): LogRow[] {
5
+ const conditions: string[] = []
6
+ const params: Record<string, unknown> = {}
7
+
8
+ if (q.project_id) { conditions.push("l.project_id = $project_id"); params.$project_id = q.project_id }
9
+ if (q.page_id) { conditions.push("l.page_id = $page_id"); params.$page_id = q.page_id }
10
+ if (q.service) { conditions.push("l.service = $service"); params.$service = q.service }
11
+ if (q.trace_id) { conditions.push("l.trace_id = $trace_id"); params.$trace_id = q.trace_id }
12
+ if (q.since) { conditions.push("l.timestamp >= $since"); params.$since = q.since }
13
+ if (q.until) { conditions.push("l.timestamp <= $until"); params.$until = q.until }
14
+
15
+ if (q.level) {
16
+ const levels = Array.isArray(q.level) ? q.level : [q.level]
17
+ const placeholders = levels.map((_, i) => `$level${i}`).join(",")
18
+ levels.forEach((lv, i) => { params[`$level${i}`] = lv })
19
+ conditions.push(`l.level IN (${placeholders})`)
20
+ }
21
+
22
+ const limit = q.limit ?? 100
23
+ const offset = q.offset ?? 0
24
+ params.$limit = limit
25
+ params.$offset = offset
26
+
27
+ if (q.text) {
28
+ // FTS search via subquery
29
+ params.$text = q.text
30
+ const where = conditions.length ? `WHERE ${conditions.join(" AND ")} AND` : "WHERE"
31
+ const sql = `
32
+ SELECT l.* FROM logs l
33
+ ${where} l.rowid IN (SELECT rowid FROM logs_fts WHERE logs_fts MATCH $text)
34
+ ORDER BY l.timestamp DESC
35
+ LIMIT $limit OFFSET $offset
36
+ `
37
+ return db.prepare(sql).all(params) as LogRow[]
38
+ }
39
+
40
+ const where = conditions.length ? `WHERE ${conditions.join(" AND ")}` : ""
41
+ const sql = `SELECT * FROM logs l ${where} ORDER BY l.timestamp DESC LIMIT $limit OFFSET $offset`
42
+ return db.prepare(sql).all(params) as LogRow[]
43
+ }
44
+
45
+ export function tailLogs(db: Database, projectId?: string, n = 50): LogRow[] {
46
+ if (projectId) {
47
+ return db.prepare("SELECT * FROM logs WHERE project_id = $p ORDER BY timestamp DESC LIMIT $n")
48
+ .all({ $p: projectId, $n: n }) as LogRow[]
49
+ }
50
+ return db.prepare("SELECT * FROM logs ORDER BY timestamp DESC LIMIT $n").all({ $n: n }) as LogRow[]
51
+ }
52
+
53
+ export function getLogContext(db: Database, traceId: string): LogRow[] {
54
+ return db.prepare("SELECT * FROM logs WHERE trace_id = $t ORDER BY timestamp ASC")
55
+ .all({ $t: traceId }) as LogRow[]
56
+ }
@@ -0,0 +1,37 @@
1
+ import { describe, expect, it } from "bun:test"
2
+ import { createTestDb } from "../db/index.ts"
3
+ import { ingestBatch } from "./ingest.ts"
4
+ import { rotateLogs, rotateByProject } from "./rotate.ts"
5
+
6
+ describe("rotateLogs", () => {
7
+ it("does nothing when under maxRows", () => {
8
+ const db = createTestDb()
9
+ ingestBatch(db, [{ level: "info", message: "a" }, { level: "info", message: "b" }])
10
+ const deleted = rotateLogs(db, 100)
11
+ expect(deleted).toBe(0)
12
+ })
13
+
14
+ it("deletes oldest when over maxRows", () => {
15
+ const db = createTestDb()
16
+ ingestBatch(db, Array.from({ length: 10 }, (_, i) => ({ level: "info" as const, message: `msg ${i}` })))
17
+ const deleted = rotateLogs(db, 5)
18
+ expect(deleted).toBe(5)
19
+ const remaining = (db.prepare("SELECT COUNT(*) as c FROM logs").get() as { c: number }).c
20
+ expect(remaining).toBe(5)
21
+ })
22
+ })
23
+
24
+ describe("rotateByProject", () => {
25
+ it("only rotates logs for the specified project", () => {
26
+ const db = createTestDb()
27
+ const p1 = db.prepare("INSERT INTO projects (name) VALUES ('p1') RETURNING id").get() as { id: string }
28
+ const p2 = db.prepare("INSERT INTO projects (name) VALUES ('p2') RETURNING id").get() as { id: string }
29
+ ingestBatch(db, Array.from({ length: 8 }, () => ({ level: "info" as const, message: "x", project_id: p1.id })))
30
+ ingestBatch(db, Array.from({ length: 5 }, () => ({ level: "info" as const, message: "y", project_id: p2.id })))
31
+ rotateByProject(db, p1.id, 3)
32
+ const p1count = (db.prepare("SELECT COUNT(*) as c FROM logs WHERE project_id = ?").get(p1.id) as { c: number }).c
33
+ const p2count = (db.prepare("SELECT COUNT(*) as c FROM logs WHERE project_id = ?").get(p2.id) as { c: number }).c
34
+ expect(p1count).toBe(3)
35
+ expect(p2count).toBe(5) // untouched
36
+ })
37
+ })
@@ -0,0 +1,27 @@
1
+ import type { Database } from "bun:sqlite"
2
+
3
// Default cap on retained log rows before rotation trims the oldest entries.
const DEFAULT_MAX_ROWS = 100_000
4
+
5
+ export function rotateLogs(db: Database, maxRows = DEFAULT_MAX_ROWS): number {
6
+ const total = (db.prepare("SELECT COUNT(*) as c FROM logs").get() as { c: number }).c
7
+ if (total <= maxRows) return 0
8
+ const toDelete = total - maxRows
9
+ db.prepare(`
10
+ DELETE FROM logs WHERE id IN (
11
+ SELECT id FROM logs ORDER BY timestamp ASC LIMIT ${toDelete}
12
+ )
13
+ `).run()
14
+ return toDelete
15
+ }
16
+
17
+ export function rotateByProject(db: Database, projectId: string, maxRows = DEFAULT_MAX_ROWS): number {
18
+ const total = (db.prepare("SELECT COUNT(*) as c FROM logs WHERE project_id = $p").get({ $p: projectId }) as { c: number }).c
19
+ if (total <= maxRows) return 0
20
+ const toDelete = total - maxRows
21
+ db.prepare(`
22
+ DELETE FROM logs WHERE id IN (
23
+ SELECT id FROM logs WHERE project_id = $p ORDER BY timestamp ASC LIMIT ${toDelete}
24
+ )
25
+ `).run({ $p: projectId })
26
+ return toDelete
27
+ }
@@ -0,0 +1,112 @@
1
+ import type { Database } from "bun:sqlite"
2
+ import { ingestBatch } from "./ingest.ts"
3
+ import { saveSnapshot } from "./perf.ts"
4
+ import { getPage, touchPage } from "./projects.ts"
5
+ import type { LogEntry } from "../types/index.ts"
6
+
7
// Outcome of a single headless page scan.
export interface ScanResult {
  logsCollected: number  // console + pageerror + requestfailed entries ingested
  errorsFound: number    // console errors plus uncaught page exceptions
  perfScore: number | null  // NOTE(review): never populated by scanPage yet — always null
}
12
+
13
/**
 * Headlessly load a page with Playwright and collect its console output,
 * uncaught JS exceptions, failed network requests, and basic
 * navigation-timing metrics. Collected entries are ingested as logs; timing
 * metrics are stored as a perf snapshot when available.
 *
 * @param urlOverride - load this URL instead of the stored page URL
 *   (falsy values, including "", fall back to the stored URL).
 * @throws when neither an override nor a stored URL exists for the page.
 */
export async function scanPage(db: Database, projectId: string, pageId: string, urlOverride?: string): Promise<ScanResult> {
  const page = getPage(db, pageId)
  const url = urlOverride || page?.url
  if (!url) throw new Error(`No URL for page ${pageId}`)

  // Dynamic import defers loading playwright until a scan actually runs.
  const { chromium } = await import("playwright")
  const browser = await chromium.launch({ headless: true })
  const context = await browser.newContext({
    userAgent: "Mozilla/5.0 (@hasna/logs scanner) AppleWebKit/537.36",
  })
  const browserPage = await context.newPage()

  const collected: LogEntry[] = []
  let errorsFound = 0

  // Capture console output
  browserPage.on("console", (msg) => {
    // Map Playwright console types onto log levels; anything unrecognized
    // (log, trace, dir, ...) is recorded as "debug".
    const level = msg.type() === "error" ? "error" : msg.type() === "warning" ? "warn" : msg.type() === "info" ? "info" : "debug"
    if (level === "error") errorsFound++
    collected.push({
      project_id: projectId,
      page_id: pageId,
      level: level as LogEntry["level"],
      source: "scanner",
      message: msg.text(),
      url,
    })
  })

  // Capture page errors (uncaught JS exceptions)
  browserPage.on("pageerror", (err) => {
    errorsFound++
    collected.push({
      project_id: projectId,
      page_id: pageId,
      level: "error",
      source: "scanner",
      message: err.message,
      stack_trace: err.stack,
      url,
    })
  })

  // Capture network failures
  browserPage.on("requestfailed", (req) => {
    collected.push({
      project_id: projectId,
      page_id: pageId,
      level: "warn",
      source: "scanner",
      message: `Network request failed: ${req.url()} — ${req.failure()?.errorText ?? "unknown"}`,
      url,
    })
  })

  // NOTE(review): never assigned below, so the returned perfScore is always
  // null — placeholder for a future Lighthouse-style score.
  let perfScore: number | null = null

  try {
    await browserPage.goto(url, { waitUntil: "networkidle", timeout: 30_000 })

    // Try basic perf metrics via CDP
    try {
      const metrics = await browserPage.evaluate(() => {
        const nav = performance.getEntriesByType("navigation")[0] as PerformanceNavigationTiming | undefined
        const paint = performance.getEntriesByName("first-contentful-paint")[0]
        return {
          ttfb: nav ? nav.responseStart - nav.requestStart : null,
          fcp: paint?.startTime ?? null,
          domLoad: nav ? nav.domContentLoadedEventEnd - nav.startTime : null,
        }
      })
      // Store what we can without full Lighthouse
      if (metrics.fcp !== null || metrics.ttfb !== null) {
        saveSnapshot(db, {
          project_id: projectId,
          page_id: pageId,
          url,
          fcp: metrics.fcp,
          ttfb: metrics.ttfb,
          lcp: null,
          cls: null,
          tti: metrics.domLoad,
          score: null,
          raw_audit: JSON.stringify(metrics),
        })
      }
    } catch {
      // perf metrics optional
    }
  } finally {
    // Closing the browser also tears down the context and page.
    await browser.close()
  }

  if (collected.length > 0) {
    ingestBatch(db, collected)
    // Only stamp the page row when it actually exists in the DB.
    if (page) touchPage(db, pageId)
  }

  return { logsCollected: collected.length, errorsFound, perfScore }
}
@@ -0,0 +1,57 @@
1
+ import type { Database } from "bun:sqlite"
2
+ import cron from "node-cron"
3
+ import { finishScanRun, createScanRun, listJobs, updateJob } from "./jobs.ts"
4
+ import { listPages } from "./projects.ts"
5
+ import { scanPage } from "./scanner.ts"
6
+
7
// Registry of active cron tasks, keyed by job id.
const tasks = new Map<string, cron.ScheduledTask>()
8
+
9
+ export function startScheduler(db: Database): void {
10
+ const jobs = listJobs(db).filter(j => j.enabled)
11
+ for (const job of jobs) {
12
+ scheduleJob(db, job.id, job.schedule, job.project_id, job.page_id ?? undefined)
13
+ }
14
+ console.log(`Scheduler started: ${tasks.size} job(s) active`)
15
+ }
16
+
17
+ export function scheduleJob(db: Database, jobId: string, schedule: string, projectId: string, pageId?: string): void {
18
+ if (tasks.has(jobId)) tasks.get(jobId)!.stop()
19
+ const task = cron.schedule(schedule, async () => {
20
+ await runJob(db, jobId, projectId, pageId)
21
+ })
22
+ tasks.set(jobId, task)
23
+ }
24
+
25
+ export function unscheduleJob(jobId: string): void {
26
+ tasks.get(jobId)?.stop()
27
+ tasks.delete(jobId)
28
+ }
29
+
30
+ export async function runJob(db: Database, jobId: string, projectId: string, pageId?: string): Promise<void> {
31
+ const pages = pageId
32
+ ? [{ id: pageId, url: "" }] // will resolve url in scan
33
+ : listPages(db, projectId)
34
+
35
+ await Promise.all(pages.map(async (page) => {
36
+ const run = createScanRun(db, { job_id: jobId, page_id: page.id })
37
+ try {
38
+ const result = await scanPage(db, projectId, page.id, page.url)
39
+ finishScanRun(db, run.id, {
40
+ status: "completed",
41
+ logs_collected: result.logsCollected,
42
+ errors_found: result.errorsFound,
43
+ perf_score: result.perfScore ?? undefined,
44
+ })
45
+ } catch (err) {
46
+ finishScanRun(db, run.id, { status: "failed", logs_collected: 0, errors_found: 0 })
47
+ console.error(`Scan failed for page ${page.id}:`, err)
48
+ }
49
+ }))
50
+
51
+ updateJob(db, jobId, { last_run_at: new Date().toISOString() })
52
+ }
53
+
54
+ export function stopScheduler(): void {
55
+ for (const task of tasks.values()) task.stop()
56
+ tasks.clear()
57
+ }
@@ -0,0 +1,38 @@
1
+ import { describe, expect, it } from "bun:test"
2
+ import { createTestDb } from "../db/index.ts"
3
+ import { ingestBatch } from "./ingest.ts"
4
+ import { summarizeLogs } from "./summarize.ts"
5
+
6
+ describe("summarizeLogs", () => {
7
+ it("returns warn/error/fatal counts grouped by service", () => {
8
+ const db = createTestDb()
9
+ ingestBatch(db, [
10
+ { level: "error", message: "e1", service: "api" },
11
+ { level: "error", message: "e2", service: "api" },
12
+ { level: "warn", message: "w1", service: "db" },
13
+ { level: "info", message: "i1", service: "api" }, // excluded
14
+ { level: "debug", message: "d1", service: "api" }, // excluded
15
+ ])
16
+ const summary = summarizeLogs(db)
17
+ expect(summary.length).toBe(2)
18
+ const api = summary.find(s => s.service === "api" && s.level === "error")
19
+ expect(api?.count).toBe(2)
20
+ const db2 = summary.find(s => s.service === "db")
21
+ expect(db2?.count).toBe(1)
22
+ })
23
+
24
+ it("excludes info/debug from summary", () => {
25
+ const db = createTestDb()
26
+ ingestBatch(db, [
27
+ { level: "info", message: "ok" },
28
+ { level: "debug", message: "trace" },
29
+ ])
30
+ const summary = summarizeLogs(db)
31
+ expect(summary).toHaveLength(0)
32
+ })
33
+
34
+ it("returns empty for no logs", () => {
35
+ const db = createTestDb()
36
+ expect(summarizeLogs(db)).toHaveLength(0)
37
+ })
38
+ })
@@ -0,0 +1,21 @@
1
+ import type { Database } from "bun:sqlite"
2
+ import type { LogSummary } from "../types/index.ts"
3
+
4
+ export function summarizeLogs(db: Database, projectId?: string, since?: string): LogSummary[] {
5
+ const conditions: string[] = ["level IN ('warn','error','fatal')"]
6
+ const params: Record<string, unknown> = {}
7
+
8
+ if (projectId) { conditions.push("project_id = $project_id"); params.$project_id = projectId }
9
+ if (since) { conditions.push("timestamp >= $since"); params.$since = since }
10
+
11
+ const where = `WHERE ${conditions.join(" AND ")}`
12
+ const sql = `
13
+ SELECT project_id, service, page_id, level,
14
+ COUNT(*) as count,
15
+ MAX(timestamp) as latest
16
+ FROM logs ${where}
17
+ GROUP BY project_id, service, page_id, level
18
+ ORDER BY count DESC
19
+ `
20
+ return db.prepare(sql).all(params) as LogSummary[]
21
+ }
@@ -0,0 +1,165 @@
1
+ #!/usr/bin/env bun
2
+ import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"
3
+ import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"
4
+ import { z } from "zod"
5
+ import { getDb } from "../db/index.ts"
6
+ import { ingestLog } from "../lib/ingest.ts"
7
+ import { getLogContext, searchLogs, tailLogs } from "../lib/query.ts"
8
+ import { summarizeLogs } from "../lib/summarize.ts"
9
+ import { createJob, listJobs } from "../lib/jobs.ts"
10
+ import { createPage, createProject, listPages, listProjects } from "../lib/projects.ts"
11
+ import { getLatestSnapshot, getPerfTrend, scoreLabel } from "../lib/perf.ts"
12
+ import type { LogLevel } from "../types/index.ts"
13
+
14
// Single shared SQLite handle and the MCP server identity for this process.
const db = getDb()
const server = new McpServer({ name: "logs", version: "0.0.1" })

// Tool registry for search_tools / describe_tools pattern
// (tool name -> one-line description; argument names in parentheses,
// "?" marks optional arguments).
const TOOLS: Record<string, string> = {
  register_project: "Register a project (name, github_repo?, base_url?, description?)",
  register_page: "Register a page URL to a project (project_id, url, path?, name?)",
  create_scan_job: "Schedule headless page scans (project_id, schedule, page_id?)",
  log_push: "Push a log entry (level, message, project_id?, service?, trace_id?, metadata?)",
  log_search: "Search logs (project_id?, page_id?, level?, since?, until?, text?, limit?)",
  log_tail: "Get N most recent logs (project_id?, n?)",
  log_summary: "Error/warn counts by service/page (project_id?, since?)",
  log_context: "All logs for a trace_id",
  perf_snapshot: "Latest performance snapshot for a project/page (project_id, page_id?)",
  perf_trend: "Performance over time (project_id, page_id?, since?, limit?)",
  scan_status: "Last scan runs per project (project_id?)",
  list_projects: "List all registered projects",
  list_pages: "List pages for a project (project_id)",
  search_tools: "Search available tools by keyword (query)",
  describe_tools: "List all tools with descriptions",
}
35
+
36
// Case-insensitive keyword search over tool names and descriptions.
// Note: tool names are matched as-is (they are already lowercase).
server.tool("search_tools", { query: z.string() }, ({ query }) => {
  const q = query.toLowerCase()
  const matches = Object.entries(TOOLS).filter(([k, v]) => k.includes(q) || v.toLowerCase().includes(q))
  return { content: [{ type: "text", text: matches.map(([k, v]) => `${k}: ${v}`).join("\n") || "No matches" }] }
})

// Dump the full registry, one "name: description" per line.
server.tool("describe_tools", {}, () => {
  const text = Object.entries(TOOLS).map(([k, v]) => `${k}: ${v}`).join("\n")
  return { content: [{ type: "text", text }] }
})
46
+
47
// Create a project row; returns the created project as JSON.
server.tool("register_project", {
  name: z.string(),
  github_repo: z.string().optional(),
  base_url: z.string().optional(),
  description: z.string().optional(),
}, (args) => {
  const project = createProject(db, args)
  return { content: [{ type: "text", text: JSON.stringify(project) }] }
})

// Attach a page URL to an existing project; returns the created page as JSON.
server.tool("register_page", {
  project_id: z.string(),
  url: z.string(),
  path: z.string().optional(),
  name: z.string().optional(),
}, (args) => {
  const page = createPage(db, args)
  return { content: [{ type: "text", text: JSON.stringify(page) }] }
})

// Create a recurring scan job (schedule is a cron expression; omitting
// page_id scans every page of the project).
server.tool("create_scan_job", {
  project_id: z.string(),
  schedule: z.string(),
  page_id: z.string().optional(),
}, (args) => {
  const job = createJob(db, args)
  return { content: [{ type: "text", text: JSON.stringify(job) }] }
})

// Ingest a single log entry; replies with the new row id.
server.tool("log_push", {
  level: z.enum(["debug", "info", "warn", "error", "fatal"]),
  message: z.string(),
  project_id: z.string().optional(),
  service: z.string().optional(),
  trace_id: z.string().optional(),
  session_id: z.string().optional(),
  agent: z.string().optional(),
  url: z.string().optional(),
  metadata: z.record(z.unknown()).optional(),
}, (args) => {
  const row = ingestLog(db, args)
  return { content: [{ type: "text", text: `Logged: ${row.id}` }] }
})
90
+
91
// Filtered log search; `level` is a comma-separated list (e.g. "warn,error")
// split into an array before querying.
server.tool("log_search", {
  project_id: z.string().optional(),
  page_id: z.string().optional(),
  level: z.string().optional(),
  service: z.string().optional(),
  since: z.string().optional(),
  until: z.string().optional(),
  text: z.string().optional(),
  trace_id: z.string().optional(),
  limit: z.number().optional(),
}, (args) => {
  const rows = searchLogs(db, {
    ...args,
    level: args.level ? (args.level.split(",") as LogLevel[]) : undefined,
  })
  return { content: [{ type: "text", text: JSON.stringify(rows) }] }
})

// The n most recent logs (default 50), optionally scoped to a project.
server.tool("log_tail", {
  project_id: z.string().optional(),
  n: z.number().optional(),
}, ({ project_id, n }) => {
  const rows = tailLogs(db, project_id, n ?? 50)
  return { content: [{ type: "text", text: JSON.stringify(rows) }] }
})

// warn/error/fatal counts grouped by project/service/page/level.
server.tool("log_summary", {
  project_id: z.string().optional(),
  since: z.string().optional(),
}, ({ project_id, since }) => {
  const summary = summarizeLogs(db, project_id, since)
  return { content: [{ type: "text", text: JSON.stringify(summary) }] }
})

// Every log sharing a trace id, oldest first.
server.tool("log_context", { trace_id: z.string() }, ({ trace_id }) => {
  const rows = getLogContext(db, trace_id)
  return { content: [{ type: "text", text: JSON.stringify(rows) }] }
})
129
+
130
// Latest perf snapshot plus a human-readable score label. When no snapshot
// exists, the null spread yields just { label: "unknown" }.
server.tool("perf_snapshot", {
  project_id: z.string(),
  page_id: z.string().optional(),
}, ({ project_id, page_id }) => {
  const snap = getLatestSnapshot(db, project_id, page_id)
  const label = snap ? scoreLabel(snap.score) : "unknown"
  return { content: [{ type: "text", text: JSON.stringify({ ...snap, label }) }] }
})

// Perf snapshots over time (default limit 50).
server.tool("perf_trend", {
  project_id: z.string(),
  page_id: z.string().optional(),
  since: z.string().optional(),
  limit: z.number().optional(),
}, ({ project_id, page_id, since, limit }) => {
  const trend = getPerfTrend(db, project_id, page_id, since, limit ?? 50)
  return { content: [{ type: "text", text: JSON.stringify(trend) }] }
})

// Scan job status (jobs include last_run_at), optionally per project.
server.tool("scan_status", {
  project_id: z.string().optional(),
}, ({ project_id }) => {
  const jobs = listJobs(db, project_id)
  return { content: [{ type: "text", text: JSON.stringify(jobs) }] }
})

// All registered projects.
server.tool("list_projects", {}, () => {
  return { content: [{ type: "text", text: JSON.stringify(listProjects(db)) }] }
})

// All pages belonging to one project.
server.tool("list_pages", { project_id: z.string() }, ({ project_id }) => {
  return { content: [{ type: "text", text: JSON.stringify(listPages(db, project_id)) }] }
})
163
+
164
// Serve the MCP protocol over stdio; connect() keeps the process alive
// until the transport closes.
const transport = new StdioServerTransport()
await server.connect(transport)
@@ -0,0 +1,42 @@
1
+ #!/usr/bin/env bun
2
+ import { Hono } from "hono"
3
+ import { cors } from "hono/cors"
4
+ import { getDb } from "../db/index.ts"
5
+ import { getBrowserScript } from "../lib/browser-script.ts"
6
+ import { startScheduler } from "../lib/scheduler.ts"
7
+ import { jobsRoutes } from "./routes/jobs.ts"
8
+ import { logsRoutes } from "./routes/logs.ts"
9
+ import { perfRoutes } from "./routes/perf.ts"
10
+ import { projectsRoutes } from "./routes/projects.ts"
11
+
12
// HTTP port, overridable via LOGS_PORT (defaults to 3460).
const PORT = Number(process.env.LOGS_PORT ?? 3460)
const db = getDb()
const app = new Hono()

// Allow cross-origin requests on every route — the browser tracking script
// posts logs from arbitrary origins.
app.use("*", cors())

// Browser tracking script
app.get("/script.js", (c) => {
  // Reconstruct the public origin from proxy headers so the generated script
  // points back at this server.
  const host = `${c.req.header("x-forwarded-proto") ?? "http"}://${c.req.header("host") ?? `localhost:${PORT}`}`
  c.header("Content-Type", "application/javascript")
  c.header("Cache-Control", "public, max-age=300")
  return c.text(getBrowserScript(host))
})

// API routes
app.route("/api/logs", logsRoutes(db))
app.route("/api/projects", projectsRoutes(db))
app.route("/api/jobs", jobsRoutes(db))
app.route("/api/perf", perfRoutes(db))

// Health/identity endpoint.
app.get("/", (c) => c.json({ service: "@hasna/logs", port: PORT, status: "ok" }))

// Start scheduler
startScheduler(db)

console.log(`@hasna/logs server running on http://localhost:${PORT}`)

// Bun reads this default export as the HTTP server configuration.
export default {
  port: PORT,
  fetch: app.fetch,
}
@@ -0,0 +1,32 @@
1
+ import { Hono } from "hono"
2
+ import type { Database } from "bun:sqlite"
3
+ import { createJob, deleteJob, listJobs, updateJob } from "../../lib/jobs.ts"
4
+
5
+ export function jobsRoutes(db: Database) {
6
+ const app = new Hono()
7
+
8
+ app.post("/", async (c) => {
9
+ const body = await c.req.json()
10
+ if (!body.project_id || !body.schedule) return c.json({ error: "project_id and schedule are required" }, 422)
11
+ return c.json(createJob(db, body), 201)
12
+ })
13
+
14
+ app.get("/", (c) => {
15
+ const { project_id } = c.req.query()
16
+ return c.json(listJobs(db, project_id || undefined))
17
+ })
18
+
19
+ app.put("/:id", async (c) => {
20
+ const body = await c.req.json()
21
+ const updated = updateJob(db, c.req.param("id"), body)
22
+ if (!updated) return c.json({ error: "not found" }, 404)
23
+ return c.json(updated)
24
+ })
25
+
26
+ app.delete("/:id", (c) => {
27
+ deleteJob(db, c.req.param("id"))
28
+ return c.json({ deleted: true })
29
+ })
30
+
31
+ return app
32
+ }
@@ -0,0 +1,65 @@
1
+ import { Hono } from "hono"
2
+ import type { Database } from "bun:sqlite"
3
+ import { ingestBatch, ingestLog } from "../../lib/ingest.ts"
4
+ import { getLogContext, searchLogs, tailLogs } from "../../lib/query.ts"
5
+ import { summarizeLogs } from "../../lib/summarize.ts"
6
+ import type { LogEntry, LogLevel } from "../../types/index.ts"
7
+
8
+ export function logsRoutes(db: Database) {
9
+ const app = new Hono()
10
+
11
+ // POST /api/logs — ingest single or batch
12
+ app.post("/", async (c) => {
13
+ const body = await c.req.json()
14
+ if (Array.isArray(body)) {
15
+ const rows = ingestBatch(db, body as LogEntry[])
16
+ return c.json({ inserted: rows.length }, 201)
17
+ }
18
+ const row = ingestLog(db, body as LogEntry)
19
+ return c.json(row, 201)
20
+ })
21
+
22
+ // GET /api/logs
23
+ app.get("/", (c) => {
24
+ const { project_id, page_id, level, service, since, until, text, trace_id, limit, offset, fields } = c.req.query()
25
+ const rows = searchLogs(db, {
26
+ project_id: project_id || undefined,
27
+ page_id: page_id || undefined,
28
+ level: level ? (level.split(",") as LogLevel[]) : undefined,
29
+ service: service || undefined,
30
+ since: since || undefined,
31
+ until: until || undefined,
32
+ text: text || undefined,
33
+ trace_id: trace_id || undefined,
34
+ limit: limit ? Number(limit) : 100,
35
+ offset: offset ? Number(offset) : 0,
36
+ })
37
+ if (fields) {
38
+ const keys = fields.split(",")
39
+ return c.json(rows.map(r => Object.fromEntries(keys.map(k => [k, (r as Record<string, unknown>)[k]]))))
40
+ }
41
+ return c.json(rows)
42
+ })
43
+
44
+ // GET /api/logs/tail
45
+ app.get("/tail", (c) => {
46
+ const { project_id, n } = c.req.query()
47
+ const rows = tailLogs(db, project_id || undefined, n ? Number(n) : 50)
48
+ return c.json(rows)
49
+ })
50
+
51
+ // GET /api/logs/summary
52
+ app.get("/summary", (c) => {
53
+ const { project_id, since } = c.req.query()
54
+ const summary = summarizeLogs(db, project_id || undefined, since || undefined)
55
+ return c.json(summary)
56
+ })
57
+
58
+ // GET /api/logs/:trace_id/context
59
+ app.get("/:trace_id/context", (c) => {
60
+ const rows = getLogContext(db, c.req.param("trace_id"))
61
+ return c.json(rows)
62
+ })
63
+
64
+ return app
65
+ }
@@ -0,0 +1,23 @@
1
+ import { Hono } from "hono"
2
+ import type { Database } from "bun:sqlite"
3
+ import { getLatestSnapshot, getPerfTrend } from "../../lib/perf.ts"
4
+
5
+ export function perfRoutes(db: Database) {
6
+ const app = new Hono()
7
+
8
+ app.get("/", (c) => {
9
+ const { project_id, page_id, since } = c.req.query()
10
+ if (!project_id) return c.json({ error: "project_id is required" }, 422)
11
+ const snap = getLatestSnapshot(db, project_id, page_id || undefined)
12
+ return c.json(snap)
13
+ })
14
+
15
+ app.get("/trend", (c) => {
16
+ const { project_id, page_id, since, limit } = c.req.query()
17
+ if (!project_id) return c.json({ error: "project_id is required" }, 422)
18
+ const trend = getPerfTrend(db, project_id, page_id || undefined, since || undefined, limit ? Number(limit) : 50)
19
+ return c.json(trend)
20
+ })
21
+
22
+ return app
23
+ }