@hasna/logs 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/biome.json +13 -0
- package/dist/cli/index.js +2272 -0
- package/dist/mcp/index.js +28507 -0
- package/dist/server/index.js +1862 -0
- package/package.json +43 -0
- package/sdk/package.json +21 -0
- package/sdk/src/index.ts +143 -0
- package/src/cli/index.ts +193 -0
- package/src/db/index.test.ts +33 -0
- package/src/db/index.ts +153 -0
- package/src/lib/browser-script.test.ts +35 -0
- package/src/lib/browser-script.ts +31 -0
- package/src/lib/github.ts +38 -0
- package/src/lib/ingest.test.ts +57 -0
- package/src/lib/ingest.ts +51 -0
- package/src/lib/jobs.test.ts +69 -0
- package/src/lib/jobs.ts +63 -0
- package/src/lib/lighthouse.ts +65 -0
- package/src/lib/perf.test.ts +45 -0
- package/src/lib/perf.ts +46 -0
- package/src/lib/projects.test.ts +73 -0
- package/src/lib/projects.ts +59 -0
- package/src/lib/query.test.ts +104 -0
- package/src/lib/query.ts +56 -0
- package/src/lib/rotate.test.ts +37 -0
- package/src/lib/rotate.ts +27 -0
- package/src/lib/scanner.ts +112 -0
- package/src/lib/scheduler.ts +57 -0
- package/src/lib/summarize.test.ts +38 -0
- package/src/lib/summarize.ts +21 -0
- package/src/mcp/index.ts +165 -0
- package/src/server/index.ts +42 -0
- package/src/server/routes/jobs.ts +32 -0
- package/src/server/routes/logs.ts +65 -0
- package/src/server/routes/perf.ts +23 -0
- package/src/server/routes/projects.ts +42 -0
- package/src/server/server.test.ts +194 -0
- package/src/types/index.ts +119 -0
- package/tsconfig.json +22 -0
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
import type { Database } from "bun:sqlite"
|
|
2
|
+
import type { Project } from "../types/index.ts"
|
|
3
|
+
import { updateProject } from "./projects.ts"
|
|
4
|
+
|
|
5
|
+
// Subset of fields read from GitHub's GET /repos/{owner}/{repo}
// response (REST API v3). Only the fields consumed below are declared.
interface GithubRepo {
  description: string | null
  default_branch: string
  topics: string[]
}

// One element of GET /repos/{owner}/{repo}/commits — only the sha is used.
interface GithubCommit {
  sha: string
}
|
|
14
|
+
|
|
15
|
+
export async function syncGithubRepo(db: Database, project: Project): Promise<Project | null> {
|
|
16
|
+
if (!project.github_repo) return project
|
|
17
|
+
const repo = project.github_repo.replace(/^https?:\/\/github\.com\//, "")
|
|
18
|
+
const headers: Record<string, string> = { "Accept": "application/vnd.github.v3+json" }
|
|
19
|
+
if (process.env.GITHUB_TOKEN) headers["Authorization"] = `Bearer ${process.env.GITHUB_TOKEN}`
|
|
20
|
+
|
|
21
|
+
try {
|
|
22
|
+
const [repoRes, commitRes] = await Promise.all([
|
|
23
|
+
fetch(`https://api.github.com/repos/${repo}`, { headers }),
|
|
24
|
+
fetch(`https://api.github.com/repos/${repo}/commits?per_page=1`, { headers }),
|
|
25
|
+
])
|
|
26
|
+
if (!repoRes.ok) return project
|
|
27
|
+
const repoData = await repoRes.json() as GithubRepo
|
|
28
|
+
const commits = commitRes.ok ? await commitRes.json() as GithubCommit[] : []
|
|
29
|
+
return updateProject(db, project.id, {
|
|
30
|
+
github_description: repoData.description,
|
|
31
|
+
github_branch: repoData.default_branch,
|
|
32
|
+
github_sha: commits[0]?.sha ?? null,
|
|
33
|
+
last_synced_at: new Date().toISOString(),
|
|
34
|
+
})
|
|
35
|
+
} catch {
|
|
36
|
+
return project
|
|
37
|
+
}
|
|
38
|
+
}
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
import { describe, expect, it } from "bun:test"
|
|
2
|
+
import { createTestDb } from "../db/index.ts"
|
|
3
|
+
import { ingestBatch, ingestLog } from "./ingest.ts"
|
|
4
|
+
|
|
5
|
+
describe("ingest", () => {
|
|
6
|
+
it("inserts a single log entry", () => {
|
|
7
|
+
const db = createTestDb()
|
|
8
|
+
const row = ingestLog(db, { level: "error", message: "test error", service: "api" })
|
|
9
|
+
expect(row.id).toBeTruthy()
|
|
10
|
+
expect(row.level).toBe("error")
|
|
11
|
+
expect(row.message).toBe("test error")
|
|
12
|
+
expect(row.service).toBe("api")
|
|
13
|
+
expect(row.source).toBe("sdk")
|
|
14
|
+
expect(row.timestamp).toBeTruthy()
|
|
15
|
+
})
|
|
16
|
+
|
|
17
|
+
it("inserts with all optional fields", () => {
|
|
18
|
+
const db = createTestDb()
|
|
19
|
+
const row = ingestLog(db, {
|
|
20
|
+
level: "info",
|
|
21
|
+
message: "hello",
|
|
22
|
+
source: "scanner",
|
|
23
|
+
trace_id: "trace-123",
|
|
24
|
+
session_id: "sess-456",
|
|
25
|
+
agent: "brutus",
|
|
26
|
+
url: "https://example.com",
|
|
27
|
+
stack_trace: "Error at line 1",
|
|
28
|
+
metadata: { foo: "bar" },
|
|
29
|
+
})
|
|
30
|
+
expect(row.trace_id).toBe("trace-123")
|
|
31
|
+
expect(row.agent).toBe("brutus")
|
|
32
|
+
expect(row.metadata).toBe(JSON.stringify({ foo: "bar" }))
|
|
33
|
+
})
|
|
34
|
+
|
|
35
|
+
it("inserts a batch", () => {
|
|
36
|
+
const db = createTestDb()
|
|
37
|
+
const rows = ingestBatch(db, [
|
|
38
|
+
{ level: "warn", message: "warn 1" },
|
|
39
|
+
{ level: "error", message: "err 1" },
|
|
40
|
+
{ level: "info", message: "info 1" },
|
|
41
|
+
])
|
|
42
|
+
expect(rows).toHaveLength(3)
|
|
43
|
+
expect(rows[0]!.level).toBe("warn")
|
|
44
|
+
expect(rows[2]!.level).toBe("info")
|
|
45
|
+
})
|
|
46
|
+
|
|
47
|
+
it("batch is transactional", () => {
|
|
48
|
+
const db = createTestDb()
|
|
49
|
+
const before = (db.prepare("SELECT COUNT(*) as c FROM logs").get() as { c: number }).c
|
|
50
|
+
ingestBatch(db, [
|
|
51
|
+
{ level: "debug", message: "a" },
|
|
52
|
+
{ level: "fatal", message: "b" },
|
|
53
|
+
])
|
|
54
|
+
const after = (db.prepare("SELECT COUNT(*) as c FROM logs").get() as { c: number }).c
|
|
55
|
+
expect(after - before).toBe(2)
|
|
56
|
+
})
|
|
57
|
+
})
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
import type { Database } from "bun:sqlite"
|
|
2
|
+
import type { LogEntry, LogRow } from "../types/index.ts"
|
|
3
|
+
|
|
4
|
+
export function ingestLog(db: Database, entry: LogEntry): LogRow {
|
|
5
|
+
const stmt = db.prepare(`
|
|
6
|
+
INSERT INTO logs (project_id, page_id, level, source, service, message, trace_id, session_id, agent, url, stack_trace, metadata)
|
|
7
|
+
VALUES ($project_id, $page_id, $level, $source, $service, $message, $trace_id, $session_id, $agent, $url, $stack_trace, $metadata)
|
|
8
|
+
RETURNING *
|
|
9
|
+
`)
|
|
10
|
+
return stmt.get({
|
|
11
|
+
$project_id: entry.project_id ?? null,
|
|
12
|
+
$page_id: entry.page_id ?? null,
|
|
13
|
+
$level: entry.level,
|
|
14
|
+
$source: entry.source ?? "sdk",
|
|
15
|
+
$service: entry.service ?? null,
|
|
16
|
+
$message: entry.message,
|
|
17
|
+
$trace_id: entry.trace_id ?? null,
|
|
18
|
+
$session_id: entry.session_id ?? null,
|
|
19
|
+
$agent: entry.agent ?? null,
|
|
20
|
+
$url: entry.url ?? null,
|
|
21
|
+
$stack_trace: entry.stack_trace ?? null,
|
|
22
|
+
$metadata: entry.metadata ? JSON.stringify(entry.metadata) : null,
|
|
23
|
+
}) as LogRow
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
export function ingestBatch(db: Database, entries: LogEntry[]): LogRow[] {
|
|
27
|
+
const insert = db.prepare(`
|
|
28
|
+
INSERT INTO logs (project_id, page_id, level, source, service, message, trace_id, session_id, agent, url, stack_trace, metadata)
|
|
29
|
+
VALUES ($project_id, $page_id, $level, $source, $service, $message, $trace_id, $session_id, $agent, $url, $stack_trace, $metadata)
|
|
30
|
+
RETURNING *
|
|
31
|
+
`)
|
|
32
|
+
const tx = db.transaction((items: LogEntry[]) =>
|
|
33
|
+
items.map(entry =>
|
|
34
|
+
insert.get({
|
|
35
|
+
$project_id: entry.project_id ?? null,
|
|
36
|
+
$page_id: entry.page_id ?? null,
|
|
37
|
+
$level: entry.level,
|
|
38
|
+
$source: entry.source ?? "sdk",
|
|
39
|
+
$service: entry.service ?? null,
|
|
40
|
+
$message: entry.message,
|
|
41
|
+
$trace_id: entry.trace_id ?? null,
|
|
42
|
+
$session_id: entry.session_id ?? null,
|
|
43
|
+
$agent: entry.agent ?? null,
|
|
44
|
+
$url: entry.url ?? null,
|
|
45
|
+
$stack_trace: entry.stack_trace ?? null,
|
|
46
|
+
$metadata: entry.metadata ? JSON.stringify(entry.metadata) : null,
|
|
47
|
+
}) as LogRow
|
|
48
|
+
)
|
|
49
|
+
)
|
|
50
|
+
return tx(entries)
|
|
51
|
+
}
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
import { describe, expect, it } from "bun:test"
|
|
2
|
+
import { createTestDb } from "../db/index.ts"
|
|
3
|
+
import { createJob, createScanRun, deleteJob, finishScanRun, listJobs, listScanRuns, updateJob } from "./jobs.ts"
|
|
4
|
+
|
|
5
|
+
function seedProject(db: ReturnType<typeof createTestDb>) {
|
|
6
|
+
return db.prepare("INSERT INTO projects (name) VALUES ('test') RETURNING id").get() as { id: string }
|
|
7
|
+
}
|
|
8
|
+
|
|
9
|
+
describe("jobs", () => {
|
|
10
|
+
it("creates a job", () => {
|
|
11
|
+
const db = createTestDb()
|
|
12
|
+
const p = seedProject(db)
|
|
13
|
+
const job = createJob(db, { project_id: p.id, schedule: "*/5 * * * *" })
|
|
14
|
+
expect(job.id).toBeTruthy()
|
|
15
|
+
expect(job.schedule).toBe("*/5 * * * *")
|
|
16
|
+
expect(job.enabled).toBe(1)
|
|
17
|
+
})
|
|
18
|
+
|
|
19
|
+
it("lists jobs for a project", () => {
|
|
20
|
+
const db = createTestDb()
|
|
21
|
+
const p = seedProject(db)
|
|
22
|
+
createJob(db, { project_id: p.id, schedule: "*/5 * * * *" })
|
|
23
|
+
createJob(db, { project_id: p.id, schedule: "*/10 * * * *" })
|
|
24
|
+
expect(listJobs(db, p.id)).toHaveLength(2)
|
|
25
|
+
})
|
|
26
|
+
|
|
27
|
+
it("updates a job", () => {
|
|
28
|
+
const db = createTestDb()
|
|
29
|
+
const p = seedProject(db)
|
|
30
|
+
const job = createJob(db, { project_id: p.id, schedule: "*/5 * * * *" })
|
|
31
|
+
const updated = updateJob(db, job.id, { enabled: 0 })
|
|
32
|
+
expect(updated?.enabled).toBe(0)
|
|
33
|
+
})
|
|
34
|
+
|
|
35
|
+
it("deletes a job", () => {
|
|
36
|
+
const db = createTestDb()
|
|
37
|
+
const p = seedProject(db)
|
|
38
|
+
const job = createJob(db, { project_id: p.id, schedule: "*/5 * * * *" })
|
|
39
|
+
deleteJob(db, job.id)
|
|
40
|
+
expect(listJobs(db, p.id)).toHaveLength(0)
|
|
41
|
+
})
|
|
42
|
+
})
|
|
43
|
+
|
|
44
|
+
describe("scan runs", () => {
|
|
45
|
+
it("creates and finishes a scan run", () => {
|
|
46
|
+
const db = createTestDb()
|
|
47
|
+
const p = seedProject(db)
|
|
48
|
+
const job = createJob(db, { project_id: p.id, schedule: "*/5 * * * *" })
|
|
49
|
+
const run = createScanRun(db, { job_id: job.id })
|
|
50
|
+
expect(run.status).toBe("running")
|
|
51
|
+
expect(run.logs_collected).toBe(0)
|
|
52
|
+
|
|
53
|
+
const finished = finishScanRun(db, run.id, { status: "completed", logs_collected: 12, errors_found: 3, perf_score: 87.5 })
|
|
54
|
+
expect(finished?.status).toBe("completed")
|
|
55
|
+
expect(finished?.logs_collected).toBe(12)
|
|
56
|
+
expect(finished?.errors_found).toBe(3)
|
|
57
|
+
expect(finished?.perf_score).toBe(87.5)
|
|
58
|
+
expect(finished?.finished_at).toBeTruthy()
|
|
59
|
+
})
|
|
60
|
+
|
|
61
|
+
it("lists scan runs for a job", () => {
|
|
62
|
+
const db = createTestDb()
|
|
63
|
+
const p = seedProject(db)
|
|
64
|
+
const job = createJob(db, { project_id: p.id, schedule: "*/5 * * * *" })
|
|
65
|
+
createScanRun(db, { job_id: job.id })
|
|
66
|
+
createScanRun(db, { job_id: job.id })
|
|
67
|
+
expect(listScanRuns(db, job.id)).toHaveLength(2)
|
|
68
|
+
})
|
|
69
|
+
})
|
package/src/lib/jobs.ts
ADDED
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
import type { Database } from "bun:sqlite"
|
|
2
|
+
import type { ScanJob, ScanRun } from "../types/index.ts"
|
|
3
|
+
|
|
4
|
+
export function createJob(db: Database, data: { project_id: string; schedule: string; page_id?: string }): ScanJob {
|
|
5
|
+
return db.prepare(`
|
|
6
|
+
INSERT INTO scan_jobs (project_id, page_id, schedule)
|
|
7
|
+
VALUES ($project_id, $page_id, $schedule)
|
|
8
|
+
RETURNING *
|
|
9
|
+
`).get({
|
|
10
|
+
$project_id: data.project_id,
|
|
11
|
+
$page_id: data.page_id ?? null,
|
|
12
|
+
$schedule: data.schedule,
|
|
13
|
+
}) as ScanJob
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
export function listJobs(db: Database, projectId?: string): ScanJob[] {
|
|
17
|
+
if (projectId) {
|
|
18
|
+
return db.prepare("SELECT * FROM scan_jobs WHERE project_id = $p ORDER BY created_at DESC").all({ $p: projectId }) as ScanJob[]
|
|
19
|
+
}
|
|
20
|
+
return db.prepare("SELECT * FROM scan_jobs ORDER BY created_at DESC").all() as ScanJob[]
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
export function getJob(db: Database, id: string): ScanJob | null {
|
|
24
|
+
return db.prepare("SELECT * FROM scan_jobs WHERE id = $id").get({ $id: id }) as ScanJob | null
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
export function updateJob(db: Database, id: string, data: { enabled?: number; schedule?: string; last_run_at?: string }): ScanJob | null {
|
|
28
|
+
const fields = Object.keys(data).map(k => `${k} = $${k}`).join(", ")
|
|
29
|
+
if (!fields) return getJob(db, id)
|
|
30
|
+
const params = Object.fromEntries(Object.entries(data).map(([k, v]) => [`$${k}`, v]))
|
|
31
|
+
params.$id = id
|
|
32
|
+
return db.prepare(`UPDATE scan_jobs SET ${fields} WHERE id = $id RETURNING *`).get(params) as ScanJob | null
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
export function deleteJob(db: Database, id: string): void {
|
|
36
|
+
db.run("DELETE FROM scan_jobs WHERE id = $id", { $id: id })
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
export function createScanRun(db: Database, data: { job_id: string; page_id?: string }): ScanRun {
|
|
40
|
+
return db.prepare(`
|
|
41
|
+
INSERT INTO scan_runs (job_id, page_id) VALUES ($job_id, $page_id) RETURNING *
|
|
42
|
+
`).get({ $job_id: data.job_id, $page_id: data.page_id ?? null }) as ScanRun
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
export function finishScanRun(db: Database, id: string, data: { status: "completed" | "failed"; logs_collected: number; errors_found: number; perf_score?: number }): ScanRun | null {
|
|
46
|
+
return db.prepare(`
|
|
47
|
+
UPDATE scan_runs SET finished_at = strftime('%Y-%m-%dT%H:%M:%fZ','now'),
|
|
48
|
+
status = $status, logs_collected = $logs_collected,
|
|
49
|
+
errors_found = $errors_found, perf_score = $perf_score
|
|
50
|
+
WHERE id = $id RETURNING *
|
|
51
|
+
`).get({
|
|
52
|
+
$id: id,
|
|
53
|
+
$status: data.status,
|
|
54
|
+
$logs_collected: data.logs_collected,
|
|
55
|
+
$errors_found: data.errors_found,
|
|
56
|
+
$perf_score: data.perf_score ?? null,
|
|
57
|
+
}) as ScanRun | null
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
export function listScanRuns(db: Database, jobId: string, limit = 20): ScanRun[] {
|
|
61
|
+
return db.prepare("SELECT * FROM scan_runs WHERE job_id = $j ORDER BY started_at DESC LIMIT $l")
|
|
62
|
+
.all({ $j: jobId, $l: limit }) as ScanRun[]
|
|
63
|
+
}
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
import type { Database } from "bun:sqlite"
|
|
2
|
+
import { saveSnapshot } from "./perf.ts"
|
|
3
|
+
import type { PerformanceSnapshot } from "../types/index.ts"
|
|
4
|
+
|
|
5
|
+
// Subset of metrics pulled out of a Lighthouse performance-only run.
// Each nullable field is null when the corresponding audit had no
// numericValue in lhr.audits.
export interface LighthouseResult {
  lcp: number | null    // "largest-contentful-paint" audit numericValue
  fcp: number | null    // "first-contentful-paint" audit numericValue
  cls: number | null    // "cumulative-layout-shift" audit numericValue
  tti: number | null    // "interactive" audit numericValue
  ttfb: number | null   // "server-response-time" audit numericValue
  score: number | null  // performance category score scaled to 0-100
  raw_audit: string     // JSON-serialized lhr.audits blob
}
|
|
14
|
+
|
|
15
|
+
export async function runLighthouse(url: string): Promise<LighthouseResult | null> {
|
|
16
|
+
try {
|
|
17
|
+
// Dynamic import — lighthouse is an optional peer dep
|
|
18
|
+
const { default: lighthouse } = await import("lighthouse" as string)
|
|
19
|
+
const { chromium } = await import("playwright")
|
|
20
|
+
|
|
21
|
+
const browser = await chromium.launch({ headless: true, args: ["--remote-debugging-port=9222"] })
|
|
22
|
+
try {
|
|
23
|
+
const result = await lighthouse(url, {
|
|
24
|
+
port: 9222,
|
|
25
|
+
output: "json",
|
|
26
|
+
logLevel: "silent",
|
|
27
|
+
onlyCategories: ["performance"],
|
|
28
|
+
} as Parameters<typeof lighthouse>[1])
|
|
29
|
+
|
|
30
|
+
if (!result) return null
|
|
31
|
+
const audits = result.lhr.audits
|
|
32
|
+
const score = result.lhr.categories["performance"]?.score
|
|
33
|
+
|
|
34
|
+
return {
|
|
35
|
+
lcp: (audits["largest-contentful-paint"]?.numericValue ?? null) as number | null,
|
|
36
|
+
fcp: (audits["first-contentful-paint"]?.numericValue ?? null) as number | null,
|
|
37
|
+
cls: (audits["cumulative-layout-shift"]?.numericValue ?? null) as number | null,
|
|
38
|
+
tti: (audits["interactive"]?.numericValue ?? null) as number | null,
|
|
39
|
+
ttfb: (audits["server-response-time"]?.numericValue ?? null) as number | null,
|
|
40
|
+
score: score !== undefined ? score * 100 : null,
|
|
41
|
+
raw_audit: JSON.stringify(result.lhr.audits),
|
|
42
|
+
}
|
|
43
|
+
} finally {
|
|
44
|
+
await browser.close()
|
|
45
|
+
}
|
|
46
|
+
} catch {
|
|
47
|
+
return null
|
|
48
|
+
}
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
export async function runAndSaveLighthouse(
|
|
52
|
+
db: Database,
|
|
53
|
+
url: string,
|
|
54
|
+
projectId: string,
|
|
55
|
+
pageId?: string,
|
|
56
|
+
): Promise<PerformanceSnapshot | null> {
|
|
57
|
+
const result = await runLighthouse(url)
|
|
58
|
+
if (!result) return null
|
|
59
|
+
return saveSnapshot(db, {
|
|
60
|
+
project_id: projectId,
|
|
61
|
+
page_id: pageId ?? null,
|
|
62
|
+
url,
|
|
63
|
+
...result,
|
|
64
|
+
})
|
|
65
|
+
}
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
import { describe, expect, it } from "bun:test"
|
|
2
|
+
import { createTestDb } from "../db/index.ts"
|
|
3
|
+
import { getLatestSnapshot, getPerfTrend, saveSnapshot, scoreLabel } from "./perf.ts"
|
|
4
|
+
|
|
5
|
+
function seedProject(db: ReturnType<typeof createTestDb>) {
|
|
6
|
+
return db.prepare("INSERT INTO projects (name) VALUES ('app') RETURNING id").get() as { id: string }
|
|
7
|
+
}
|
|
8
|
+
|
|
9
|
+
describe("perf", () => {
|
|
10
|
+
it("saves and retrieves a snapshot", () => {
|
|
11
|
+
const db = createTestDb()
|
|
12
|
+
const p = seedProject(db)
|
|
13
|
+
const snap = saveSnapshot(db, { project_id: p.id, url: "https://app.com", lcp: 1200, fcp: 800, cls: 0.05, tti: 2000, ttfb: 100, score: 91, raw_audit: null, page_id: null })
|
|
14
|
+
expect(snap.id).toBeTruthy()
|
|
15
|
+
expect(snap.score).toBe(91)
|
|
16
|
+
const latest = getLatestSnapshot(db, p.id)
|
|
17
|
+
expect(latest?.id).toBe(snap.id)
|
|
18
|
+
})
|
|
19
|
+
|
|
20
|
+
it("returns null when no snapshot exists", () => {
|
|
21
|
+
const db = createTestDb()
|
|
22
|
+
const p = seedProject(db)
|
|
23
|
+
expect(getLatestSnapshot(db, p.id)).toBeNull()
|
|
24
|
+
})
|
|
25
|
+
|
|
26
|
+
it("returns trend in desc order", () => {
|
|
27
|
+
const db = createTestDb()
|
|
28
|
+
const p = seedProject(db)
|
|
29
|
+
saveSnapshot(db, { project_id: p.id, url: "https://app.com", lcp: 1000, fcp: 700, cls: 0.03, tti: 1800, ttfb: 90, score: 95, raw_audit: null, page_id: null })
|
|
30
|
+
saveSnapshot(db, { project_id: p.id, url: "https://app.com", lcp: 2000, fcp: 1200, cls: 0.1, tti: 3000, ttfb: 200, score: 70, raw_audit: null, page_id: null })
|
|
31
|
+
const trend = getPerfTrend(db, p.id)
|
|
32
|
+
expect(trend).toHaveLength(2)
|
|
33
|
+
expect(trend[0]!.timestamp >= trend[1]!.timestamp).toBe(true)
|
|
34
|
+
})
|
|
35
|
+
})
|
|
36
|
+
|
|
37
|
+
describe("scoreLabel", () => {
|
|
38
|
+
it("returns green for >= 90", () => expect(scoreLabel(90)).toBe("green"))
|
|
39
|
+
it("returns green for 100", () => expect(scoreLabel(100)).toBe("green"))
|
|
40
|
+
it("returns yellow for 50-89", () => expect(scoreLabel(75)).toBe("yellow"))
|
|
41
|
+
it("returns yellow for 50", () => expect(scoreLabel(50)).toBe("yellow"))
|
|
42
|
+
it("returns red for < 50", () => expect(scoreLabel(49)).toBe("red"))
|
|
43
|
+
it("returns red for 0", () => expect(scoreLabel(0)).toBe("red"))
|
|
44
|
+
it("returns unknown for null", () => expect(scoreLabel(null)).toBe("unknown"))
|
|
45
|
+
})
|
package/src/lib/perf.ts
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
import type { Database } from "bun:sqlite"
|
|
2
|
+
import type { PerformanceSnapshot } from "../types/index.ts"
|
|
3
|
+
|
|
4
|
+
export function saveSnapshot(db: Database, data: Omit<PerformanceSnapshot, "id" | "timestamp">): PerformanceSnapshot {
|
|
5
|
+
return db.prepare(`
|
|
6
|
+
INSERT INTO performance_snapshots (project_id, page_id, url, lcp, fcp, cls, tti, ttfb, score, raw_audit)
|
|
7
|
+
VALUES ($project_id, $page_id, $url, $lcp, $fcp, $cls, $tti, $ttfb, $score, $raw_audit)
|
|
8
|
+
RETURNING *
|
|
9
|
+
`).get({
|
|
10
|
+
$project_id: data.project_id,
|
|
11
|
+
$page_id: data.page_id ?? null,
|
|
12
|
+
$url: data.url,
|
|
13
|
+
$lcp: data.lcp ?? null,
|
|
14
|
+
$fcp: data.fcp ?? null,
|
|
15
|
+
$cls: data.cls ?? null,
|
|
16
|
+
$tti: data.tti ?? null,
|
|
17
|
+
$ttfb: data.ttfb ?? null,
|
|
18
|
+
$score: data.score ?? null,
|
|
19
|
+
$raw_audit: data.raw_audit ?? null,
|
|
20
|
+
}) as PerformanceSnapshot
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
export function getLatestSnapshot(db: Database, projectId: string, pageId?: string): PerformanceSnapshot | null {
|
|
24
|
+
if (pageId) {
|
|
25
|
+
return db.prepare("SELECT * FROM performance_snapshots WHERE project_id = $p AND page_id = $pg ORDER BY timestamp DESC LIMIT 1")
|
|
26
|
+
.get({ $p: projectId, $pg: pageId }) as PerformanceSnapshot | null
|
|
27
|
+
}
|
|
28
|
+
return db.prepare("SELECT * FROM performance_snapshots WHERE project_id = $p ORDER BY timestamp DESC LIMIT 1")
|
|
29
|
+
.get({ $p: projectId }) as PerformanceSnapshot | null
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
export function getPerfTrend(db: Database, projectId: string, pageId?: string, since?: string, limit = 50): PerformanceSnapshot[] {
|
|
33
|
+
const conditions = ["project_id = $p"]
|
|
34
|
+
const params: Record<string, unknown> = { $p: projectId, $limit: limit }
|
|
35
|
+
if (pageId) { conditions.push("page_id = $pg"); params.$pg = pageId }
|
|
36
|
+
if (since) { conditions.push("timestamp >= $since"); params.$since = since }
|
|
37
|
+
return db.prepare(`SELECT * FROM performance_snapshots WHERE ${conditions.join(" AND ")} ORDER BY timestamp DESC LIMIT $limit`)
|
|
38
|
+
.all(params) as PerformanceSnapshot[]
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
export function scoreLabel(score: number | null): "green" | "yellow" | "red" | "unknown" {
|
|
42
|
+
if (score === null) return "unknown"
|
|
43
|
+
if (score >= 90) return "green"
|
|
44
|
+
if (score >= 50) return "yellow"
|
|
45
|
+
return "red"
|
|
46
|
+
}
|
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
import { describe, expect, it } from "bun:test"
|
|
2
|
+
import { createTestDb } from "../db/index.ts"
|
|
3
|
+
import { createPage, createProject, getPage, getProject, listPages, listProjects, touchPage, updateProject } from "./projects.ts"
|
|
4
|
+
|
|
5
|
+
describe("projects", () => {
|
|
6
|
+
it("creates a project", () => {
|
|
7
|
+
const db = createTestDb()
|
|
8
|
+
const p = createProject(db, { name: "my-app", github_repo: "https://github.com/foo/bar", base_url: "https://myapp.com" })
|
|
9
|
+
expect(p.id).toBeTruthy()
|
|
10
|
+
expect(p.name).toBe("my-app")
|
|
11
|
+
expect(p.github_repo).toBe("https://github.com/foo/bar")
|
|
12
|
+
})
|
|
13
|
+
|
|
14
|
+
it("lists projects", () => {
|
|
15
|
+
const db = createTestDb()
|
|
16
|
+
createProject(db, { name: "app1" })
|
|
17
|
+
createProject(db, { name: "app2" })
|
|
18
|
+
expect(listProjects(db)).toHaveLength(2)
|
|
19
|
+
})
|
|
20
|
+
|
|
21
|
+
it("gets a project by id", () => {
|
|
22
|
+
const db = createTestDb()
|
|
23
|
+
const p = createProject(db, { name: "test" })
|
|
24
|
+
expect(getProject(db, p.id)?.name).toBe("test")
|
|
25
|
+
})
|
|
26
|
+
|
|
27
|
+
it("returns null for unknown id", () => {
|
|
28
|
+
expect(getProject(createTestDb(), "nope")).toBeNull()
|
|
29
|
+
})
|
|
30
|
+
|
|
31
|
+
it("updates project fields", () => {
|
|
32
|
+
const db = createTestDb()
|
|
33
|
+
const p = createProject(db, { name: "x" })
|
|
34
|
+
const updated = updateProject(db, p.id, { github_sha: "abc123" })
|
|
35
|
+
expect(updated?.github_sha).toBe("abc123")
|
|
36
|
+
})
|
|
37
|
+
})
|
|
38
|
+
|
|
39
|
+
describe("pages", () => {
|
|
40
|
+
it("creates a page", () => {
|
|
41
|
+
const db = createTestDb()
|
|
42
|
+
const p = createProject(db, { name: "app" })
|
|
43
|
+
const page = createPage(db, { project_id: p.id, url: "https://app.com/dashboard", name: "Dashboard" })
|
|
44
|
+
expect(page.id).toBeTruthy()
|
|
45
|
+
expect(page.url).toBe("https://app.com/dashboard")
|
|
46
|
+
expect(page.name).toBe("Dashboard")
|
|
47
|
+
})
|
|
48
|
+
|
|
49
|
+
it("upserts on duplicate url", () => {
|
|
50
|
+
const db = createTestDb()
|
|
51
|
+
const p = createProject(db, { name: "app" })
|
|
52
|
+
createPage(db, { project_id: p.id, url: "https://app.com/", name: "Home" })
|
|
53
|
+
createPage(db, { project_id: p.id, url: "https://app.com/", name: "Home v2" })
|
|
54
|
+
expect(listPages(db, p.id)).toHaveLength(1)
|
|
55
|
+
})
|
|
56
|
+
|
|
57
|
+
it("lists pages for a project", () => {
|
|
58
|
+
const db = createTestDb()
|
|
59
|
+
const p = createProject(db, { name: "app" })
|
|
60
|
+
createPage(db, { project_id: p.id, url: "https://app.com/a" })
|
|
61
|
+
createPage(db, { project_id: p.id, url: "https://app.com/b" })
|
|
62
|
+
expect(listPages(db, p.id)).toHaveLength(2)
|
|
63
|
+
})
|
|
64
|
+
|
|
65
|
+
it("touches last_scanned_at", () => {
|
|
66
|
+
const db = createTestDb()
|
|
67
|
+
const p = createProject(db, { name: "app" })
|
|
68
|
+
const page = createPage(db, { project_id: p.id, url: "https://app.com/" })
|
|
69
|
+
expect(page.last_scanned_at).toBeNull()
|
|
70
|
+
touchPage(db, page.id)
|
|
71
|
+
expect(getPage(db, page.id)?.last_scanned_at).toBeTruthy()
|
|
72
|
+
})
|
|
73
|
+
})
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
import type { Database } from "bun:sqlite"
|
|
2
|
+
import type { Page, Project } from "../types/index.ts"
|
|
3
|
+
|
|
4
|
+
// Projects
|
|
5
|
+
export function createProject(db: Database, data: { name: string; github_repo?: string; base_url?: string; description?: string }): Project {
|
|
6
|
+
return db.prepare(`
|
|
7
|
+
INSERT INTO projects (name, github_repo, base_url, description)
|
|
8
|
+
VALUES ($name, $github_repo, $base_url, $description)
|
|
9
|
+
RETURNING *
|
|
10
|
+
`).get({
|
|
11
|
+
$name: data.name,
|
|
12
|
+
$github_repo: data.github_repo ?? null,
|
|
13
|
+
$base_url: data.base_url ?? null,
|
|
14
|
+
$description: data.description ?? null,
|
|
15
|
+
}) as Project
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
export function listProjects(db: Database): Project[] {
|
|
19
|
+
return db.prepare("SELECT * FROM projects ORDER BY created_at DESC").all() as Project[]
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
export function getProject(db: Database, id: string): Project | null {
|
|
23
|
+
return db.prepare("SELECT * FROM projects WHERE id = $id").get({ $id: id }) as Project | null
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
export function updateProject(db: Database, id: string, data: Partial<Pick<Project, "name" | "github_repo" | "base_url" | "description" | "github_description" | "github_branch" | "github_sha" | "last_synced_at">>): Project | null {
|
|
27
|
+
const fields = Object.keys(data).map(k => `${k} = $${k}`).join(", ")
|
|
28
|
+
if (!fields) return getProject(db, id)
|
|
29
|
+
const params = Object.fromEntries(Object.entries(data).map(([k, v]) => [`$${k}`, v]))
|
|
30
|
+
params.$id = id
|
|
31
|
+
return db.prepare(`UPDATE projects SET ${fields} WHERE id = $id RETURNING *`).get(params) as Project | null
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
// Pages
|
|
35
|
+
export function createPage(db: Database, data: { project_id: string; url: string; path?: string; name?: string }): Page {
|
|
36
|
+
return db.prepare(`
|
|
37
|
+
INSERT INTO pages (project_id, url, path, name)
|
|
38
|
+
VALUES ($project_id, $url, $path, $name)
|
|
39
|
+
ON CONFLICT(project_id, url) DO UPDATE SET name = excluded.name
|
|
40
|
+
RETURNING *
|
|
41
|
+
`).get({
|
|
42
|
+
$project_id: data.project_id,
|
|
43
|
+
$url: data.url,
|
|
44
|
+
$path: data.path ?? new URL(data.url).pathname,
|
|
45
|
+
$name: data.name ?? null,
|
|
46
|
+
}) as Page
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
export function listPages(db: Database, projectId: string): Page[] {
|
|
50
|
+
return db.prepare("SELECT * FROM pages WHERE project_id = $p ORDER BY created_at ASC").all({ $p: projectId }) as Page[]
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
export function getPage(db: Database, id: string): Page | null {
|
|
54
|
+
return db.prepare("SELECT * FROM pages WHERE id = $id").get({ $id: id }) as Page | null
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
export function touchPage(db: Database, id: string): void {
|
|
58
|
+
db.run("UPDATE pages SET last_scanned_at = strftime('%Y-%m-%dT%H:%M:%fZ','now') WHERE id = $id", { $id: id })
|
|
59
|
+
}
|
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
import { describe, expect, it } from "bun:test"
|
|
2
|
+
import { createTestDb } from "../db/index.ts"
|
|
3
|
+
import { ingestBatch } from "./ingest.ts"
|
|
4
|
+
import { getLogContext, searchLogs, tailLogs } from "./query.ts"
|
|
5
|
+
|
|
6
|
+
function seed(db: ReturnType<typeof createTestDb>) {
|
|
7
|
+
ingestBatch(db, [
|
|
8
|
+
{ level: "error", message: "DB connection failed", service: "api", trace_id: "t1" },
|
|
9
|
+
{ level: "warn", message: "Slow query detected", service: "api", trace_id: "t1" },
|
|
10
|
+
{ level: "info", message: "User login", service: "auth" },
|
|
11
|
+
{ level: "debug", message: "Cache miss", service: "cache" },
|
|
12
|
+
{ level: "fatal", message: "Out of memory", service: "worker" },
|
|
13
|
+
])
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
describe("searchLogs", () => {
|
|
17
|
+
it("returns all logs without filters", () => {
|
|
18
|
+
const db = createTestDb()
|
|
19
|
+
seed(db)
|
|
20
|
+
const rows = searchLogs(db, {})
|
|
21
|
+
expect(rows.length).toBe(5)
|
|
22
|
+
})
|
|
23
|
+
|
|
24
|
+
it("filters by level", () => {
|
|
25
|
+
const db = createTestDb()
|
|
26
|
+
seed(db)
|
|
27
|
+
const rows = searchLogs(db, { level: "error" })
|
|
28
|
+
expect(rows.every(r => r.level === "error")).toBe(true)
|
|
29
|
+
})
|
|
30
|
+
|
|
31
|
+
it("filters by multiple levels", () => {
|
|
32
|
+
const db = createTestDb()
|
|
33
|
+
seed(db)
|
|
34
|
+
const rows = searchLogs(db, { level: ["error", "fatal"] })
|
|
35
|
+
expect(rows).toHaveLength(2)
|
|
36
|
+
})
|
|
37
|
+
|
|
38
|
+
it("filters by service", () => {
|
|
39
|
+
const db = createTestDb()
|
|
40
|
+
seed(db)
|
|
41
|
+
const rows = searchLogs(db, { service: "api" })
|
|
42
|
+
expect(rows).toHaveLength(2)
|
|
43
|
+
})
|
|
44
|
+
|
|
45
|
+
it("full-text search on message", () => {
|
|
46
|
+
const db = createTestDb()
|
|
47
|
+
seed(db)
|
|
48
|
+
const rows = searchLogs(db, { text: "connection" })
|
|
49
|
+
expect(rows).toHaveLength(1)
|
|
50
|
+
expect(rows[0]!.message).toContain("connection")
|
|
51
|
+
})
|
|
52
|
+
|
|
53
|
+
it("filters by trace_id", () => {
|
|
54
|
+
const db = createTestDb()
|
|
55
|
+
seed(db)
|
|
56
|
+
const rows = searchLogs(db, { trace_id: "t1" })
|
|
57
|
+
expect(rows).toHaveLength(2)
|
|
58
|
+
})
|
|
59
|
+
|
|
60
|
+
it("respects limit", () => {
|
|
61
|
+
const db = createTestDb()
|
|
62
|
+
seed(db)
|
|
63
|
+
const rows = searchLogs(db, { limit: 2 })
|
|
64
|
+
expect(rows).toHaveLength(2)
|
|
65
|
+
})
|
|
66
|
+
|
|
67
|
+
it("returns results ordered by timestamp desc", () => {
|
|
68
|
+
const db = createTestDb()
|
|
69
|
+
seed(db)
|
|
70
|
+
const rows = searchLogs(db, {})
|
|
71
|
+
expect(rows[0]!.timestamp >= rows[rows.length - 1]!.timestamp).toBe(true)
|
|
72
|
+
})
|
|
73
|
+
})
|
|
74
|
+
|
|
75
|
+
describe("tailLogs", () => {
|
|
76
|
+
it("returns n most recent logs", () => {
|
|
77
|
+
const db = createTestDb()
|
|
78
|
+
seed(db)
|
|
79
|
+
const rows = tailLogs(db, undefined, 3)
|
|
80
|
+
expect(rows).toHaveLength(3)
|
|
81
|
+
})
|
|
82
|
+
|
|
83
|
+
it("filters by project_id", () => {
|
|
84
|
+
const db = createTestDb()
|
|
85
|
+
const rows = tailLogs(db, "nonexistent")
|
|
86
|
+
expect(rows).toHaveLength(0)
|
|
87
|
+
})
|
|
88
|
+
})
|
|
89
|
+
|
|
90
|
+
describe("getLogContext", () => {
|
|
91
|
+
it("returns all logs for a trace_id in asc order", () => {
|
|
92
|
+
const db = createTestDb()
|
|
93
|
+
seed(db)
|
|
94
|
+
const rows = getLogContext(db, "t1")
|
|
95
|
+
expect(rows).toHaveLength(2)
|
|
96
|
+
expect(rows[0]!.timestamp <= rows[1]!.timestamp).toBe(true)
|
|
97
|
+
})
|
|
98
|
+
|
|
99
|
+
it("returns empty for unknown trace_id", () => {
|
|
100
|
+
const db = createTestDb()
|
|
101
|
+
const rows = getLogContext(db, "unknown")
|
|
102
|
+
expect(rows).toHaveLength(0)
|
|
103
|
+
})
|
|
104
|
+
})
|