@hasna/logs 0.1.0 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. package/dashboard/README.md +73 -0
  2. package/dashboard/bun.lock +526 -0
  3. package/dashboard/eslint.config.js +23 -0
  4. package/dashboard/index.html +13 -0
  5. package/dashboard/package.json +32 -0
  6. package/dashboard/public/favicon.svg +1 -0
  7. package/dashboard/public/icons.svg +24 -0
  8. package/dashboard/src/App.css +184 -0
  9. package/dashboard/src/App.tsx +49 -0
  10. package/dashboard/src/api.ts +33 -0
  11. package/dashboard/src/assets/hero.png +0 -0
  12. package/dashboard/src/assets/react.svg +1 -0
  13. package/dashboard/src/assets/vite.svg +1 -0
  14. package/dashboard/src/index.css +111 -0
  15. package/dashboard/src/main.tsx +10 -0
  16. package/dashboard/src/pages/Alerts.tsx +69 -0
  17. package/dashboard/src/pages/Issues.tsx +50 -0
  18. package/dashboard/src/pages/Perf.tsx +75 -0
  19. package/dashboard/src/pages/Projects.tsx +67 -0
  20. package/dashboard/src/pages/Summary.tsx +67 -0
  21. package/dashboard/src/pages/Tail.tsx +65 -0
  22. package/dashboard/tsconfig.app.json +28 -0
  23. package/dashboard/tsconfig.json +7 -0
  24. package/dashboard/tsconfig.node.json +26 -0
  25. package/dashboard/vite.config.ts +14 -0
  26. package/dist/cli/index.js +80 -9
  27. package/dist/mcp/index.js +217 -96
  28. package/dist/server/index.js +307 -7
  29. package/package.json +3 -1
  30. package/sdk/package.json +3 -2
  31. package/sdk/src/index.ts +1 -1
  32. package/sdk/src/types.ts +56 -0
  33. package/src/cli/index.ts +70 -4
  34. package/src/lib/count.test.ts +44 -0
  35. package/src/lib/count.ts +45 -0
  36. package/src/lib/diagnose.ts +26 -11
  37. package/src/lib/parse-time.test.ts +37 -0
  38. package/src/lib/parse-time.ts +14 -0
  39. package/src/lib/projects.ts +10 -0
  40. package/src/lib/query.ts +10 -2
  41. package/src/lib/session-context.ts +28 -0
  42. package/src/lib/summarize.ts +2 -1
  43. package/src/mcp/index.ts +138 -59
  44. package/src/server/index.ts +4 -1
  45. package/src/server/routes/logs.ts +28 -1
@@ -1,8 +1,13 @@
1
1
  import type { Database } from "bun:sqlite"
2
+ import { parseTime } from "./parse-time.ts"
2
3
 
3
4
  export interface DiagnosisResult {
4
5
  project_id: string
5
6
  window: string
7
+ score: "green" | "yellow" | "red"
8
+ error_count: number
9
+ warn_count: number
10
+ has_perf_regression: boolean
6
11
  top_errors: { message: string; count: number; service: string | null; last_seen: string }[]
7
12
  error_rate_by_service: { service: string | null; errors: number; warns: number; total: number }[]
8
13
  failing_pages: { page_id: string; url: string; error_count: number }[]
@@ -10,21 +15,25 @@ export interface DiagnosisResult {
10
15
  summary: string
11
16
  }
12
17
 
13
- export function diagnose(db: Database, projectId: string, since?: string): DiagnosisResult {
14
- const window = since ?? new Date(Date.now() - 24 * 3600 * 1000).toISOString()
18
+ export type DiagnoseInclude = "top_errors" | "error_rate" | "failing_pages" | "perf"
19
+
20
+ export function diagnose(db: Database, projectId: string, since?: string, include?: DiagnoseInclude[]): DiagnosisResult {
21
+ const window = parseTime(since) ?? since ?? new Date(Date.now() - 24 * 3600 * 1000).toISOString()
22
+ const all = !include || include.length === 0
23
+ const want = (k: DiagnoseInclude) => all || include!.includes(k)
15
24
 
16
25
  // Top errors by message
17
- const top_errors = db.prepare(`
26
+ const top_errors = want("top_errors") ? db.prepare(`
18
27
  SELECT message, COUNT(*) as count, service, MAX(timestamp) as last_seen
19
28
  FROM logs
20
29
  WHERE project_id = $p AND level IN ('error','fatal') AND timestamp >= $since
21
30
  GROUP BY message, service
22
31
  ORDER BY count DESC
23
32
  LIMIT 10
24
- `).all({ $p: projectId, $since: window }) as DiagnosisResult["top_errors"]
33
+ `).all({ $p: projectId, $since: window }) as DiagnosisResult["top_errors"] : []
25
34
 
26
35
  // Error rate by service
27
- const error_rate_by_service = db.prepare(`
36
+ const error_rate_by_service = want("error_rate") ? db.prepare(`
28
37
  SELECT service,
29
38
  SUM(CASE WHEN level IN ('error','fatal') THEN 1 ELSE 0 END) as errors,
30
39
  SUM(CASE WHEN level = 'warn' THEN 1 ELSE 0 END) as warns,
@@ -33,10 +42,10 @@ export function diagnose(db: Database, projectId: string, since?: string): Diagn
33
42
  WHERE project_id = $p AND timestamp >= $since
34
43
  GROUP BY service
35
44
  ORDER BY errors DESC
36
- `).all({ $p: projectId, $since: window }) as DiagnosisResult["error_rate_by_service"]
45
+ `).all({ $p: projectId, $since: window }) as DiagnosisResult["error_rate_by_service"] : []
37
46
 
38
47
  // Failing pages (most errors)
39
- const failing_pages = db.prepare(`
48
+ const failing_pages = want("failing_pages") ? db.prepare(`
40
49
  SELECT l.page_id, p.url, COUNT(*) as error_count
41
50
  FROM logs l
42
51
  JOIN pages p ON p.id = l.page_id
@@ -44,10 +53,10 @@ export function diagnose(db: Database, projectId: string, since?: string): Diagn
44
53
  GROUP BY l.page_id, p.url
45
54
  ORDER BY error_count DESC
46
55
  LIMIT 10
47
- `).all({ $p: projectId, $since: window }) as DiagnosisResult["failing_pages"]
56
+ `).all({ $p: projectId, $since: window }) as DiagnosisResult["failing_pages"] : []
48
57
 
49
58
  // Perf regressions: compare latest vs previous snapshot per page
50
- const perf_regressions = db.prepare(`
59
+ const perf_regressions = want("perf") ? db.prepare(`
51
60
  SELECT * FROM (
52
61
  SELECT
53
62
  cur.page_id,
@@ -64,13 +73,19 @@ export function diagnose(db: Database, projectId: string, since?: string): Diagn
64
73
  ) WHERE delta < -5 OR delta IS NULL
65
74
  ORDER BY delta ASC
66
75
  LIMIT 10
67
- `).all({ $p: projectId }) as DiagnosisResult["perf_regressions"]
76
+ `).all({ $p: projectId }) as DiagnosisResult["perf_regressions"] : []
68
77
 
69
78
  const totalErrors = top_errors.reduce((s, e) => s + e.count, 0)
79
+ const totalWarns = error_rate_by_service.reduce((s, r) => s + r.warns, 0)
70
80
  const topService = error_rate_by_service[0]
81
+ const score: "green" | "yellow" | "red" = totalErrors === 0 ? "green" : totalErrors <= 10 ? "yellow" : "red"
71
82
  const summary = totalErrors === 0
72
83
  ? "No errors in this window. All looks good."
73
84
  : `${totalErrors} error(s) detected. Worst service: ${topService?.service ?? "unknown"} (${topService?.errors ?? 0} errors). ${failing_pages.length} page(s) with errors. ${perf_regressions.length} perf regression(s).`
74
85
 
75
- return { project_id: projectId, window, top_errors, error_rate_by_service, failing_pages, perf_regressions, summary }
86
+ return {
87
+ project_id: projectId, window, score, error_count: totalErrors, warn_count: totalWarns,
88
+ has_perf_regression: perf_regressions.length > 0,
89
+ top_errors, error_rate_by_service, failing_pages, perf_regressions, summary,
90
+ }
76
91
  }
@@ -0,0 +1,37 @@
1
+ import { describe, expect, it } from "bun:test"
2
+ import { parseTime } from "./parse-time.ts"
3
+
4
+ describe("parseTime", () => {
5
+ it("returns undefined for undefined input", () => expect(parseTime(undefined)).toBeUndefined())
6
+ it("returns ISO string unchanged", () => {
7
+ const iso = "2026-01-01T00:00:00.000Z"
8
+ expect(parseTime(iso)).toBe(iso)
9
+ })
10
+ it("parses 30m", () => {
11
+ const result = parseTime("30m")!
12
+ const diff = Date.now() - new Date(result).getTime()
13
+ expect(diff).toBeGreaterThan(29 * 60 * 1000)
14
+ expect(diff).toBeLessThan(31 * 60 * 1000)
15
+ })
16
+ it("parses 1h", () => {
17
+ const result = parseTime("1h")!
18
+ const diff = Date.now() - new Date(result).getTime()
19
+ expect(diff).toBeGreaterThan(59 * 60 * 1000)
20
+ expect(diff).toBeLessThan(61 * 60 * 1000)
21
+ })
22
+ it("parses 7d", () => {
23
+ const result = parseTime("7d")!
24
+ const diff = Date.now() - new Date(result).getTime()
25
+ expect(diff).toBeGreaterThan(6.9 * 86400 * 1000)
26
+ expect(diff).toBeLessThan(7.1 * 86400 * 1000)
27
+ })
28
+ it("parses 1w", () => {
29
+ const result = parseTime("1w")!
30
+ const diff = Date.now() - new Date(result).getTime()
31
+ expect(diff).toBeGreaterThan(6.9 * 86400 * 1000)
32
+ })
33
+ it("returns unknown strings unchanged", () => {
34
+ expect(parseTime("yesterday")).toBe("yesterday")
35
+ expect(parseTime("now")).toBe("now")
36
+ })
37
+ })
@@ -0,0 +1,14 @@
1
+ /**
2
+ * Parses a relative time string or ISO timestamp into an ISO timestamp.
3
+ * Accepts: "30m", "1h", "2h", "24h", "7d", "1w" or any ISO string.
4
+ * Returns the input unchanged if it doesn't match a relative format.
5
+ */
6
+ export function parseTime(val: string | undefined): string | undefined {
7
+ if (!val) return undefined
8
+ const m = val.match(/^(\d+(?:\.\d+)?)(m|h|d|w)$/)
9
+ if (!m) return val
10
+ const n = parseFloat(m[1]!)
11
+ const unit = m[2]!
12
+ const ms = n * ({ m: 60, h: 3600, d: 86400, w: 604800 }[unit]!) * 1000
13
+ return new Date(Date.now() - ms).toISOString()
14
+ }
@@ -54,6 +54,16 @@ export function getPage(db: Database, id: string): Page | null {
54
54
  return db.prepare("SELECT * FROM pages WHERE id = $id").get({ $id: id }) as Page | null
55
55
  }
56
56
 
57
+ /** Resolves a project ID or name to a project ID. Returns null if not found or input is empty. */
58
+ export function resolveProjectId(db: Database, idOrName: string | undefined | null): string | null {
59
+ if (!idOrName) return null
60
+ // Looks like a hex ID (8+ hex chars)
61
+ if (/^[0-9a-f]{8,}$/i.test(idOrName)) return idOrName
62
+ // Try name lookup (case-insensitive)
63
+ const p = db.prepare("SELECT id FROM projects WHERE LOWER(name) = LOWER($n)").get({ $n: idOrName }) as { id: string } | null
64
+ return p?.id ?? null
65
+ }
66
+
57
67
  export function touchPage(db: Database, id: string): void {
58
68
  db.run("UPDATE pages SET last_scanned_at = strftime('%Y-%m-%dT%H:%M:%fZ','now') WHERE id = $id", { $id: id })
59
69
  }
package/src/lib/query.ts CHANGED
@@ -1,5 +1,6 @@
1
1
  import type { Database } from "bun:sqlite"
2
2
  import type { LogQuery, LogRow } from "../types/index.ts"
3
+ import { parseTime } from "./parse-time.ts"
3
4
 
4
5
  export function searchLogs(db: Database, q: LogQuery): LogRow[] {
5
6
  const conditions: string[] = []
@@ -9,8 +10,8 @@ export function searchLogs(db: Database, q: LogQuery): LogRow[] {
9
10
  if (q.page_id) { conditions.push("l.page_id = $page_id"); params.$page_id = q.page_id }
10
11
  if (q.service) { conditions.push("l.service = $service"); params.$service = q.service }
11
12
  if (q.trace_id) { conditions.push("l.trace_id = $trace_id"); params.$trace_id = q.trace_id }
12
- if (q.since) { conditions.push("l.timestamp >= $since"); params.$since = q.since }
13
- if (q.until) { conditions.push("l.timestamp <= $until"); params.$until = q.until }
13
+ if (q.since) { conditions.push("l.timestamp >= $since"); params.$since = parseTime(q.since) ?? q.since }
14
+ if (q.until) { conditions.push("l.timestamp <= $until"); params.$until = parseTime(q.until) ?? q.until }
14
15
 
15
16
  if (q.level) {
16
17
  const levels = Array.isArray(q.level) ? q.level : [q.level]
@@ -54,3 +55,10 @@ export function getLogContext(db: Database, traceId: string): LogRow[] {
54
55
  return db.prepare("SELECT * FROM logs WHERE trace_id = $t ORDER BY timestamp ASC")
55
56
  .all({ $t: traceId }) as LogRow[]
56
57
  }
58
+
59
+ export function getLogContextFromId(db: Database, logId: string): LogRow[] {
60
+ const log = db.prepare("SELECT * FROM logs WHERE id = $id").get({ $id: logId }) as LogRow | null
61
+ if (!log) return []
62
+ if (log.trace_id) return getLogContext(db, log.trace_id)
63
+ return [log]
64
+ }
@@ -0,0 +1,28 @@
1
+ import type { Database } from "bun:sqlite"
2
+ import type { LogRow } from "../types/index.ts"
3
+
4
+ export interface SessionContext {
5
+ session_id: string
6
+ logs: LogRow[]
7
+ session?: Record<string, unknown>
8
+ error?: string
9
+ }
10
+
11
+ export async function getSessionContext(db: Database, sessionId: string): Promise<SessionContext> {
12
+ const logs = db.prepare("SELECT * FROM logs WHERE session_id = $s ORDER BY timestamp ASC")
13
+ .all({ $s: sessionId }) as LogRow[]
14
+
15
+ const sessionsUrl = process.env.SESSIONS_URL
16
+ if (!sessionsUrl) {
17
+ return { session_id: sessionId, logs }
18
+ }
19
+
20
+ try {
21
+ const res = await fetch(`${sessionsUrl.replace(/\/$/, "")}/api/sessions/${sessionId}`)
22
+ if (!res.ok) return { session_id: sessionId, logs }
23
+ const session = await res.json() as Record<string, unknown>
24
+ return { session_id: sessionId, logs, session }
25
+ } catch (err) {
26
+ return { session_id: sessionId, logs, error: String(err) }
27
+ }
28
+ }
@@ -1,12 +1,13 @@
1
1
  import type { Database } from "bun:sqlite"
2
2
  import type { LogSummary } from "../types/index.ts"
3
+ import { parseTime } from "./parse-time.ts"
3
4
 
4
5
  export function summarizeLogs(db: Database, projectId?: string, since?: string): LogSummary[] {
5
6
  const conditions: string[] = ["level IN ('warn','error','fatal')"]
6
7
  const params: Record<string, unknown> = {}
7
8
 
8
9
  if (projectId) { conditions.push("project_id = $project_id"); params.$project_id = projectId }
9
- if (since) { conditions.push("timestamp >= $since"); params.$since = since }
10
+ if (since) { conditions.push("timestamp >= $since"); params.$since = parseTime(since) ?? since }
10
11
 
11
12
  const where = `WHERE ${conditions.join(" AND ")}`
12
13
  const sql = `
package/src/mcp/index.ts CHANGED
@@ -3,92 +3,130 @@ import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"
3
3
  import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"
4
4
  import { z } from "zod"
5
5
  import { getDb } from "../db/index.ts"
6
- import { ingestLog } from "../lib/ingest.ts"
7
- import { getLogContext, searchLogs, tailLogs } from "../lib/query.ts"
6
+ import { ingestBatch, ingestLog } from "../lib/ingest.ts"
7
+ import { getLogContext, getLogContextFromId, searchLogs, tailLogs } from "../lib/query.ts"
8
8
  import { summarizeLogs } from "../lib/summarize.ts"
9
+ import { countLogs } from "../lib/count.ts"
9
10
  import { createJob, listJobs } from "../lib/jobs.ts"
10
- import { createPage, createProject, listPages, listProjects } from "../lib/projects.ts"
11
+ import { createPage, createProject, listPages, listProjects, resolveProjectId } from "../lib/projects.ts"
11
12
  import { getLatestSnapshot, getPerfTrend, scoreLabel } from "../lib/perf.ts"
12
13
  import { createAlertRule, deleteAlertRule, listAlertRules } from "../lib/alerts.ts"
13
14
  import { listIssues, updateIssueStatus } from "../lib/issues.ts"
14
15
  import { diagnose } from "../lib/diagnose.ts"
15
16
  import { compare } from "../lib/compare.ts"
16
17
  import { getHealth } from "../lib/health.ts"
18
+ import { getSessionContext } from "../lib/session-context.ts"
19
+ import { parseTime } from "../lib/parse-time.ts"
17
20
  import type { LogLevel, LogRow } from "../types/index.ts"
18
21
 
19
22
  const db = getDb()
20
- const server = new McpServer({ name: "logs", version: "0.1.0" })
23
+ const server = new McpServer({ name: "logs", version: "0.3.0" })
21
24
 
22
25
  const BRIEF_FIELDS: (keyof LogRow)[] = ["id", "timestamp", "level", "message", "service"]
23
26
 
24
27
  function applyBrief(rows: LogRow[], brief = true): unknown[] {
25
28
  if (!brief) return rows
26
- return rows.map(r => ({ id: r.id, timestamp: r.timestamp, level: r.level, message: r.message, service: r.service }))
29
+ return rows.map(r => ({
30
+ id: r.id,
31
+ timestamp: r.timestamp,
32
+ level: r.level,
33
+ message: r.message,
34
+ service: r.service,
35
+ age_seconds: Math.floor((Date.now() - new Date(r.timestamp).getTime()) / 1000),
36
+ }))
27
37
  }
28
38
 
29
- const TOOLS: Record<string, string> = {
30
- register_project: "Register a project (name, github_repo?, base_url?, description?)",
31
- register_page: "Register a page URL (project_id, url, path?, name?)",
32
- create_scan_job: "Schedule page scans (project_id, schedule, page_id?)",
33
- log_push: "Push a log entry (level, message, project_id?, service?, trace_id?, metadata?)",
34
- log_search: "Search logs (project_id?, level?, since?, text?, brief?=true, limit?)",
35
- log_tail: "Recent logs (project_id?, n?, brief?=true)",
36
- log_summary: "Error/warn counts by service (project_id?, since?)",
37
- log_context: "All logs for a trace_id (trace_id, brief?=true)",
38
- log_diagnose: "Full diagnosis: top errors, failing pages, perf regressions (project_id, since?)",
39
- log_compare: "Compare two time windows for new/resolved errors and perf delta",
40
- perf_snapshot: "Latest perf snapshot (project_id, page_id?)",
41
- perf_trend: "Perf over time (project_id, page_id?, since?, limit?)",
42
- scan_status: "Last scan jobs (project_id?)",
43
- list_projects: "List all projects",
44
- list_pages: "List pages for a project (project_id)",
45
- list_issues: "List grouped error issues (project_id?, status?, limit?)",
46
- resolve_issue: "Update issue status (id, status: open|resolved|ignored)",
47
- create_alert_rule: "Create alert rule (project_id, name, level, threshold_count, window_seconds, webhook_url?)",
48
- list_alert_rules: "List alert rules (project_id?)",
49
- delete_alert_rule: "Delete alert rule (id)",
50
- get_health: "Server health + DB stats",
51
- search_tools: "Search tools by keyword (query)",
52
- describe_tools: "List all tools",
39
+ function rp(idOrName?: string): string | undefined {
40
+ if (!idOrName) return undefined
41
+ return resolveProjectId(db, idOrName) ?? idOrName
42
+ }
43
+
44
+ // Tool registry with param signatures for discoverability
45
+ const TOOLS: Record<string, { desc: string; params: string }> = {
46
+ register_project: { desc: "Register a project", params: "(name, github_repo?, base_url?, description?)" },
47
+ register_page: { desc: "Register a page URL to a project", params: "(project_id, url, path?, name?)" },
48
+ create_scan_job: { desc: "Schedule headless page scans", params: "(project_id, schedule, page_id?)" },
49
+ resolve_project: { desc: "Resolve project name to ID", params: "(name)" },
50
+ log_push: { desc: "Push a single log entry", params: "(level, message, project_id?, service?, trace_id?, metadata?)" },
51
+ log_push_batch: { desc: "Push multiple log entries in one call", params: "(entries: Array<{level, message, project_id?, service?, trace_id?}>)" },
52
+ log_search: { desc: "Search logs", params: "(project_id?, level?, since?, until?, text?, service?, limit?=100, brief?=true)" },
53
+ log_tail: { desc: "Get N most recent logs", params: "(project_id?, n?=50, brief?=true)" },
54
+ log_count: { desc: "Count logs zero token cost, pure signal", params: "(project_id?, service?, level?, since?, until?)" },
55
+ log_recent_errors: { desc: "Shortcut: recent errors + fatals", params: "(project_id?, since?='1h', limit?=20)" },
56
+ log_summary: { desc: "Error/warn counts by service", params: "(project_id?, since?)" },
57
+ log_context: { desc: "All logs for a trace_id", params: "(trace_id, brief?=true)" },
58
+ log_context_from_id: { desc: "Trace context from a log ID (no trace_id needed)", params: "(log_id, brief?=true)" },
59
+ log_diagnose: { desc: "Full diagnosis: score, top errors, failing pages, perf regressions", params: "(project_id, since?='24h', include?=['top_errors','error_rate','failing_pages','perf'])" },
60
+ log_compare: { desc: "Diff two time windows for new/resolved errors", params: "(project_id, a_since, a_until, b_since, b_until)" },
61
+ log_session_context: { desc: "Logs + session metadata for a session_id", params: "(session_id, brief?=true)" },
62
+ perf_snapshot: { desc: "Latest performance snapshot", params: "(project_id, page_id?)" },
63
+ perf_trend: { desc: "Performance over time", params: "(project_id, page_id?, since?, limit?=50)" },
64
+ scan_status: { desc: "Last scan jobs", params: "(project_id?)" },
65
+ list_projects: { desc: "List all projects", params: "()" },
66
+ list_pages: { desc: "List pages for a project", params: "(project_id)" },
67
+ list_issues: { desc: "List grouped error issues", params: "(project_id?, status?, limit?=50)" },
68
+ resolve_issue: { desc: "Update issue status", params: "(id, status: open|resolved|ignored)" },
69
+ create_alert_rule: { desc: "Create alert rule", params: "(project_id, name, level?, threshold_count?, window_seconds?, webhook_url?)" },
70
+ list_alert_rules: { desc: "List alert rules", params: "(project_id?)" },
71
+ delete_alert_rule: { desc: "Delete alert rule", params: "(id)" },
72
+ get_health: { desc: "Server health + DB stats", params: "()" },
73
+ search_tools: { desc: "Search tools by keyword — returns names, descriptions, param signatures", params: "(query)" },
74
+ describe_tools: { desc: "List all tools with descriptions and param signatures", params: "()" },
53
75
  }
54
76
 
55
77
  server.tool("search_tools", { query: z.string() }, ({ query }) => {
56
78
  const q = query.toLowerCase()
57
- const matches = Object.entries(TOOLS).filter(([k, v]) => k.includes(q) || v.toLowerCase().includes(q))
58
- return { content: [{ type: "text", text: matches.map(([k, v]) => `${k}: ${v}`).join("\n") || "No matches" }] }
79
+ const matches = Object.entries(TOOLS).filter(([k, v]) => k.includes(q) || v.desc.toLowerCase().includes(q))
80
+ const text = matches.map(([k, v]) => `${k}${v.params} ${v.desc}`).join("\n") || "No matches"
81
+ return { content: [{ type: "text", text }] }
59
82
  })
60
83
 
61
84
  server.tool("describe_tools", {}, () => ({
62
- content: [{ type: "text", text: Object.entries(TOOLS).map(([k, v]) => `${k}: ${v}`).join("\n") }]
85
+ content: [{ type: "text", text: Object.entries(TOOLS).map(([k, v]) => `${k}${v.params} ${v.desc}`).join("\n") }]
63
86
  }))
64
87
 
88
+ server.tool("resolve_project", { name: z.string() }, ({ name }) => {
89
+ const id = resolveProjectId(db, name)
90
+ const project = id ? db.prepare("SELECT * FROM projects WHERE id = $id").get({ $id: id }) : null
91
+ return { content: [{ type: "text", text: JSON.stringify(project ?? { error: `Project '${name}' not found` }) }] }
92
+ })
93
+
65
94
  server.tool("register_project", {
66
95
  name: z.string(), github_repo: z.string().optional(), base_url: z.string().optional(), description: z.string().optional(),
67
96
  }, (args) => ({ content: [{ type: "text", text: JSON.stringify(createProject(db, args)) }] }))
68
97
 
69
98
  server.tool("register_page", {
70
99
  project_id: z.string(), url: z.string(), path: z.string().optional(), name: z.string().optional(),
71
- }, (args) => ({ content: [{ type: "text", text: JSON.stringify(createPage(db, args)) }] }))
100
+ }, (args) => ({ content: [{ type: "text", text: JSON.stringify(createPage(db, { ...args, project_id: rp(args.project_id) ?? args.project_id })) }] }))
72
101
 
73
102
  server.tool("create_scan_job", {
74
103
  project_id: z.string(), schedule: z.string(), page_id: z.string().optional(),
75
- }, (args) => ({ content: [{ type: "text", text: JSON.stringify(createJob(db, args)) }] }))
104
+ }, (args) => ({ content: [{ type: "text", text: JSON.stringify(createJob(db, { ...args, project_id: rp(args.project_id) ?? args.project_id })) }] }))
76
105
 
77
106
  server.tool("log_push", {
78
107
  level: z.enum(["debug", "info", "warn", "error", "fatal"]),
79
108
  message: z.string(),
80
- project_id: z.string().optional(),
81
- service: z.string().optional(),
82
- trace_id: z.string().optional(),
83
- session_id: z.string().optional(),
84
- agent: z.string().optional(),
85
- url: z.string().optional(),
109
+ project_id: z.string().optional(), service: z.string().optional(),
110
+ trace_id: z.string().optional(), session_id: z.string().optional(),
111
+ agent: z.string().optional(), url: z.string().optional(),
86
112
  metadata: z.record(z.unknown()).optional(),
87
113
  }, (args) => {
88
- const row = ingestLog(db, args)
114
+ const row = ingestLog(db, { ...args, project_id: rp(args.project_id) })
89
115
  return { content: [{ type: "text", text: `Logged: ${row.id}` }] }
90
116
  })
91
117
 
118
+ server.tool("log_push_batch", {
119
+ entries: z.array(z.object({
120
+ level: z.enum(["debug", "info", "warn", "error", "fatal"]),
121
+ message: z.string(),
122
+ project_id: z.string().optional(), service: z.string().optional(),
123
+ trace_id: z.string().optional(), metadata: z.record(z.unknown()).optional(),
124
+ })),
125
+ }, ({ entries }) => {
126
+ const rows = ingestBatch(db, entries.map(e => ({ ...e, project_id: rp(e.project_id) })))
127
+ return { content: [{ type: "text", text: `Logged ${rows.length} entries` }] }
128
+ })
129
+
92
130
  server.tool("log_search", {
93
131
  project_id: z.string().optional(), page_id: z.string().optional(),
94
132
  level: z.string().optional(), service: z.string().optional(),
@@ -96,34 +134,66 @@ server.tool("log_search", {
96
134
  text: z.string().optional(), trace_id: z.string().optional(),
97
135
  limit: z.number().optional(), brief: z.boolean().optional(),
98
136
  }, (args) => {
99
- const rows = searchLogs(db, { ...args, level: args.level ? (args.level.split(",") as LogLevel[]) : undefined })
137
+ const rows = searchLogs(db, {
138
+ ...args,
139
+ project_id: rp(args.project_id),
140
+ level: args.level ? (args.level.split(",") as LogLevel[]) : undefined,
141
+ since: parseTime(args.since) ?? args.since,
142
+ until: parseTime(args.until) ?? args.until,
143
+ })
100
144
  return { content: [{ type: "text", text: JSON.stringify(applyBrief(rows, args.brief !== false)) }] }
101
145
  })
102
146
 
103
147
  server.tool("log_tail", {
104
148
  project_id: z.string().optional(), n: z.number().optional(), brief: z.boolean().optional(),
105
149
  }, ({ project_id, n, brief }) => {
106
- const rows = tailLogs(db, project_id, n ?? 50)
150
+ const rows = tailLogs(db, rp(project_id), n ?? 50)
107
151
  return { content: [{ type: "text", text: JSON.stringify(applyBrief(rows, brief !== false)) }] }
108
152
  })
109
153
 
154
+ server.tool("log_count", {
155
+ project_id: z.string().optional(), service: z.string().optional(),
156
+ level: z.string().optional(), since: z.string().optional(), until: z.string().optional(),
157
+ }, (args) => ({
158
+ content: [{ type: "text", text: JSON.stringify(countLogs(db, { ...args, project_id: rp(args.project_id) })) }]
159
+ }))
160
+
161
+ server.tool("log_recent_errors", {
162
+ project_id: z.string().optional(), since: z.string().optional(), limit: z.number().optional(),
163
+ }, ({ project_id, since, limit }) => {
164
+ const rows = searchLogs(db, {
165
+ project_id: rp(project_id),
166
+ level: ["error", "fatal"],
167
+ since: parseTime(since ?? "1h"),
168
+ limit: limit ?? 20,
169
+ })
170
+ return { content: [{ type: "text", text: JSON.stringify(applyBrief(rows, true)) }] }
171
+ })
172
+
110
173
  server.tool("log_summary", {
111
174
  project_id: z.string().optional(), since: z.string().optional(),
112
175
  }, ({ project_id, since }) => ({
113
- content: [{ type: "text", text: JSON.stringify(summarizeLogs(db, project_id, since)) }]
176
+ content: [{ type: "text", text: JSON.stringify(summarizeLogs(db, rp(project_id), parseTime(since) ?? since)) }]
114
177
  }))
115
178
 
116
179
  server.tool("log_context", {
117
180
  trace_id: z.string(), brief: z.boolean().optional(),
118
- }, ({ trace_id, brief }) => {
119
- const rows = getLogContext(db, trace_id)
120
- return { content: [{ type: "text", text: JSON.stringify(applyBrief(rows, brief !== false)) }] }
121
- })
181
+ }, ({ trace_id, brief }) => ({
182
+ content: [{ type: "text", text: JSON.stringify(applyBrief(getLogContext(db, trace_id), brief !== false)) }]
183
+ }))
184
+
185
+ server.tool("log_context_from_id", {
186
+ log_id: z.string(), brief: z.boolean().optional(),
187
+ }, ({ log_id, brief }) => ({
188
+ content: [{ type: "text", text: JSON.stringify(applyBrief(getLogContextFromId(db, log_id), brief !== false)) }]
189
+ }))
122
190
 
123
191
  server.tool("log_diagnose", {
124
- project_id: z.string(), since: z.string().optional(),
125
- }, ({ project_id, since }) => ({
126
- content: [{ type: "text", text: JSON.stringify(diagnose(db, project_id, since)) }]
192
+ project_id: z.string(),
193
+ since: z.string().optional(),
194
+ include: z.array(z.enum(["top_errors", "error_rate", "failing_pages", "perf"])).optional(),
195
+ }, ({ project_id, since, include }) => ({
196
+ content: [{ type: "text", text: JSON.stringify(diagnose(db, rp(project_id) ?? project_id, since, include)) }]
127
197
  }))
128
198
 
129
199
  server.tool("log_compare", {
@@ -131,26 +201,35 @@ server.tool("log_compare", {
131
201
  a_since: z.string(), a_until: z.string(),
132
202
  b_since: z.string(), b_until: z.string(),
133
203
  }, ({ project_id, a_since, a_until, b_since, b_until }) => ({
134
- content: [{ type: "text", text: JSON.stringify(compare(db, project_id, a_since, a_until, b_since, b_until)) }]
204
+ content: [{ type: "text", text: JSON.stringify(compare(db, rp(project_id) ?? project_id,
205
+ parseTime(a_since) ?? a_since, parseTime(a_until) ?? a_until,
206
+ parseTime(b_since) ?? b_since, parseTime(b_until) ?? b_until)) }]
135
207
  }))
136
208
 
209
+ server.tool("log_session_context", {
210
+ session_id: z.string(), brief: z.boolean().optional(),
211
+ }, async ({ session_id, brief }) => {
212
+ const ctx = await getSessionContext(db, session_id)
213
+ return { content: [{ type: "text", text: JSON.stringify({ ...ctx, logs: applyBrief(ctx.logs, brief !== false) }) }] }
214
+ })
215
+
137
216
  server.tool("perf_snapshot", {
138
217
  project_id: z.string(), page_id: z.string().optional(),
139
218
  }, ({ project_id, page_id }) => {
140
- const snap = getLatestSnapshot(db, project_id, page_id)
219
+ const snap = getLatestSnapshot(db, rp(project_id) ?? project_id, page_id)
141
220
  return { content: [{ type: "text", text: JSON.stringify(snap ? { ...snap, label: scoreLabel(snap.score) } : null) }] }
142
221
  })
143
222
 
144
223
  server.tool("perf_trend", {
145
224
  project_id: z.string(), page_id: z.string().optional(), since: z.string().optional(), limit: z.number().optional(),
146
225
  }, ({ project_id, page_id, since, limit }) => ({
147
- content: [{ type: "text", text: JSON.stringify(getPerfTrend(db, project_id, page_id, since, limit ?? 50)) }]
226
+ content: [{ type: "text", text: JSON.stringify(getPerfTrend(db, rp(project_id) ?? project_id, page_id, parseTime(since) ?? since, limit ?? 50)) }]
148
227
  }))
149
228
 
150
229
  server.tool("scan_status", {
151
230
  project_id: z.string().optional(),
152
231
  }, ({ project_id }) => ({
153
- content: [{ type: "text", text: JSON.stringify(listJobs(db, project_id)) }]
232
+ content: [{ type: "text", text: JSON.stringify(listJobs(db, rp(project_id))) }]
154
233
  }))
155
234
 
156
235
  server.tool("list_projects", {}, () => ({
@@ -158,13 +237,13 @@ server.tool("list_projects", {}, () => ({
158
237
  }))
159
238
 
160
239
  server.tool("list_pages", { project_id: z.string() }, ({ project_id }) => ({
161
- content: [{ type: "text", text: JSON.stringify(listPages(db, project_id)) }]
240
+ content: [{ type: "text", text: JSON.stringify(listPages(db, rp(project_id) ?? project_id)) }]
162
241
  }))
163
242
 
164
243
  server.tool("list_issues", {
165
244
  project_id: z.string().optional(), status: z.string().optional(), limit: z.number().optional(),
166
245
  }, ({ project_id, status, limit }) => ({
167
- content: [{ type: "text", text: JSON.stringify(listIssues(db, project_id, status, limit ?? 50)) }]
246
+ content: [{ type: "text", text: JSON.stringify(listIssues(db, rp(project_id), status, limit ?? 50)) }]
168
247
  }))
169
248
 
170
249
  server.tool("resolve_issue", {
@@ -178,12 +257,12 @@ server.tool("create_alert_rule", {
178
257
  level: z.string().optional(), service: z.string().optional(),
179
258
  threshold_count: z.number().optional(), window_seconds: z.number().optional(),
180
259
  action: z.enum(["webhook", "log"]).optional(), webhook_url: z.string().optional(),
181
- }, (args) => ({ content: [{ type: "text", text: JSON.stringify(createAlertRule(db, args)) }] }))
260
+ }, (args) => ({ content: [{ type: "text", text: JSON.stringify(createAlertRule(db, { ...args, project_id: rp(args.project_id) ?? args.project_id })) }] }))
182
261
 
183
262
  server.tool("list_alert_rules", {
184
263
  project_id: z.string().optional(),
185
264
  }, ({ project_id }) => ({
186
- content: [{ type: "text", text: JSON.stringify(listAlertRules(db, project_id)) }]
265
+ content: [{ type: "text", text: JSON.stringify(listAlertRules(db, rp(project_id))) }]
187
266
  }))
188
267
 
189
268
  server.tool("delete_alert_rule", { id: z.string() }, ({ id }) => {
@@ -1,6 +1,7 @@
1
1
  #!/usr/bin/env bun
2
2
  import { Hono } from "hono"
3
3
  import { cors } from "hono/cors"
4
+ import { serveStatic } from "hono/bun"
4
5
  import { getDb } from "../db/index.ts"
5
6
  import { getBrowserScript } from "../lib/browser-script.ts"
6
7
  import { getHealth } from "../lib/health.ts"
@@ -37,7 +38,9 @@ app.route("/api/issues", issuesRoutes(db))
37
38
  app.route("/api/perf", perfRoutes(db))
38
39
 
39
40
  app.get("/health", (c) => c.json(getHealth(db)))
40
- app.get("/", (c) => c.json({ service: "@hasna/logs", port: PORT, status: "ok" }))
41
+ app.get("/dashboard", (c) => c.redirect("/dashboard/"))
42
+ app.use("/dashboard/*", serveStatic({ root: "./dashboard/dist", rewriteRequestPath: (p) => p.replace(/^\/dashboard/, "") }))
43
+ app.get("/", (c) => c.json({ service: "@hasna/logs", port: PORT, status: "ok", dashboard: `http://localhost:${PORT}/dashboard/` }))
41
44
 
42
45
  // Start scheduler
43
46
  startScheduler(db)
@@ -4,6 +4,9 @@ import { ingestBatch, ingestLog } from "../../lib/ingest.ts"
4
4
  import { getLogContext, searchLogs, tailLogs } from "../../lib/query.ts"
5
5
  import { summarizeLogs } from "../../lib/summarize.ts"
6
6
  import { exportToCsv, exportToJson } from "../../lib/export.ts"
7
+ import { countLogs } from "../../lib/count.ts"
8
+ import { parseTime } from "../../lib/parse-time.ts"
9
+ import { resolveProjectId } from "../../lib/projects.ts"
7
10
  import type { LogEntry, LogLevel } from "../../types/index.ts"
8
11
 
9
12
  export function logsRoutes(db: Database) {
@@ -52,10 +55,34 @@ export function logsRoutes(db: Database) {
52
55
  // GET /api/logs/summary
53
56
  app.get("/summary", (c) => {
54
57
  const { project_id, since } = c.req.query()
55
- const summary = summarizeLogs(db, project_id || undefined, since || undefined)
58
+ const summary = summarizeLogs(db, resolveProjectId(db, project_id) || undefined, parseTime(since) || since || undefined)
56
59
  return c.json(summary)
57
60
  })
58
61
 
62
+ // GET /api/logs/count
63
+ app.get("/count", (c) => {
64
+ const { project_id, service, level, since, until } = c.req.query()
65
+ return c.json(countLogs(db, {
66
+ project_id: resolveProjectId(db, project_id) || undefined,
67
+ service: service || undefined,
68
+ level: level || undefined,
69
+ since: since || undefined,
70
+ until: until || undefined,
71
+ }))
72
+ })
73
+
74
+ // GET /api/logs/recent-errors
75
+ app.get("/recent-errors", (c) => {
76
+ const { project_id, since, limit } = c.req.query()
77
+ const rows = searchLogs(db, {
78
+ project_id: resolveProjectId(db, project_id) || undefined,
79
+ level: ["error", "fatal"],
80
+ since: parseTime(since || "1h"),
81
+ limit: limit ? Number(limit) : 20,
82
+ })
83
+ return c.json(rows.map(r => ({ id: r.id, timestamp: r.timestamp, level: r.level, message: r.message, service: r.service, age_seconds: Math.floor((Date.now() - new Date(r.timestamp).getTime()) / 1000) })))
84
+ })
85
+
59
86
  // GET /api/logs/:trace_id/context
60
87
  app.get("/:trace_id/context", (c) => {
61
88
  const rows = getLogContext(db, c.req.param("trace_id"))