@loghead/core 0.1.1 → 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/build/loggerhead +0 -0
- package/build/npm/bin/loghead +0 -0
- package/build/npm/package.json +32 -0
- package/deno.lock +1003 -0
- package/dist/api/server.js +111 -0
- package/dist/cli_main.js +68 -0
- package/dist/db/client.js +53 -0
- package/dist/db/migrate.js +65 -0
- package/dist/services/auth.js +48 -0
- package/dist/services/db.js +157 -0
- package/dist/services/ollama.js +40 -0
- package/dist/types.js +2 -0
- package/dist/ui/main.js +71 -0
- package/dist/utils/startup.js +55 -0
- package/loggerhead.db +0 -0
- package/package.json +28 -24
- package/src/api/server.ts +123 -0
- package/src/cli_main.ts +66 -0
- package/src/db/client.ts +19 -0
- package/src/db/migrate.ts +67 -0
- package/src/services/auth.ts +50 -0
- package/src/services/db.ts +171 -0
- package/src/services/ollama.ts +39 -0
- package/src/tests/db.test.ts +63 -0
- package/src/types.ts +31 -0
- package/src/ui/main.ts +78 -0
- package/src/utils/startup.ts +52 -0
- package/tsconfig.json +15 -0
- /package/{bin → build}/loghead +0 -0
- /package/{README.md → build/npm/README.md} +0 -0
package/package.json
CHANGED
@@ -1,32 +1,36 @@
 {
   "name": "@loghead/core",
-  "version": "0.1.…
-  "description": "…
+  "version": "0.1.3",
+  "description": "Core API and Database for Loghead",
+  "main": "dist/index.js",
   "bin": {
-    "loghead": "./…
+    "loghead": "./dist/cli_main.js"
   },
-  "files": [
-    "bin"
-  ],
-  "os": [
-    "darwin"
-  ],
-  "cpu": [
-    "arm64"
-  ],
   "scripts": {
-    "…
+    "start": "tsx src/cli_main.ts start",
+    "ui": "tsx src/cli_main.ts ui",
+    "build": "tsc",
+    "dev": "tsx watch src/cli_main.ts",
+    "deploy": "npm publish --access public"
   },
-  "…
-  …
-  …
-  "…
+  "dependencies": {
+    "better-sqlite3": "^9.4.0",
+    "express": "^4.18.2",
+    "cors": "^2.8.5",
+    "dotenv": "^16.4.1",
+    "yargs": "^17.7.2",
+    "jsonwebtoken": "^9.0.2",
+    "chalk": "^5.3.0",
+    "inquirer": "^9.2.14",
+    "cli-table3": "^0.6.3",
+    "ollama": "^0.5.9"
   },
-  "…
-  "…
-  "…
-  …
-  …
-  "…
+  "devDependencies": {
+    "@types/better-sqlite3": "^7.6.9",
+    "@types/express": "^4.17.21",
+    "@types/cors": "^2.8.17",
+    "@types/yargs": "^17.0.32",
+    "@types/jsonwebtoken": "^9.0.5",
+    "@types/inquirer": "^9.0.7"
   }
-}
+}
package/src/api/server.ts
ADDED

@@ -0,0 +1,123 @@

import express from "express";
import cors from "cors";
import { DbService } from "../services/db";
import { AuthService } from "../services/auth";
import chalk from "chalk";

const auth = new AuthService();

export async function startApiServer(db: DbService) {
  const app = express();
  const port = process.env.PORT || 4567;

  app.use(cors());
  app.use(express.json());

  await auth.initialize();

  console.log(chalk.bold.green(`✔ Loghead Core API Server running on http://localhost:${port}`));

  app.post("/api/ingest", async (req, res) => {
    try {
      const authHeader = req.headers.authorization;
      if (!authHeader || !authHeader.startsWith("Bearer ")) {
        return res.status(401).send("Unauthorized: Missing token");
      }
      const token = authHeader.split(" ")[1];
      const payload = await auth.verifyToken(token);
      if (!payload || !payload.streamId) {
        return res.status(401).send("Unauthorized: Invalid token");
      }

      const { streamId, logs } = req.body;

      if (streamId !== payload.streamId) {
        return res.status(403).send("Forbidden: Token does not match streamId");
      }

      if (!logs) {
        return res.status(400).send("Missing logs");
      }

      const logEntries = Array.isArray(logs) ? logs : [logs];

      for (const log of logEntries) {
        let content = "";
        let metadata = {};

        if (typeof log === "string") {
          content = log;
        } else if (typeof log === "object") {
          content = log.content || JSON.stringify(log);
          metadata = log.metadata || {};
        }

        if (content) {
          await db.addLog(streamId, content, metadata);
        }
      }

      res.json({ success: true, count: logEntries.length });
    } catch (e) {
      console.error("Ingest error:", e);
      res.status(500).json({ error: String(e) });
    }
  });

  app.get("/api/projects", (req, res) => {
    const projects = db.listProjects();
    res.json(projects);
  });

  app.post("/api/projects", (req, res) => {
    const projects = db.listProjects();
    res.json(projects);
  });

  app.get("/api/streams", (req, res) => {
    const projectId = req.query.projectId as string;
    if (projectId) {
      const streams = db.listStreams(projectId);
      res.json(streams);
    } else {
      res.status(400).send("Missing projectId");
    }
  });

  app.post("/api/streams", (req, res) => {
    const projectId = req.body.projectId;
    if (projectId) {
      const streams = db.listStreams(projectId);
      res.json(streams);
    } else {
      res.status(400).send("Missing projectId");
    }
  });

  app.post("/api/streams/create", async (req, res) => {
    const body = req.body;
    const stream = await db.createStream(body.projectId, body.type, body.name, {});
    res.json(stream);
  });

  app.get("/api/logs", async (req, res) => {
    const streamId = req.query.streamId as string;
    if (!streamId) {
      return res.status(400).send("Missing streamId");
    }
    const limit = parseInt((req.query.limit as string) || "50");
    const query = req.query.q as string;

    let logs;
    if (query) {
      logs = await db.searchLogs(streamId, query, limit);
    } else {
      logs = db.getRecentLogs(streamId, limit);
    }
    res.json(logs);
  });

  app.listen(port, () => {
    // listening
  });
}
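For orientation, the ingest endpoint above expects a Bearer token whose stream claim matches the `streamId` sent in the body, and it accepts either plain strings or `{ content, metadata }` objects. A minimal client-side sketch follows; the host, stream id, and token values are placeholders, not part of the package:

// Sketch: ship a batch of logs to a running loghead core server.
// STREAM_ID and STREAM_TOKEN are hypothetical; real values come from
// db.createStream(...) / the `streams add` CLI command shown below.
const STREAM_ID = "00000000-0000-0000-0000-000000000000";
const STREAM_TOKEN = "<jwt issued for this stream>";

const res = await fetch("http://localhost:4567/api/ingest", {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    Authorization: `Bearer ${STREAM_TOKEN}`,
  },
  body: JSON.stringify({
    streamId: STREAM_ID,
    logs: [
      "plain string entries are accepted as-is",
      { content: "structured entries too", metadata: { level: "info" } },
    ],
  }),
});
console.log(await res.json()); // e.g. { success: true, count: 2 }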
package/src/cli_main.ts
ADDED

@@ -0,0 +1,66 @@

#!/usr/bin/env node
import yargs from "yargs";
import { hideBin } from "yargs/helpers";
import { DbService } from "./services/db";
import { startApiServer } from "./api/server";
import { migrate } from "./db/migrate";
// import { ensureInfrastructure } from "./utils/startup"; // Might need adjustment
import { startTui } from "./ui/main";

const db = new DbService();

async function main() {
  const argv = await yargs(hideBin(process.argv))
    .command("init", "Initialize/Migrate database", {}, async () => {
      console.log("Initializing database...");
      await migrate();
    })
    .command("start", "Start API Server", {}, async () => {
      // await ensureInfrastructure();
      await startApiServer(db);
    })
    .command("ui", "Start Terminal UI", {}, async () => {
      await startTui(db);
    })
    .command("projects <cmd> [name]", "Manage projects", (yargs) => {
      yargs
        .command("list", "List projects", {}, () => {
          const projects = db.listProjects();
          console.table(projects);
        })
        .command("add <name>", "Add project", {}, (argv) => {
          const p = db.createProject(argv.name as string);
          console.log(`Project created: ${p.id}`);
        });
    })
    .command("streams <cmd> [type] [name]", "Manage streams", (yargs) => {
      yargs
        .command("list", "List streams", {
          project: { type: "string", demandOption: true }
        }, (argv) => {
          const streams = db.listStreams(argv.project);
          console.table(streams);
        })
        .command("add <type> <name>", "Add stream", {
          project: { type: "string", demandOption: true },
          container: { type: "string" }
        }, async (argv) => {
          const config: Record<string, unknown> = {};
          if (argv.type === "docker" && argv.container) {
            config.container = argv.container;
          }
          const s = await db.createStream(argv.project, argv.type as string, argv.name as string, config);
          console.log(`Stream created: ${s.id}`);
          console.log(`Token: ${s.token}`);
        });
    })
    .demandCommand(1)
    .strict()
    .help()
    .parse();
}

main().catch(err => {
  console.error(err);
  process.exit(1);
});
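The published `bin` entry now points at the compiled `dist/cli_main.js`, so the CLI commands are thin wrappers over the exports in this diff. A rough programmatic equivalent of `loghead init` followed by `loghead start` (a sketch assuming the module layout shown here, not documented package API):

// Sketch: initialize the schema and start the API server without yargs.
import { DbService } from "./services/db";
import { startApiServer } from "./api/server";
import { migrate } from "./db/migrate";

migrate();                              // create tables if they do not exist
await startApiServer(new DbService());  // listens on PORT or 4567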
package/src/db/client.ts
ADDED

@@ -0,0 +1,19 @@

import Database from "better-sqlite3";
import * as sqliteVec from "sqlite-vec";
import path from "path";
import dotenv from "dotenv";

dotenv.config();

const dbPath = process.env.LOGHEAD_DB_PATH || "loghead.db";

const db = new Database(dbPath);

// Load sqlite-vec extension
try {
  sqliteVec.load(db);
} catch (e) {
  console.error("Failed to load sqlite-vec extension:", e);
}

export { db };
package/src/db/migrate.ts
ADDED

@@ -0,0 +1,67 @@

import { db } from "./client";

export function migrate(verbose = true) {
  if (verbose) console.log("Running migrations...");

  // Enable foreign keys
  db.exec("PRAGMA foreign_keys = ON;");

  // System Config table (for secrets, etc.)
  db.exec(`
    CREATE TABLE IF NOT EXISTS system_config (
      key TEXT PRIMARY KEY,
      value TEXT NOT NULL
    );
  `);

  // Projects table
  db.exec(`
    CREATE TABLE IF NOT EXISTS projects (
      id TEXT PRIMARY KEY,
      name TEXT NOT NULL,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP
    );
  `);

  // Data Streams table
  db.exec(`
    CREATE TABLE IF NOT EXISTS data_streams (
      id TEXT PRIMARY KEY,
      project_id TEXT,
      type TEXT NOT NULL,
      name TEXT NOT NULL,
      config TEXT DEFAULT '{}',
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
      FOREIGN KEY(project_id) REFERENCES projects(id) ON DELETE CASCADE
    );
  `);

  // Logs table
  db.exec(`
    CREATE TABLE IF NOT EXISTS logs (
      id TEXT PRIMARY KEY,
      stream_id TEXT,
      content TEXT NOT NULL,
      timestamp DATETIME DEFAULT CURRENT_TIMESTAMP,
      metadata TEXT DEFAULT '{}',
      FOREIGN KEY(stream_id) REFERENCES data_streams(id) ON DELETE CASCADE
    );
  `);

  // Vector table (using sqlite-vec)
  // Assuming 1024 dimensions for qwen3-embedding:0.6b (check actual dim)
  // qwen2.5-0.5b is 1536?
  // qwen-embedding-0.6b might be 384 or 1024?
  // Let's assume 1024 as per previous code.
  try {
    db.exec(`
      CREATE VIRTUAL TABLE IF NOT EXISTS vec_logs USING vec0(
        embedding float[1024]
      );
    `);
  } catch (e) {
    console.warn("Failed to create virtual vector table. Is sqlite-vec loaded?", e);
  }

  if (verbose) console.log("Migrations complete.");
}
package/src/services/auth.ts
ADDED

@@ -0,0 +1,50 @@

import jwt from "jsonwebtoken";
import { db } from "../db/client";
import { randomBytes } from "crypto";

// Helper type for DB access
// deno-lint-ignore no-explicit-any
type DbAny = any;

export class AuthService {
  private secretKey: string | null = null;

  async initialize() {
    if (this.secretKey) return;

    // Try to load secret from DB
    const row = (db.prepare("SELECT value FROM system_config WHERE key = 'jwt_secret'") as unknown as DbAny).get();

    let rawSecret = row?.value;

    if (!rawSecret) {
      // Generate new secret
      rawSecret = randomBytes(64).toString('hex');
      (db.prepare("INSERT INTO system_config (key, value) VALUES ('jwt_secret', ?)") as unknown as DbAny).run(rawSecret);
    }

    this.secretKey = rawSecret;
  }

  async createStreamToken(streamId: string): Promise<string> {
    await this.initialize();
    if (!this.secretKey) throw new Error("Auth not initialized");

    const token = jwt.sign({ sub: streamId, iss: "loghead" }, this.secretKey, { algorithm: "HS512" });
    return token;
  }

  async verifyToken(token: string): Promise<{ streamId: string } | null> {
    await this.initialize();
    if (!this.secretKey) throw new Error("Auth not initialized");

    try {
      const payload = jwt.verify(token, this.secretKey, { issuer: "loghead", algorithms: ["HS512"] }) as jwt.JwtPayload;
      if (!payload.sub) return null;
      return { streamId: payload.sub };
    } catch (e) {
      console.error("Token verification failed:", e);
      return null;
    }
  }
}
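AuthService lazily provisions an HS512 secret in the `system_config` table on first use, so callers only ever handle tokens. A small usage sketch, assuming the class as reconstructed above (the stream id is a placeholder):

// Sketch: issue a per-stream token, then verify it on the way back in.
import { AuthService } from "./services/auth";

const auth = new AuthService();
const token = await auth.createStreamToken("my-stream-id");

const payload = await auth.verifyToken(token);
// payload is { streamId: "my-stream-id" } on success, or null if the
// signature, issuer, or algorithm check fails.
console.log(payload?.streamId);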
package/src/services/db.ts
ADDED

@@ -0,0 +1,171 @@

import { db } from "../db/client";
import { OllamaService } from "./ollama";
import { AuthService } from "./auth";
import { Project, Stream, Log, SearchResult } from "../types";
import { randomUUID } from "crypto";

const ollama = new OllamaService();
const auth = new AuthService();

// Helper to cast DB results to avoid no-explicit-any
// deno-lint-ignore no-explicit-any
type DbAny = any;

export class DbService {
  createProject(name: string): Project {
    const id = randomUUID();
    (db.prepare("INSERT INTO projects (id, name) VALUES (?, ?)") as unknown as DbAny).run(id, name);
    return this.getProject(id);
  }

  getProject(id: string): Project {
    return (db.prepare("SELECT * FROM projects WHERE id = ?") as unknown as DbAny).get(id);
  }

  deleteProject(id: string): boolean {
    (db.prepare("DELETE FROM projects WHERE id = ?") as unknown as DbAny).run(id);
    return true;
  }

  listProjects(): Project[] {
    console.error("Listing projects...");
    try {
      const projects = (db.prepare("SELECT * FROM projects ORDER BY created_at DESC") as unknown as DbAny).all();
      console.error(`Found ${projects.length} projects.`);
      return projects.map((p: Project) => {
        const streams = (db.prepare("SELECT * FROM data_streams WHERE project_id = ?") as unknown as DbAny).all(p.id);
        return { ...p, streams };
      });
    } catch (e) {
      console.error("Error in listProjects:", e);
      throw e;
    }
  }

  async createStream(projectId: string, type: string, name: string, config: Record<string, unknown> = {}): Promise<Stream & { token: string }> {
    const id = randomUUID();
    (db.prepare("INSERT INTO data_streams (id, project_id, type, name, config) VALUES (?, ?, ?, ?, ?)") as unknown as DbAny).run(
      id, projectId, type, name, JSON.stringify(config)
    );

    const token = await auth.createStreamToken(id);
    const stream = this.getStream(id);

    return { ...stream, token };
  }

  getStream(id: string): Stream {
    const stream = (db.prepare("SELECT * FROM data_streams WHERE id = ?") as unknown as DbAny).get(id);
    if (stream && typeof stream.config === "string") {
      try { stream.config = JSON.parse(stream.config); } catch { /* ignore */ }
    }
    return stream;
  }

  deleteStream(id: string): boolean {
    (db.prepare("DELETE FROM data_streams WHERE id = ?") as unknown as DbAny).run(id);
    return true;
  }

  listStreams(projectId: string): Stream[] {
    const streams = (db.prepare("SELECT * FROM data_streams WHERE project_id = ? ORDER BY created_at DESC") as unknown as DbAny).all(projectId);
    return streams.map((s: Stream) => {
      if (typeof s.config === "string") try { s.config = JSON.parse(s.config); } catch { /* ignore */ }
      return s;
    });
  }

  async addLog(streamId: string, content: string, metadata: Record<string, unknown> = {}): Promise<{ id: string }> {
    // Generate embedding
    let embedding: number[] | null = null;
    try {
      embedding = await ollama.generateEmbedding(content);
    } catch (_e) {
      // console.warn("Embedding failed", _e);
    }

    const id = randomUUID();
    const metadataStr = JSON.stringify(metadata);

    // Manual Transaction
    const insertTx = db.transaction(() => {
      // 1. Insert into logs
      (db.prepare("INSERT INTO logs (id, stream_id, content, metadata) VALUES (?, ?, ?, ?)") as unknown as DbAny).run(
        id, streamId, content, metadataStr
      );

      // 2. Get rowid
      const rowInfo = (db.prepare("SELECT last_insert_rowid() as rowid") as unknown as DbAny).get();
      const rowid = rowInfo.rowid;

      // 3. Insert into vec_logs if embedding exists
      if (embedding && embedding.length > 0) {
        const vectorJson = JSON.stringify(embedding);
        (db.prepare("INSERT INTO vec_logs(rowid, embedding) VALUES (?, ?)") as unknown as DbAny).run(rowid, vectorJson);
      }
    });

    try {
      insertTx();
    } catch (e) {
      throw e;
    }

    return { id };
  }

  async searchLogs(streamId: string, query: string, limit = 10): Promise<SearchResult[]> {
    const embedding = await ollama.generateEmbedding(query);
    if (!embedding) return [];

    const vectorJson = JSON.stringify(embedding);

    // KNN Search
    const rows = (db.prepare(`
      SELECT l.content, l.timestamp, l.metadata, v.distance
      FROM vec_logs v
      JOIN logs l ON l.rowid = v.rowid
      WHERE v.embedding MATCH ? AND k = ? AND l.stream_id = ?
      ORDER BY v.distance
    `) as unknown as DbAny).all(vectorJson, limit, streamId);

    return rows.map((row: { content: string; timestamp: string; metadata: string; distance: number }) => {
      let meta: Record<string, unknown> | undefined;
      try { meta = JSON.parse(row.metadata); } catch { /* ignore */ }
      return {
        content: row.content,
        timestamp: row.timestamp,
        similarity: 1 - row.distance, // Rough approx
        metadata: (meta && Object.keys(meta).length > 0) ? meta : undefined
      };
    });
  }

  getRecentLogs(streamId: string, limit = 50): Log[] {
    const rows = (db.prepare(`
      SELECT content, timestamp, metadata FROM logs
      WHERE stream_id = ?
      ORDER BY timestamp DESC
      LIMIT ?
    `) as unknown as DbAny).all(streamId, limit);

    return rows.map((row: Log) => {
      let meta = row.metadata;
      if (typeof meta === "string") {
        try { meta = JSON.parse(meta); } catch { /* ignore */ }
      }
      return {
        id: row.id, // Ensure id is included if needed, or update Log type
        stream_id: streamId,
        content: row.content,
        timestamp: row.timestamp,
        metadata: (typeof meta === "object" && meta && Object.keys(meta).length > 0) ? meta : {}
      } as Log;
    });
  }

  close() {
    db.close();
  }
}
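Taken together, DbService is the single entry point the CLI and API share for projects, streams, and logs; embeddings are best-effort, so addLog still succeeds when no Ollama server is reachable. An end-to-end sketch, assuming migrations have been run and using the reconstructed module paths above:

// Sketch: create a project and stream, ingest a log, then query it back.
import { migrate } from "./db/migrate";
import { DbService } from "./services/db";

migrate(false);
const svc = new DbService();

const project = svc.createProject("demo");
const stream = await svc.createStream(project.id, "terminal", "local shell");
console.log("ingest token:", stream.token);

await svc.addLog(stream.id, "server started on port 4567", { level: "info" });

const recent = svc.getRecentLogs(stream.id, 10);          // plain SQL, newest first
const hits = await svc.searchLogs(stream.id, "startup");  // vector KNN via sqlite-vec
console.log(recent.length, hits.length);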
package/src/services/ollama.ts
ADDED

@@ -0,0 +1,39 @@

import { Ollama } from "ollama";

export class OllamaService {
  private client: Ollama;
  private model: string;

  constructor(host = "http://localhost:11434", model = "qwen3-embedding:0.6b") {
    this.client = new Ollama({ host });
    this.model = model;
  }

  async generateEmbedding(prompt: string): Promise<number[]> {
    try {
      const response = await this.client.embeddings({
        model: this.model,
        prompt: prompt,
      });
      return response.embedding;
    } catch (error) {
      console.error("Failed to generate embedding:", error);
      throw error;
    }
  }

  async ensureModel() {
    try {
      const list = await this.client.list();
      const exists = list.models.some((m) => m.name.includes(this.model));

      if (!exists) {
        console.log(`Model ${this.model} not found. Pulling...`);
        await this.client.pull({ model: this.model });
        console.log("Model pulled.");
      }
    } catch (e) {
      console.warn("Could not check/pull ollama model:", e);
    }
  }
}
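The Ollama host and embedding model are constructor arguments, so the default `qwen3-embedding:0.6b` can be swapped out; note that the `vec_logs` table in migrate.ts is declared as `float[1024]`, so any replacement model's embedding dimension would have to match. A short sketch with example (non-default, hypothetical) host and model values:

// Sketch: point the service at another Ollama host/model and pull it on demand.
import { OllamaService } from "./services/ollama";

const ollama = new OllamaService("http://192.168.1.10:11434", "nomic-embed-text");
await ollama.ensureModel();                               // pulls the model if missing
const vec = await ollama.generateEmbedding("hello logs");
console.log(vec.length);                                  // model-dependent dimension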
package/src/tests/db.test.ts
ADDED

@@ -0,0 +1,63 @@

import { assertEquals, assertExists } from "@std/assert";
import { migrate } from "../db/migrate.ts";

// Set up environment to use in-memory DB before importing client
Deno.env.set("LOGHEAD_DB_PATH", ":memory:");

// Dynamic import to ensure env var is set before module loads
// We trigger the side-effect of client.ts (creating DB connection)
await import("../db/client.ts");
const { DbService } = await import("../services/db.ts");

Deno.test("DbService - Project Management", async (t) => {
  // @ts-ignore: Dynamic import returns any
  const service = new DbService();

  // Initialize DB schema
  migrate(false);

  await t.step("createProject creates a project", () => {
    const project = service.createProject("Test Project");
    assertExists(project.id);
    assertEquals(project.name, "Test Project");
  });

  await t.step("listProjects returns projects", () => {
    const projects = service.listProjects();
    assertExists(projects.find(p => p.name === "Test Project"));
  });

  await t.step("deleteProject deletes a project", () => {
    const projects = service.listProjects();
    const project = projects.find(p => p.name === "Test Project");
    if (project) {
      service.deleteProject(project.id);

      const remaining = service.listProjects();
      assertEquals(remaining.find(p => p.name === "Test Project"), undefined);
    }
  });
});

Deno.test("DbService - Stream Management", async (t) => {
  // @ts-ignore: Dynamic import returns any
  const service = new DbService();

  // Create a fresh project for this test suite
  const project = service.createProject("Stream Project");

  await t.step("createStream creates a stream", () => {
    const stream = service.createStream(project.id, "terminal", "My Stream", { foo: "bar" });
    assertExists(stream.id);
    assertEquals(stream.name, "My Stream");
    assertEquals(stream.type, "terminal");
    // @ts-ignore: Property access
    assertEquals(stream.config, { foo: "bar" });
  });

  await t.step("listStreams returns streams", () => {
    const streams = service.listStreams(project.id);
    assertEquals(streams.length, 1);
    assertEquals(streams[0].name, "My Stream");
  });
});