@tikoci/rosetta 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json ADDED
@@ -0,0 +1,34 @@
1
+ {
2
+ "name": "@tikoci/rosetta",
3
+ "version": "0.2.0",
4
+ "description": "RouterOS documentation as SQLite FTS5 — RAG search + command glossary via MCP",
5
+ "type": "module",
6
+ "license": "MIT",
7
+ "bin": {
8
+ "rosetta": "bin/rosetta.js"
9
+ },
10
+ "repository": {
11
+ "type": "git",
12
+ "url": "https://github.com/tikoci/rosetta.git"
13
+ },
14
+ "files": [
15
+ "bin/",
16
+ "src/",
17
+ "matrix/"
18
+ ],
19
+ "scripts": {
20
+ "test": "bun test",
21
+ "typecheck": "bun tsc --noEmit",
22
+ "lint": "biome check src/"
23
+ },
24
+ "dependencies": {
25
+ "@modelcontextprotocol/sdk": "^1.27.1",
26
+ "zod": "^4.3.6"
27
+ },
28
+ "devDependencies": {
29
+ "@biomejs/biome": "^2.4.7",
30
+ "@types/bun": "^1.3.10",
31
+ "linkedom": "^0.18.9",
32
+ "typescript": "^6.0.2"
33
+ }
34
+ }
@@ -0,0 +1,267 @@
1
#!/usr/bin/env bun

/**
 * assess-html.ts — Evaluate the Confluence HTML export for parseability.
 *
 * Analyzes all HTML files in the export directory and outputs:
 * - Page count, text volume, code blocks, table counts
 * - Property table detection (confluenceTable with Property|Description headers)
 * - Breadcrumb depth distribution
 * - Menu paths found in code blocks and body text
 * - Edge cases and anomalies
 *
 * Usage: bun run src/assess-html.ts [html-dir]
 */

import { readdirSync, readFileSync } from "node:fs";
import { basename, resolve } from "node:path";
import { parseHTML } from "linkedom";

// Export directory: first CLI arg, or the default unpacked-archive location.
const HTML_DIR = process.argv[2] || resolve(import.meta.dirname, "../box/latest/ROS");

const htmlFiles = readdirSync(HTML_DIR)
  .filter((f) => f.endsWith(".html") && f !== "index.html")
  .sort();

console.log(`Scanning ${htmlFiles.length} HTML files in ${HTML_DIR}\n`);

// Filename pattern: Slug_PageID.html or just PageID.html
const filenameRe = /^(?:(.+?)_)?(\d+)\.html$/;
// RouterOS menu path pattern, e.g. /interface/bridge/port
const menuPathRe = /\/[a-z][a-z0-9-]*(?:\/[a-z][a-z0-9-]*)*/g;

/** Metrics gathered from a single exported HTML page. */
interface PageInfo {
  file: string;
  pageId: number;
  slug: string;
  title: string;
  breadcrumbs: string[];
  depth: number;
  wordCount: number;
  codeBlocks: number;
  codeLines: number;
  codeLangs: string[];
  tables: number;
  propertyTables: number;
  propertyCount: number;
  menuPaths: string[];
  listProperties: number;
}

const pages: PageInfo[] = [];
const depthDist: Record<number, number> = {};
const allMenuPaths = new Set<string>();
// Header-signature -> occurrence count (renamed from misspelled `tablHeaderPatterns`;
// the JSON output key was already `tableHeaderPatterns`, so output is unchanged).
const tableHeaderPatterns: Record<string, number> = {};
const codeLangDist: Record<string, number> = {};
const anomalies: string[] = [];

for (const file of htmlFiles) {
  const html = readFileSync(resolve(HTML_DIR, file), "utf-8");
  const { document } = parseHTML(html);

  // Parse filename into slug + numeric page ID.
  const match = basename(file).match(filenameRe);
  const pageId = match ? Number(match[2]) : 0;
  const slug = match?.[1] || "";

  if (!pageId) {
    anomalies.push(`No page ID in filename: ${file}`);
  }

  // Title — strip the "RouterOS :" space-name prefix from the export.
  const titleEl = document.querySelector("#title-text");
  const title = titleEl?.textContent?.replace(/^\s*RouterOS\s*:\s*/i, "").trim() || "";

  // Breadcrumbs (ancestor page titles).
  const breadcrumbs: string[] = [];
  for (const li of document.querySelectorAll("#breadcrumbs li")) {
    const a = li.querySelector("a");
    if (a) breadcrumbs.push(a.textContent?.trim() || "");
  }
  const depth = breadcrumbs.length;
  depthDist[depth] = (depthDist[depth] || 0) + 1;

  // Text content volume (whitespace-delimited word count).
  const mainContent = document.querySelector("#main-content");
  const text = mainContent?.textContent || "";
  const wordCount = text.split(/\s+/).filter(Boolean).length;

  // Code blocks — language comes from the syntaxhighlighter "brush" parameter.
  const codeEls = document.querySelectorAll("pre.syntaxhighlighter-pre");
  const codeLangs: string[] = [];
  let codeLines = 0;
  for (const el of codeEls) {
    const params = el.getAttribute("data-syntaxhighlighter-params") || "";
    const brushMatch = params.match(/brush:\s*(\w+)/);
    if (brushMatch) {
      const lang = brushMatch[1];
      codeLangs.push(lang);
      codeLangDist[lang] = (codeLangDist[lang] || 0) + 1;
    }
    // Non-blank lines only.
    codeLines += (el.textContent || "").split("\n").filter((l) => l.trim()).length;
  }

  // Tables — classify by header signature.
  const tables = document.querySelectorAll("table.confluenceTable");
  let propertyTables = 0;
  let propertyCount = 0;
  for (const table of tables) {
    const headerCells = Array.from(table.querySelectorAll("th.confluenceTh, thead th"));
    const headerText = headerCells.map((th) => th.textContent?.trim() || "").join(" | ");
    if (headerText) {
      tableHeaderPatterns[headerText] = (tableHeaderPatterns[headerText] || 0) + 1;
    }

    // Detect property tables: some header cell is exactly "Property" (case-insensitive).
    const isPropertyTable = headerCells.some(
      (th) => th.textContent?.trim().toLowerCase() === "property"
    );
    if (isPropertyTable) {
      propertyTables++;
      // Count data rows. A header row contains <th> cells whether it sits in a
      // <thead> or as the first <tbody> row, so exclude any row holding a <th>.
      // (Fixes the previous heuristic, which always subtracted 1 from the
      // tbody row count and therefore undercounted tables with a real <thead>,
      // whose header row is not among the tbody rows.)
      const dataRows = Array.from(table.querySelectorAll("tr")).filter(
        (tr) => !tr.querySelector("th")
      );
      propertyCount += dataRows.length;
    }
  }

  // List-based properties: <li> items with a <strong> name and a "(type)" hint.
  let listProperties = 0;
  if (mainContent) {
    for (const li of mainContent.querySelectorAll("ul > li")) {
      const strong = li.querySelector("strong");
      if (strong && li.textContent?.includes("(")) {
        listProperties++;
      }
    }
  }

  // Menu paths from code blocks and body text.
  const menuPaths: string[] = [];
  const bodyText = mainContent?.textContent || "";
  for (const m of bodyText.matchAll(menuPathRe)) {
    const p = m[0];
    // Filter noise: the leading "/" yields an empty first split element, so
    // length >= 3 means at least two real segments; also drop URL fragments.
    if (p.split("/").length >= 3 && !p.includes("http") && !p.includes("www")) {
      menuPaths.push(p);
      allMenuPaths.add(p);
    }
  }

  pages.push({
    file,
    pageId,
    slug,
    title,
    breadcrumbs,
    depth,
    wordCount,
    codeBlocks: codeEls.length,
    codeLines,
    codeLangs: [...new Set(codeLangs)],
    tables: tables.length,
    propertyTables,
    propertyCount,
    menuPaths: [...new Set(menuPaths)],
    listProperties,
  });
}

// Summary stats
const totalWords = pages.reduce((s, p) => s + p.wordCount, 0);
const totalCodeLines = pages.reduce((s, p) => s + p.codeLines, 0);
const totalCodeBlocks = pages.reduce((s, p) => s + p.codeBlocks, 0);
const totalTables = pages.reduce((s, p) => s + p.tables, 0);
const totalPropertyTables = pages.reduce((s, p) => s + p.propertyTables, 0);
const totalProperties = pages.reduce((s, p) => s + p.propertyCount, 0);
const totalListProps = pages.reduce((s, p) => s + p.listProperties, 0);
const pagesWithPropertyTables = pages.filter((p) => p.propertyTables > 0).length;
const pagesWithMenuPaths = pages.filter((p) => p.menuPaths.length > 0).length;
const pagesWithCode = pages.filter((p) => p.codeBlocks > 0).length;

console.log("=== HTML Archive Assessment ===\n");
console.log(`Pages: ${pages.length}`);
console.log(`Total words: ${totalWords.toLocaleString()}`);
console.log(`Total code blocks: ${totalCodeBlocks}`);
console.log(`Total code lines: ${totalCodeLines.toLocaleString()}`);
console.log(`Pages with code: ${pagesWithCode}`);
console.log(`Total tables: ${totalTables}`);
console.log(`Property tables: ${totalPropertyTables} (in ${pagesWithPropertyTables} pages)`);
console.log(`Properties (table): ${totalProperties}`);
console.log(`Properties (list): ${totalListProps}`);
console.log(`Unique menu paths: ${allMenuPaths.size}`);
console.log(`Pages w/ menu path: ${pagesWithMenuPaths}`);

console.log("\n--- Breadcrumb Depth Distribution ---");
for (const [depth, count] of Object.entries(depthDist).sort(([a], [b]) => +a - +b)) {
  console.log(`  depth ${depth}: ${count} pages`);
}

console.log("\n--- Code Language Distribution ---");
for (const [lang, count] of Object.entries(codeLangDist).sort(([, a], [, b]) => b - a)) {
  console.log(`  ${lang}: ${count} blocks`);
}

console.log("\n--- Table Header Patterns (top 20) ---");
const sortedHeaders = Object.entries(tableHeaderPatterns).sort(([, a], [, b]) => b - a);
for (const [header, count] of sortedHeaders.slice(0, 20)) {
  console.log(`  [${count}x] ${header}`);
}

console.log("\n--- Largest Pages (by word count) ---");
const byWords = [...pages].sort((a, b) => b.wordCount - a.wordCount);
for (const p of byWords.slice(0, 15)) {
  console.log(`  ${p.wordCount.toLocaleString()} words — ${p.title || p.file} (${p.propertyTables} prop tables, ${p.codeBlocks} code blocks)`);
}

console.log("\n--- Pages Without Property Tables (sample) ---");
const noProps = pages.filter((p) => p.propertyTables === 0 && p.wordCount > 100);
for (const p of noProps.slice(0, 10)) {
  console.log(`  ${p.title || p.file} — ${p.wordCount} words, ${p.tables} tables, ${p.listProperties} list props`);
}

if (anomalies.length > 0) {
  console.log("\n--- Anomalies ---");
  for (const a of anomalies) console.log(`  ${a}`);
}

// Top menu paths
console.log("\n--- Unique Menu Paths (sample, top 30) ---");
const sortedPaths = [...allMenuPaths].sort();
for (const p of sortedPaths.slice(0, 30)) {
  console.log(`  ${p}`);
}

// Write summary JSON for downstream tooling.
const summary = {
  pageCount: pages.length,
  totalWords,
  totalCodeBlocks,
  totalCodeLines,
  totalTables,
  totalPropertyTables,
  totalProperties,
  totalListProps,
  uniqueMenuPaths: allMenuPaths.size,
  depthDistribution: depthDist,
  codeLangDistribution: codeLangDist,
  tableHeaderPatterns: Object.fromEntries(sortedHeaders),
  pages: pages.map((p) => ({
    file: p.file,
    pageId: p.pageId,
    title: p.title,
    depth: p.depth,
    wordCount: p.wordCount,
    codeBlocks: p.codeBlocks,
    tables: p.tables,
    propertyTables: p.propertyTables,
    propertyCount: p.propertyCount,
    listProperties: p.listProperties,
    menuPaths: p.menuPaths,
  })),
};

const outPath = resolve(import.meta.dirname, "../ros-html-assessment.json");
await Bun.write(outPath, JSON.stringify(summary, null, 2));
console.log(`\nFull assessment written to ${outPath}`);
package/src/db.ts ADDED
@@ -0,0 +1,360 @@
1
+ /**
2
+ * db.ts — SQLite schema for RouterOS documentation.
3
+ *
4
+ * DB path: DB_PATH env var, or <project-root>/ros-help.db
5
+ *
6
+ * Tables:
7
+ * pages — one row per Confluence HTML page
8
+ * pages_fts — FTS5 over title, path, text, code
9
+ * properties — extracted property tables (name, type, default, description)
10
+ * properties_fts — FTS5 over name, description
11
+ * callouts — note/warning/info callout blocks from pages
12
+ * callouts_fts — FTS5 over callout content
13
+ * commands — RouterOS command tree from inspect.json (latest version)
14
+ * command_versions — junction: which commands exist in which RouterOS versions
15
+ * ros_versions — metadata for each extracted RouterOS version
16
+ * devices — MikroTik product hardware specs from product matrix CSV
17
+ * devices_fts — FTS5 over product name, code, architecture, CPU
18
+ * changelogs — parsed changelog entries per RouterOS version
19
+ * changelogs_fts — FTS5 over category, description
20
+ */
21
+
22
+ import sqlite from "bun:sqlite";
23
+ import path from "node:path";
24
+
25
+ declare const IS_COMPILED: boolean;
26
+
27
+ /**
28
+ * Resolve the base directory for finding ros-help.db:
29
+ * - Compiled binary: directory containing the executable
30
+ * - Dev mode: project root (one level up from src/)
31
+ */
32
+ const baseDir =
33
+ typeof IS_COMPILED !== "undefined" && IS_COMPILED
34
+ ? path.dirname(process.execPath)
35
+ : path.resolve(import.meta.dirname, "..");
36
+
37
+ export const DB_PATH =
38
+ process.env.DB_PATH?.trim() || path.join(baseDir, "ros-help.db");
39
+
40
+ export const db = new sqlite(DB_PATH);
41
+
42
+ export function initDb() {
43
+ db.run("PRAGMA journal_mode=WAL;");
44
+ db.run("PRAGMA foreign_keys=ON;");
45
+
46
+ db.run(`CREATE TABLE IF NOT EXISTS schema_migrations (
47
+ version TEXT PRIMARY KEY,
48
+ applied_at TEXT NOT NULL
49
+ );`);
50
+
51
+ // -- Pages (from Confluence HTML export) --
52
+
53
+ db.run(`CREATE TABLE IF NOT EXISTS pages (
54
+ id INTEGER PRIMARY KEY,
55
+ slug TEXT NOT NULL,
56
+ title TEXT NOT NULL,
57
+ path TEXT NOT NULL,
58
+ depth INTEGER NOT NULL,
59
+ parent_id INTEGER REFERENCES pages(id),
60
+ url TEXT NOT NULL,
61
+ text TEXT NOT NULL,
62
+ code TEXT NOT NULL,
63
+ code_lang TEXT,
64
+ author TEXT,
65
+ last_updated TEXT,
66
+ word_count INTEGER NOT NULL,
67
+ code_lines INTEGER NOT NULL,
68
+ html_file TEXT NOT NULL
69
+ );`);
70
+
71
+ db.run(`CREATE VIRTUAL TABLE IF NOT EXISTS pages_fts USING fts5(
72
+ title, path, text, code,
73
+ content=pages,
74
+ content_rowid=id,
75
+ tokenize='porter unicode61'
76
+ );`);
77
+
78
+ db.run(`CREATE TRIGGER IF NOT EXISTS pages_ai AFTER INSERT ON pages BEGIN
79
+ INSERT INTO pages_fts(rowid, title, path, text, code)
80
+ VALUES (new.id, new.title, new.path, new.text, new.code);
81
+ END;`);
82
+ db.run(`CREATE TRIGGER IF NOT EXISTS pages_ad AFTER DELETE ON pages BEGIN
83
+ INSERT INTO pages_fts(pages_fts, rowid, title, path, text, code)
84
+ VALUES('delete', old.id, old.title, old.path, old.text, old.code);
85
+ END;`);
86
+ db.run(`CREATE TRIGGER IF NOT EXISTS pages_au AFTER UPDATE ON pages BEGIN
87
+ INSERT INTO pages_fts(pages_fts, rowid, title, path, text, code)
88
+ VALUES('delete', old.id, old.title, old.path, old.text, old.code);
89
+ INSERT INTO pages_fts(rowid, title, path, text, code)
90
+ VALUES (new.id, new.title, new.path, new.text, new.code);
91
+ END;`);
92
+
93
+ // -- Properties (extracted from confluenceTable) --
94
+
95
+ db.run(`CREATE TABLE IF NOT EXISTS properties (
96
+ id INTEGER PRIMARY KEY,
97
+ page_id INTEGER NOT NULL REFERENCES pages(id),
98
+ name TEXT NOT NULL,
99
+ type TEXT,
100
+ default_val TEXT,
101
+ description TEXT NOT NULL,
102
+ section TEXT,
103
+ sort_order INTEGER NOT NULL,
104
+ UNIQUE(page_id, name, section)
105
+ );`);
106
+
107
+ db.run(`CREATE VIRTUAL TABLE IF NOT EXISTS properties_fts USING fts5(
108
+ name, description,
109
+ content=properties,
110
+ content_rowid=id,
111
+ tokenize='porter unicode61'
112
+ );`);
113
+
114
+ db.run(`CREATE TRIGGER IF NOT EXISTS props_ai AFTER INSERT ON properties BEGIN
115
+ INSERT INTO properties_fts(rowid, name, description)
116
+ VALUES (new.id, new.name, new.description);
117
+ END;`);
118
+ db.run(`CREATE TRIGGER IF NOT EXISTS props_ad AFTER DELETE ON properties BEGIN
119
+ INSERT INTO properties_fts(properties_fts, rowid, name, description)
120
+ VALUES('delete', old.id, old.name, old.description);
121
+ END;`);
122
+ db.run(`CREATE TRIGGER IF NOT EXISTS props_au AFTER UPDATE ON properties BEGIN
123
+ INSERT INTO properties_fts(properties_fts, rowid, name, description)
124
+ VALUES('delete', old.id, old.name, old.description);
125
+ INSERT INTO properties_fts(rowid, name, description)
126
+ VALUES (new.id, new.name, new.description);
127
+ END;`);
128
+
129
+ // -- Callouts (note/warning/info blocks from Confluence pages) --
130
+
131
+ db.run(`CREATE TABLE IF NOT EXISTS callouts (
132
+ id INTEGER PRIMARY KEY,
133
+ page_id INTEGER NOT NULL REFERENCES pages(id),
134
+ type TEXT NOT NULL,
135
+ content TEXT NOT NULL,
136
+ sort_order INTEGER NOT NULL
137
+ );`);
138
+
139
+ db.run(`CREATE VIRTUAL TABLE IF NOT EXISTS callouts_fts USING fts5(
140
+ content,
141
+ content=callouts,
142
+ content_rowid=id,
143
+ tokenize='porter unicode61'
144
+ );`);
145
+
146
+ db.run(`CREATE TRIGGER IF NOT EXISTS callouts_ai AFTER INSERT ON callouts BEGIN
147
+ INSERT INTO callouts_fts(rowid, content)
148
+ VALUES (new.id, new.content);
149
+ END;`);
150
+ db.run(`CREATE TRIGGER IF NOT EXISTS callouts_ad AFTER DELETE ON callouts BEGIN
151
+ INSERT INTO callouts_fts(callouts_fts, rowid, content)
152
+ VALUES('delete', old.id, old.content);
153
+ END;`);
154
+ db.run(`CREATE TRIGGER IF NOT EXISTS callouts_au AFTER UPDATE ON callouts BEGIN
155
+ INSERT INTO callouts_fts(callouts_fts, rowid, content)
156
+ VALUES('delete', old.id, old.content);
157
+ INSERT INTO callouts_fts(rowid, content)
158
+ VALUES (new.id, new.content);
159
+ END;`);
160
+
161
+ db.run(`CREATE INDEX IF NOT EXISTS idx_callouts_page ON callouts(page_id);`);
162
+ db.run(`CREATE INDEX IF NOT EXISTS idx_callouts_type ON callouts(type);`);
163
+
164
+ // -- Sections (page chunks split by headings, for large-page retrieval) --
165
+
166
+ // Migration: drop legacy sections table (from PDF-era schema) if it lacks page_id
167
+ const secCols = db.prepare("SELECT name FROM pragma_table_info('sections')").all() as Array<{ name: string }>;
168
+ if (secCols.length > 0 && !secCols.some((c) => c.name === "page_id")) {
169
+ db.run("DROP TABLE sections;");
170
+ }
171
+
172
+ db.run(`CREATE TABLE IF NOT EXISTS sections (
173
+ id INTEGER PRIMARY KEY,
174
+ page_id INTEGER NOT NULL REFERENCES pages(id),
175
+ heading TEXT NOT NULL,
176
+ level INTEGER NOT NULL,
177
+ anchor_id TEXT NOT NULL,
178
+ text TEXT NOT NULL,
179
+ code TEXT NOT NULL,
180
+ word_count INTEGER NOT NULL,
181
+ sort_order INTEGER NOT NULL
182
+ );`);
183
+
184
+ db.run(`CREATE INDEX IF NOT EXISTS idx_sections_page ON sections(page_id);`);
185
+ db.run(`CREATE INDEX IF NOT EXISTS idx_sections_anchor ON sections(page_id, anchor_id);`);
186
+
187
+ // -- Commands (from inspect.json) --
188
+
189
+ db.run(`CREATE TABLE IF NOT EXISTS commands (
190
+ id INTEGER PRIMARY KEY,
191
+ path TEXT NOT NULL UNIQUE,
192
+ name TEXT NOT NULL,
193
+ type TEXT NOT NULL,
194
+ parent_path TEXT,
195
+ page_id INTEGER REFERENCES pages(id),
196
+ description TEXT,
197
+ ros_version TEXT
198
+ );`);
199
+
200
+ // Migration: add ros_version column if missing (from pre-version schema)
201
+ const cmdCols = db.prepare("PRAGMA table_info(commands)").all() as Array<{ name: string }>;
202
+ if (!cmdCols.some((c) => c.name === "ros_version")) {
203
+ db.run("ALTER TABLE commands ADD COLUMN ros_version TEXT;");
204
+ }
205
+
206
+ db.run(`CREATE INDEX IF NOT EXISTS idx_commands_parent ON commands(parent_path);`);
207
+ db.run(`CREATE INDEX IF NOT EXISTS idx_commands_page ON commands(page_id);`);
208
+ db.run(`CREATE INDEX IF NOT EXISTS idx_commands_type ON commands(type);`);
209
+
210
+ // -- Command version tracking --
211
+
212
+ db.run(`CREATE TABLE IF NOT EXISTS ros_versions (
213
+ version TEXT PRIMARY KEY,
214
+ channel TEXT,
215
+ extra_packages INTEGER NOT NULL DEFAULT 0,
216
+ extracted_at TEXT NOT NULL
217
+ );`);
218
+
219
+ db.run(`CREATE TABLE IF NOT EXISTS command_versions (
220
+ command_path TEXT NOT NULL,
221
+ ros_version TEXT NOT NULL REFERENCES ros_versions(version),
222
+ PRIMARY KEY (command_path, ros_version)
223
+ );`);
224
+
225
+ db.run(`CREATE INDEX IF NOT EXISTS idx_cmdver_version ON command_versions(ros_version);`);
226
+
227
+ // -- Devices (MikroTik product matrix) --
228
+
229
+ db.run(`CREATE TABLE IF NOT EXISTS devices (
230
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
231
+ product_name TEXT NOT NULL UNIQUE,
232
+ product_code TEXT,
233
+ architecture TEXT,
234
+ cpu TEXT,
235
+ cpu_cores INTEGER,
236
+ cpu_frequency TEXT,
237
+ license_level INTEGER,
238
+ operating_system TEXT,
239
+ ram TEXT,
240
+ ram_mb INTEGER,
241
+ storage TEXT,
242
+ storage_mb INTEGER,
243
+ dimensions TEXT,
244
+ poe_in TEXT,
245
+ poe_out TEXT,
246
+ poe_out_ports TEXT,
247
+ poe_in_voltage TEXT,
248
+ dc_inputs INTEGER,
249
+ dc_jack_voltage TEXT,
250
+ max_power_w REAL,
251
+ wireless_24_chains INTEGER,
252
+ antenna_24_dbi REAL,
253
+ wireless_5_chains INTEGER,
254
+ antenna_5_dbi REAL,
255
+ eth_fast INTEGER,
256
+ eth_gigabit INTEGER,
257
+ eth_2500 INTEGER,
258
+ usb_ports INTEGER,
259
+ combo_ports INTEGER,
260
+ sfp_ports INTEGER,
261
+ sfp_plus_ports INTEGER,
262
+ eth_multigig INTEGER,
263
+ sim_slots INTEGER,
264
+ memory_cards TEXT,
265
+ usb_type TEXT,
266
+ msrp_usd REAL
267
+ );`);
268
+
269
+ db.run(`CREATE VIRTUAL TABLE IF NOT EXISTS devices_fts USING fts5(
270
+ product_name, product_code, architecture, cpu,
271
+ content=devices,
272
+ content_rowid=id,
273
+ tokenize='unicode61'
274
+ );`);
275
+
276
+ db.run(`CREATE TRIGGER IF NOT EXISTS devices_ai AFTER INSERT ON devices BEGIN
277
+ INSERT INTO devices_fts(rowid, product_name, product_code, architecture, cpu)
278
+ VALUES (new.id, new.product_name, new.product_code, new.architecture, new.cpu);
279
+ END;`);
280
+ db.run(`CREATE TRIGGER IF NOT EXISTS devices_ad AFTER DELETE ON devices BEGIN
281
+ INSERT INTO devices_fts(devices_fts, rowid, product_name, product_code, architecture, cpu)
282
+ VALUES('delete', old.id, old.product_name, old.product_code, old.architecture, old.cpu);
283
+ END;`);
284
+ db.run(`CREATE TRIGGER IF NOT EXISTS devices_au AFTER UPDATE ON devices BEGIN
285
+ INSERT INTO devices_fts(devices_fts, rowid, product_name, product_code, architecture, cpu)
286
+ VALUES('delete', old.id, old.product_name, old.product_code, old.architecture, old.cpu);
287
+ INSERT INTO devices_fts(rowid, product_name, product_code, architecture, cpu)
288
+ VALUES (new.id, new.product_name, new.product_code, new.architecture, new.cpu);
289
+ END;`);
290
+
291
+ // -- Changelogs (parsed per-entry from MikroTik download server) --
292
+
293
+ db.run(`CREATE TABLE IF NOT EXISTS changelogs (
294
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
295
+ version TEXT NOT NULL,
296
+ released TEXT,
297
+ category TEXT NOT NULL,
298
+ is_breaking INTEGER NOT NULL DEFAULT 0,
299
+ description TEXT NOT NULL,
300
+ sort_order INTEGER NOT NULL,
301
+ UNIQUE(version, sort_order)
302
+ );`);
303
+
304
+ db.run(`CREATE INDEX IF NOT EXISTS idx_changelogs_version ON changelogs(version);`);
305
+ db.run(`CREATE INDEX IF NOT EXISTS idx_changelogs_category ON changelogs(category);`);
306
+
307
+ db.run(`CREATE VIRTUAL TABLE IF NOT EXISTS changelogs_fts USING fts5(
308
+ category, description,
309
+ content=changelogs,
310
+ content_rowid=id,
311
+ tokenize='porter unicode61'
312
+ );`);
313
+
314
+ db.run(`CREATE TRIGGER IF NOT EXISTS changelogs_ai AFTER INSERT ON changelogs BEGIN
315
+ INSERT INTO changelogs_fts(rowid, category, description)
316
+ VALUES (new.id, new.category, new.description);
317
+ END;`);
318
+ db.run(`CREATE TRIGGER IF NOT EXISTS changelogs_ad AFTER DELETE ON changelogs BEGIN
319
+ INSERT INTO changelogs_fts(changelogs_fts, rowid, category, description)
320
+ VALUES('delete', old.id, old.category, old.description);
321
+ END;`);
322
+ db.run(`CREATE TRIGGER IF NOT EXISTS changelogs_au AFTER UPDATE ON changelogs BEGIN
323
+ INSERT INTO changelogs_fts(changelogs_fts, rowid, category, description)
324
+ VALUES('delete', old.id, old.category, old.description);
325
+ INSERT INTO changelogs_fts(rowid, category, description)
326
+ VALUES (new.id, new.category, new.description);
327
+ END;`);
328
+ }
329
+
330
+ export function getDbStats() {
331
+ const count = (sql: string) =>
332
+ Number((db.prepare(sql).get() as { c: number }).c ?? 0);
333
+ const scalar = (sql: string) => {
334
+ const row = db.prepare(sql).get() as { v: string | null } | null;
335
+ return row?.v ?? null;
336
+ };
337
+ return {
338
+ db_path: DB_PATH,
339
+ pages: count("SELECT COUNT(*) AS c FROM pages"),
340
+ sections: count("SELECT COUNT(*) AS c FROM sections"),
341
+ properties: count("SELECT COUNT(*) AS c FROM properties"),
342
+ callouts: count("SELECT COUNT(*) AS c FROM callouts"),
343
+ commands: count("SELECT COUNT(*) AS c FROM commands"),
344
+ commands_linked: count("SELECT COUNT(*) AS c FROM commands WHERE page_id IS NOT NULL"),
345
+ devices: count("SELECT COUNT(*) AS c FROM devices"),
346
+ changelogs: count("SELECT COUNT(*) AS c FROM changelogs"),
347
+ changelog_versions: count("SELECT COUNT(DISTINCT version) AS c FROM changelogs"),
348
+ ros_versions: count("SELECT COUNT(*) AS c FROM ros_versions"),
349
+ ros_version_min: scalar("SELECT MIN(version) AS v FROM ros_versions"),
350
+ ros_version_max: scalar("SELECT MAX(version) AS v FROM ros_versions"),
351
+ doc_export: "2026-03-25 (Confluence HTML)",
352
+ };
353
+ }
354
+
355
+ // Run schema init when executed directly
356
+ if (import.meta.main) {
357
+ initDb();
358
+ console.log("Schema initialized:", DB_PATH);
359
+ console.log(getDbStats());
360
+ }