@a13xu/lucid 1.4.0 → 1.9.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/README.md +118 -14
  2. package/build/config.d.ts +37 -0
  3. package/build/config.js +45 -0
  4. package/build/database.d.ts +36 -1
  5. package/build/database.js +85 -1
  6. package/build/guardian/coding-analyzer.d.ts +11 -0
  7. package/build/guardian/coding-analyzer.js +393 -0
  8. package/build/guardian/coding-rules.d.ts +1 -0
  9. package/build/guardian/coding-rules.js +97 -0
  10. package/build/index.js +164 -3
  11. package/build/indexer/ast.d.ts +9 -0
  12. package/build/indexer/ast.js +158 -0
  13. package/build/indexer/project.js +21 -13
  14. package/build/memory/experience.d.ts +11 -0
  15. package/build/memory/experience.js +85 -0
  16. package/build/retrieval/context.d.ts +29 -0
  17. package/build/retrieval/context.js +219 -0
  18. package/build/retrieval/qdrant.d.ts +16 -0
  19. package/build/retrieval/qdrant.js +135 -0
  20. package/build/retrieval/tfidf.d.ts +14 -0
  21. package/build/retrieval/tfidf.js +64 -0
  22. package/build/security/alerts.d.ts +44 -0
  23. package/build/security/alerts.js +228 -0
  24. package/build/security/env.d.ts +24 -0
  25. package/build/security/env.js +85 -0
  26. package/build/security/guard.d.ts +35 -0
  27. package/build/security/guard.js +133 -0
  28. package/build/security/ratelimit.d.ts +34 -0
  29. package/build/security/ratelimit.js +105 -0
  30. package/build/security/smtp.d.ts +26 -0
  31. package/build/security/smtp.js +125 -0
  32. package/build/security/ssrf.d.ts +18 -0
  33. package/build/security/ssrf.js +109 -0
  34. package/build/security/waf.d.ts +33 -0
  35. package/build/security/waf.js +174 -0
  36. package/build/tools/coding-guard.d.ts +24 -0
  37. package/build/tools/coding-guard.js +82 -0
  38. package/build/tools/context.d.ts +39 -0
  39. package/build/tools/context.js +105 -0
  40. package/build/tools/init.d.ts +41 -1
  41. package/build/tools/init.js +124 -22
  42. package/build/tools/remember.d.ts +4 -4
  43. package/build/tools/reward.d.ts +29 -0
  44. package/build/tools/reward.js +154 -0
  45. package/build/tools/sync.js +15 -0
  46. package/package.json +9 -2
@@ -0,0 +1,219 @@
1
+ // Smart context assembly — TF-IDF + recency boost + AST skeleton pruning
2
+ // Falls back gracefully: Qdrant → TF-IDF → recency-only
3
+ import { decompress } from "../store/content.js";
4
+ import { rankByRelevance } from "./tfidf.js";
5
+ import { extractSkeleton, renderSkeleton } from "../indexer/ast.js";
6
+ import { searchQdrant } from "./qdrant.js";
7
+ import { getQdrantConfig } from "../config.js";
8
+ import { getFileRewardsMap } from "../memory/experience.js";
9
+ // ---------------------------------------------------------------------------
10
+ // Token estimation (1 token ≈ 4 chars is the standard heuristic)
11
+ // ---------------------------------------------------------------------------
12
// Average of ~4 characters per token is the standard rough heuristic.
const CHARS_PER_TOKEN = 4;
/** Rough token count for `text` (ceil of length / 4). */
export function estimateTokens(text) {
    return Math.ceil(text.length / CHARS_PER_TOKEN);
}
15
+ // ---------------------------------------------------------------------------
16
+ // Relevant fragment extraction (lines around query matches)
17
+ // ---------------------------------------------------------------------------
18
/**
 * Pull out the lines that mention any query term, plus `contextLines`
 * lines of surrounding context, each prefixed with its 1-based number.
 * Non-contiguous regions are separated by "…". Returns "" when no term
 * matches. Query terms shorter than 3 characters are ignored.
 */
export function extractFragments(source, query, contextLines = 3) {
    const srcLines = source.split("\n");
    const needles = query
        .toLowerCase()
        .split(/\s+/)
        .filter((term) => term.length > 2);
    const keep = new Set();
    srcLines.forEach((line, idx) => {
        const haystack = line.toLowerCase();
        if (!needles.some((term) => haystack.includes(term)))
            return;
        const lo = Math.max(0, idx - contextLines);
        const hi = Math.min(srcLines.length - 1, idx + contextLines);
        for (let k = lo; k <= hi; k++)
            keep.add(k);
    });
    if (keep.size === 0)
        return "";
    const pieces = [];
    let last = -2;
    for (const idx of [...keep].sort((x, y) => x - y)) {
        // A gap separator is also emitted before the very first region.
        if (idx > last + 1)
            pieces.push("…");
        pieces.push(`${idx + 1}: ${srcLines[idx]}`);
        last = idx;
    }
    return pieces.join("\n");
}
43
+ // ---------------------------------------------------------------------------
44
+ // Simple line-level diff (no external deps)
45
+ // ---------------------------------------------------------------------------
46
/**
 * Positional line-by-line diff between two texts (no external deps).
 *
 * Lines are compared by index only — an insertion/deletion in the middle
 * shows up as a run of changed lines rather than a shifted hunk. Good
 * enough for compact change summaries; not a real LCS diff.
 *
 * @param prev       previous file content
 * @param curr       current file content
 * @param maxChanges cap on reported line changes (default 40)
 * @returns "-n: old" / "+n: new" lines joined by "\n", or "[no line changes]"
 */
export function computeDiff(prev, curr, maxChanges = 40) {
    const pLines = prev.split("\n");
    const cLines = curr.split("\n");
    const out = [];
    let changes = 0;
    const maxLen = Math.max(pLines.length, cLines.length);
    for (let i = 0; i < maxLen; i++) {
        if (changes >= maxChanges) {
            // BUG FIX: the old message reported |len(curr) - len(prev)| — the net
            // length delta, which says nothing about how much was left unexamined.
            // Report the number of remaining line positions instead.
            out.push(`[… ${maxLen - i} more lines not compared, truncated]`);
            break;
        }
        const p = pLines[i];
        const c = cLines[i];
        if (p === c)
            continue;
        if (p === undefined) {
            out.push(`+${i + 1}: ${c}`); // line added past end of prev
        }
        else if (c === undefined) {
            out.push(`-${i + 1}: ${p}`); // line removed past end of curr
        }
        else {
            out.push(`-${i + 1}: ${p}`); // line replaced in place
            out.push(`+${i + 1}: ${c}`);
        }
        changes++;
    }
    return out.length > 0 ? out.join("\n") : "[no line changes]";
}
75
+ // ---------------------------------------------------------------------------
76
+ // Main function
77
+ // ---------------------------------------------------------------------------
78
/**
 * Assemble a token-budgeted context window for `query`.
 *
 * Ranking: Qdrant vector search when configured, otherwise TF-IDF with
 * recency (+0.3) and experience-reward (+0.25 max) boosts; "recent"
 * strategy when opts.recentOnly. Files over the per-file budget are
 * degraded to an AST skeleton plus query-relevant fragments.
 *
 * @param query free-text relevance query
 * @param stmts prepared statements — only getAllFiles.all() is used here
 * @param cfg   resolved config (maxContextTokens, maxTokensPerFile,
 *              recentWindowHours, whitelistDirs, qdrant settings)
 * @param opts  optional overrides: maxTokens, maxTokensPerFile, recentHours,
 *              topK, dirs, recentOnly, skeletonOnly
 * @returns { files, totalTokens, strategy, truncated, skippedFiles }
 */
export async function assembleContext(query, stmts, cfg, opts = {}) {
    const maxTokens = opts.maxTokens ?? cfg.maxContextTokens;
    const maxPerFile = opts.maxTokensPerFile ?? cfg.maxTokensPerFile;
    const recentHours = opts.recentHours ?? cfg.recentWindowHours;
    const topK = opts.topK ?? 10;
    const allRows = stmts.getAllFiles.all();
    if (!Array.isArray(allRows) || allRows.length === 0) {
        return { files: [], totalTokens: 0, strategy: "tfidf", truncated: false, skippedFiles: 0 };
    }
    // Whitelist-dirs filter (Windows separators normalized before matching).
    const dirs = opts.dirs ?? cfg.whitelistDirs;
    const filtered = dirs && dirs.length > 0
        ? allRows.filter((r) => dirs.some((d) => r.filepath.replace(/\\/g, "/").includes(d)))
        : allRows;
    // Recency window: files indexed within the last `recentHours`.
    const nowSec = Math.floor(Date.now() / 1000);
    const cutoffSec = nowSec - recentHours * 3600;
    const recentSet = new Set(filtered.filter((r) => (r.indexed_at ?? 0) >= cutoffSec).map((r) => r.filepath));
    const candidates = opts.recentOnly
        ? filtered.filter((r) => recentSet.has(r.filepath))
        : filtered;
    if (candidates.length === 0) {
        return { files: [], totalTokens: 0, strategy: opts.recentOnly ? "recent" : "tfidf", truncated: false, skippedFiles: filtered.length };
    }
    const decompressed = candidates.map((r) => ({
        filepath: r.filepath,
        language: r.language,
        indexedAt: r.indexed_at ?? 0,
        text: decompress(r.content),
    }));
    // Ranking strategy: Qdrant first, TF-IDF (+boosts) as fallback.
    let strategy = "tfidf";
    let ranked;
    // Experience-based rewards — decayed map, normalized inside rankAndBoost.
    const fileRewards = getFileRewardsMap(stmts);
    const qdrantCfg = getQdrantConfig(cfg);
    if (qdrantCfg && !opts.recentOnly) {
        try {
            const chunks = await searchQdrant(query, topK * 3, qdrantCfg);
            if (chunks.length > 0) {
                strategy = "qdrant";
                // Qdrant matches first (deduped by filepath, order preserved),
                // then every remaining candidate in TF-IDF order.
                const seen = new Set();
                const qdrantOrder = [];
                for (const c of chunks) {
                    if (!seen.has(c.filepath)) {
                        seen.add(c.filepath);
                        qdrantOrder.push(c.filepath);
                    }
                }
                const tfidfOrder = rankByRelevance(query, decompressed)
                    .map((s) => s.filepath)
                    .filter((fp) => !seen.has(fp));
                const fpToDoc = new Map(decompressed.map((d) => [d.filepath, d]));
                ranked = [...qdrantOrder, ...tfidfOrder].map((fp) => fpToDoc.get(fp)).filter(Boolean);
            }
            else {
                ranked = rankAndBoost(query, decompressed, recentSet, fileRewards);
            }
        }
        catch {
            // Qdrant unreachable — fall back to TF-IDF silently.
            ranked = rankAndBoost(query, decompressed, recentSet, fileRewards);
        }
    }
    else {
        ranked = rankAndBoost(query, decompressed, recentSet, fileRewards);
        if (opts.recentOnly)
            strategy = "recent";
    }
    // Assemble under the token budget.
    const result = [];
    let totalTokens = 0;
    let truncated = false;
    for (const file of ranked) {
        if (totalTokens >= maxTokens) {
            truncated = true;
            break;
        }
        const remaining = maxTokens - totalTokens;
        const fullTokens = estimateTokens(file.text);
        const isRecent = recentSet.has(file.filepath);
        let content;
        let reason;
        if (opts.skeletonOnly || fullTokens > maxPerFile) {
            // Over budget (or skeleton requested): AST skeleton + query fragments.
            const sk = extractSkeleton(file.text, file.language);
            const skText = renderSkeleton(sk, file.filepath);
            const fragments = query ? extractFragments(file.text, query) : "";
            content = fragments
                ? `${skText}\n\n// — relevant fragments —\n${fragments}`
                : skText;
            reason = opts.skeletonOnly ? "skeleton" : `skeleton (${fullTokens} tokens > limit ${maxPerFile})`;
        }
        else {
            content = file.text;
            reason = "full";
        }
        if (isRecent)
            reason += " +recent";
        const contentTokens = estimateTokens(content);
        // Near-empty after skeletonization — not worth a slot.
        if (contentTokens < 10)
            continue;
        // Truncate to remaining budget (token estimate is length/4, so slice chars).
        const usedTokens = Math.min(contentTokens, remaining);
        const finalContent = usedTokens < contentTokens
            ? content.slice(0, usedTokens * 4) + "\n… [truncated]"
            : content;
        result.push({ filepath: file.filepath, language: file.language, tokens: usedTokens, content: finalContent, reason });
        totalTokens += usedTokens;
    }
    // BUG FIX: the previous version incremented skippedFiles inside the loop
    // AND added `ranked.length - result.length` afterwards (plus a dead
    // `(truncated ? 0 : 0)` term), double-counting small-file skips.
    // Count each ranked-but-excluded file exactly once: too small, or
    // dropped once the budget ran out.
    const skippedFiles = ranked.length - result.length;
    return { files: result, totalTokens, strategy, truncated, skippedFiles };
}
202
+ // ---------------------------------------------------------------------------
203
+ // TF-IDF + recency boost ranking
204
+ // ---------------------------------------------------------------------------
205
/**
 * Sort docs by TF-IDF score plus two additive boosts:
 *   +0.3                          when the file is inside the recency window
 *   +0.25 * (reward / maxReward)  from experience-based file rewards
 * The reward boost is normalized so it cannot dominate the TF-IDF signal.
 * Returns a sorted copy; the input array is not mutated.
 */
function rankAndBoost(query, docs, recentSet, fileRewards) {
    const RECENCY_BONUS = 0.3;
    const REWARD_CAP = 0.25;
    const tfidfScore = new Map();
    for (const entry of rankByRelevance(query, docs)) {
        tfidfScore.set(entry.filepath, entry.score);
    }
    const maxReward = fileRewards && fileRewards.size > 0
        ? Math.max(...fileRewards.values())
        : 0;
    const effective = (doc) => {
        let total = tfidfScore.get(doc.filepath) ?? 0;
        if (recentSet.has(doc.filepath))
            total += RECENCY_BONUS;
        if (maxReward > 0)
            total += ((fileRewards.get(doc.filepath) ?? 0) / maxReward) * REWARD_CAP;
        return total;
    };
    return [...docs].sort((a, b) => effective(b) - effective(a));
}
@@ -0,0 +1,16 @@
1
import type { ResolvedConfig } from "../config.js";
/** Qdrant settings with the unset case stripped — only used once configuration is present. */
type QdrantCfg = NonNullable<ResolvedConfig["qdrant"]>;
/** One scored chunk returned by a Qdrant vector search. */
export interface VectorChunk {
    /** Qdrant point id (deterministic hash of filepath + chunk index). */
    id: number;
    /** Source file the chunk was cut from. */
    filepath: string;
    /** Zero-based index of the chunk within its file. */
    chunkIndex: number;
    /** Raw chunk text stored in the point payload. */
    text: string;
    /** Similarity score returned by Qdrant (cosine distance collection). */
    score: number;
}
/** Index one file into Qdrant (called by sync_file when Qdrant is configured). */
export declare function indexFileInQdrant(filepath: string, text: string, cfg: QdrantCfg): Promise<void>;
/** Top-k semantic search across all indexed chunks. */
export declare function searchQdrant(query: string, topK: number, cfg: QdrantCfg): Promise<VectorChunk[]>;
/** Check if Qdrant collection exists and is reachable. */
export declare function pingQdrant(cfg: QdrantCfg): Promise<boolean>;
export {};
@@ -0,0 +1,135 @@
1
+ // Qdrant vector search — via direct REST API calls (no npm dependency)
2
+ // Only active when QDRANT_URL is set (via env var or lucid.config.json)
3
+ // Falls back silently to TF-IDF when unavailable
4
+ import { safeFetch } from "../security/ssrf.js";
5
+ // ---------------------------------------------------------------------------
6
+ // Embedding generation (OpenAI-compatible endpoint)
7
+ // ---------------------------------------------------------------------------
8
/**
 * Embed a batch of texts via an OpenAI-compatible embeddings endpoint.
 * Endpoint and model fall back to OpenAI defaults when not configured.
 * Throws when no API key is configured or the API responds non-2xx.
 * Returns one embedding vector per input text, in order.
 */
async function embed(texts, cfg) {
    if (!cfg.embeddingApiKey) {
        throw new Error("No embedding API key (set OPENAI_API_KEY or embeddingApiKey in lucid.config.json)");
    }
    const endpoint = cfg.embeddingUrl ?? "https://api.openai.com/v1/embeddings";
    const model = cfg.embeddingModel ?? "text-embedding-3-small";
    const response = await safeFetch(endpoint, {
        method: "POST",
        headers: {
            "Content-Type": "application/json",
            Authorization: `Bearer ${cfg.embeddingApiKey}`,
        },
        body: JSON.stringify({ model, input: texts }),
    });
    if (!response.ok) {
        const detail = await response.text();
        throw new Error(`Embedding API ${response.status}: ${detail.slice(0, 200)}`);
    }
    const payload = await response.json();
    return payload.data.map((entry) => entry.embedding);
}
28
+ // ---------------------------------------------------------------------------
29
+ // Qdrant REST helpers
30
+ // ---------------------------------------------------------------------------
31
/**
 * Issue one JSON request against the Qdrant REST API.
 * Adds the `api-key` header when configured, throws on non-2xx
 * (with the first 200 chars of the body), and returns the parsed JSON.
 */
async function qdrantRequest(cfg, method, path, body) {
    const endpoint = cfg.url.replace(/\/$/, "") + path;
    const headers = { "Content-Type": "application/json" };
    if (cfg.apiKey)
        headers["api-key"] = cfg.apiKey;
    const res = await safeFetch(endpoint, {
        method,
        headers,
        body: body === undefined ? undefined : JSON.stringify(body),
    });
    if (!res.ok) {
        const text = await res.text();
        throw new Error(`Qdrant ${method} ${path} → ${res.status}: ${text.slice(0, 200)}`);
    }
    return res.json();
}
47
/**
 * Make sure the configured collection exists, creating it (cosine distance,
 * cfg.vectorDim or 1536 dims) when the GET probe fails.
 * NOTE(review): any GET failure — including a network outage — triggers the
 * create attempt; a real outage then surfaces from the PUT instead.
 */
async function ensureCollection(cfg) {
    const path = `/collections/${cfg.collection}`;
    try {
        await qdrantRequest(cfg, "GET", path);
        return; // collection already present
    }
    catch {
        // Probe failed — fall through and attempt creation.
    }
    await qdrantRequest(cfg, "PUT", path, {
        vectors: { size: cfg.vectorDim ?? 1536, distance: "Cosine" },
    });
}
59
+ // ---------------------------------------------------------------------------
60
+ // Chunking
61
+ // ---------------------------------------------------------------------------
62
// Fixed chunk size: each Qdrant point covers up to 80 source lines.
const CHUNK_LINES = 80;
/**
 * Split a file into consecutive 80-line windows, each carrying a
 * deterministic integer id derived from `filepath::chunkIndex` so
 * re-indexing the same file overwrites the same points.
 */
function chunkFile(filepath, text) {
    const allLines = text.split("\n");
    const pieces = [];
    let index = 0;
    for (let start = 0; start < allLines.length; start += CHUNK_LINES) {
        pieces.push({
            id: stableId(`${filepath}::${index}`),
            text: allLines.slice(start, start + CHUNK_LINES).join("\n"),
            chunkIndex: index,
        });
        index += 1;
    }
    return pieces;
}
/**
 * FNV-1a 32-bit hash over UTF-16 code units, folded into a
 * non-negative integer below 2e9 (usable as a Qdrant point id).
 */
function stableId(s) {
    const FNV_OFFSET = 0x811c9dc5;
    const FNV_PRIME = 0x01000193;
    let hash = FNV_OFFSET;
    for (let i = 0; i < s.length; i++) {
        hash ^= s.charCodeAt(i);
        hash = Math.imul(hash, FNV_PRIME);
    }
    return (hash >>> 0) % 2_000_000_000;
}
84
+ // ---------------------------------------------------------------------------
85
+ // Public API
86
+ // ---------------------------------------------------------------------------
87
/**
 * Index one file into Qdrant (called by sync_file when Qdrant is configured).
 * Ensures the collection exists, splits the file into line chunks, embeds
 * them in batches of 32, and upserts the resulting points.
 */
export async function indexFileInQdrant(filepath, text, cfg) {
    await ensureCollection(cfg);
    const chunks = chunkFile(filepath, text);
    if (chunks.length === 0)
        return;
    // Conservative embedding batch size (well under typical provider limits).
    const BATCH = 32;
    for (let offset = 0; offset < chunks.length; offset += BATCH) {
        const batch = chunks.slice(offset, offset + BATCH);
        const vectors = await embed(batch.map((chunk) => chunk.text), cfg);
        await qdrantRequest(cfg, "PUT", `/collections/${cfg.collection}/points`, {
            points: batch.map((chunk, i) => ({
                id: chunk.id,
                vector: vectors[i],
                payload: { filepath, chunkIndex: chunk.chunkIndex, text: chunk.text },
            })),
        });
    }
}
108
/**
 * Top-k semantic search across all indexed chunks.
 * Embeds the query, runs a Qdrant points/search with payloads,
 * and maps the hits back into VectorChunk records.
 */
export async function searchQdrant(query, topK, cfg) {
    const [queryVec] = await embed([query], cfg);
    if (!queryVec)
        return [];
    const response = await qdrantRequest(cfg, "POST", `/collections/${cfg.collection}/points/search`, {
        vector: queryVec,
        limit: topK,
        with_payload: true,
    });
    return response.result.map((hit) => ({
        id: hit.id,
        filepath: hit.payload["filepath"],
        chunkIndex: hit.payload["chunkIndex"],
        text: hit.payload["text"],
        score: hit.score,
    }));
}
126
/** Check if Qdrant collection exists and is reachable (never throws). */
export async function pingQdrant(cfg) {
    return qdrantRequest(cfg, "GET", `/collections/${cfg.collection}`).then(
        () => true,
        () => false,
    );
}
@@ -0,0 +1,14 @@
1
/** Lowercase, treat non-[a-z0-9_] as separators, drop stopwords and tokens shorter than 3 chars. */
export declare function tokenize(text: string): string[];
/** TF-IDF relevance result for one file. */
export interface ScoredFile {
    filepath: string;
    /** TF-IDF score against the query; 0 when no query term matched. */
    score: number;
    /** Query terms that appeared in the file. */
    matchedTerms: string[];
}
/**
 * Rank files by TF-IDF relevance to a query.
 * Returns all files sorted by score descending (score=0 files included at bottom).
 */
export declare function rankByRelevance(query: string, files: Array<{
    filepath: string;
    text: string;
}>): ScoredFile[];
@@ -0,0 +1,64 @@
1
+ // TF-IDF scoring — pure JS, no external deps
2
+ // Used as the default relevance engine when Qdrant is not configured
3
// Words too common to be discriminative for relevance ranking:
// English function words plus ubiquitous code keywords.
// FIX: removed the duplicate "let" from the keyword group — it was already
// listed among the English words (harmless in a Set, but misleading).
const STOPWORDS = new Set([
    "the", "and", "for", "are", "but", "not", "you", "all", "can", "had",
    "her", "was", "one", "our", "out", "day", "get", "has", "him", "his",
    "how", "its", "let", "may", "new", "now", "old", "own", "say", "she",
    "too", "use", "way", "who", "will", "with", "that", "this", "from",
    "they", "been", "have", "their", "said", "each", "which", "what",
    // code keywords (too common to be discriminative)
    "return", "const", "import", "export", "function", "class", "type",
    "interface", "string", "number", "boolean", "void", "null", "undefined",
    "async", "await", "true", "false", "default", "module", "require",
    "self", "def", "pass", "else", "elif", "then", "end", "var",
]);
/**
 * Split text into lowercase word tokens for TF-IDF scoring.
 * Non-[a-z0-9_] characters become separators; tokens shorter than
 * 3 characters and stopwords are dropped.
 */
export function tokenize(text) {
    return text
        .toLowerCase()
        .replace(/[^a-z0-9_]/g, " ")
        .split(/\s+/)
        .filter((t) => t.length >= 3 && !STOPWORDS.has(t));
}
22
/**
 * Rank files by TF-IDF relevance to a query.
 * Returns all files sorted by score descending (score=0 files included at bottom).
 * IDF uses add-one smoothing: log((N+1)/(df+1)) + 1; TF is normalized by
 * the document's total token count.
 */
export function rankByRelevance(query, files) {
    if (files.length === 0)
        return [];
    const queryTerms = tokenize(query);
    if (queryTerms.length === 0) {
        // Nothing to match against — everything scores zero.
        return files.map((f) => ({ filepath: f.filepath, score: 0, matchedTerms: [] }));
    }
    const docCount = files.length;
    // Per-document term frequencies; document frequencies accumulated alongside.
    const docFreq = new Map();
    const termCounts = files.map((file) => {
        const counts = new Map();
        for (const token of tokenize(file.text)) {
            counts.set(token, (counts.get(token) ?? 0) + 1);
        }
        for (const term of counts.keys()) {
            docFreq.set(term, (docFreq.get(term) ?? 0) + 1);
        }
        return counts;
    });
    const scored = files.map((file, i) => {
        const counts = termCounts[i];
        let totalTokens = 0;
        for (const n of counts.values())
            totalTokens += n;
        const denom = Math.max(totalTokens, 1);
        const matchedTerms = [];
        let score = 0;
        for (const term of queryTerms) {
            const freq = counts.get(term) ?? 0;
            if (freq === 0)
                continue;
            const idf = Math.log((docCount + 1) / ((docFreq.get(term) ?? 0) + 1)) + 1;
            score += (freq / denom) * idf;
            matchedTerms.push(term);
        }
        return { filepath: file.filepath, score, matchedTerms };
    });
    return scored.sort((a, b) => b.score - a.score);
}
@@ -0,0 +1,44 @@
1
/**
 * Security alert dispatcher.
 *
 * Channels (all optional, configured via lucid-admin.json + env vars):
 *   - Webhook (generic HTTP POST, HMAC-SHA256 signed)
 *   - Slack (incoming webhook)
 *   - Email (SMTP via smtp.ts)
 *
 * Sensitive values MUST come from environment variables:
 *   LUCID_SMTP_PASS      — SMTP password
 *   LUCID_WEBHOOK_SECRET — HMAC signing secret for webhook
 *
 * Config is stored in <project>/.claude/lucid-admin.json (non-sensitive fields only).
 */
import type { Severity } from "./waf.js";
/** One security event to be dispatched to the configured channels. */
export interface AlertEvent {
    severity: Severity;
    /** Identifier of the rule that fired. */
    rule: string;
    /** Tool name involved in the event. */
    tool: string;
    /** Human-readable description of what was detected. */
    detail: string;
    /** Event time as a string — presumably ISO-8601; confirm against the emitter in alerts.js. */
    timestamp: string;
    projectDir?: string;
}
/** Non-sensitive alerting configuration persisted in lucid-admin.json. */
export interface AdminConfig {
    adminName?: string;
    /** Recipient address for email alerts. */
    adminEmail?: string;
    smtpHost?: string;
    smtpPort?: number;
    smtpUser?: string;
    /** From-address for alert mail (password comes from LUCID_SMTP_PASS, never this file). */
    smtpFrom?: string;
    /** Generic HTTP POST target (signed with LUCID_WEBHOOK_SECRET). */
    webhookUrl?: string;
    /** Slack incoming-webhook URL. */
    slackWebhookUrl?: string;
    /** Severities that trigger an alert (default: ["critical", "high"]) */
    alertOn?: Severity[];
    projectName?: string;
}
/** File name (under the project's .claude directory) holding AdminConfig. */
export declare const ADMIN_CONFIG_FILE = "lucid-admin.json";
/** Load AdminConfig for a project — behavior on a missing file is defined in alerts.js; verify there. */
export declare function loadAdminConfig(projectDir: string): AdminConfig;
/** Persist cfg (non-sensitive fields only) to the project's admin config file. */
export declare function saveAdminConfig(projectDir: string, cfg: AdminConfig): void;
/** Return the current admin config — NOTE(review): caching/lookup semantics not visible here; see alerts.js. */
export declare function getAdminConfig(): AdminConfig;
/** True when alerting is set up — exact criteria (which channels count) defined in alerts.js. */
export declare function isAdminConfigured(): boolean;
/** Dispatch one event to every configured channel. */
export declare function sendAlert(event: AlertEvent): Promise<void>;
/** Send a test alert to verify all configured channels work. */
export declare function sendTestAlert(projectDir: string): Promise<string[]>;