@rigour-labs/core 5.0.1 → 5.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (139)
  1. package/README.md +9 -1
  2. package/dist/gates/agent-team.d.ts +0 -1
  3. package/dist/gates/agent-team.js +0 -1
  4. package/dist/gates/checkpoint.d.ts +0 -2
  5. package/dist/gates/checkpoint.js +0 -2
  6. package/dist/gates/context-window-artifacts.d.ts +6 -2
  7. package/dist/gates/context-window-artifacts.js +107 -31
  8. package/dist/gates/deep-analysis.d.ts +2 -0
  9. package/dist/gates/deep-analysis.js +41 -11
  10. package/dist/gates/dependency.d.ts +0 -2
  11. package/dist/gates/dependency.js +23 -5
  12. package/dist/gates/deprecated-apis.d.ts +0 -2
  13. package/dist/gates/deprecated-apis.js +33 -20
  14. package/dist/gates/duplication-drift/index.d.ts +61 -0
  15. package/dist/gates/duplication-drift/index.js +240 -0
  16. package/dist/gates/duplication-drift/similarity.d.ts +68 -0
  17. package/dist/gates/duplication-drift/similarity.js +177 -0
  18. package/dist/gates/duplication-drift/tokenizer.d.ts +55 -0
  19. package/dist/gates/duplication-drift/tokenizer.js +195 -0
  20. package/dist/gates/frontend-secret-exposure.d.ts +0 -3
  21. package/dist/gates/frontend-secret-exposure.js +1 -114
  22. package/dist/gates/frontend-secret-patterns.d.ts +33 -0
  23. package/dist/gates/frontend-secret-patterns.js +119 -0
  24. package/dist/gates/{hallucinated-imports.d.ts → hallucinated-imports/index.d.ts} +2 -29
  25. package/dist/gates/hallucinated-imports/index.js +174 -0
  26. package/dist/gates/hallucinated-imports/js-resolver.d.ts +45 -0
  27. package/dist/gates/hallucinated-imports/js-resolver.js +320 -0
  28. package/dist/gates/hallucinated-imports/manifest-discovery.d.ts +28 -0
  29. package/dist/gates/hallucinated-imports/manifest-discovery.js +114 -0
  30. package/dist/gates/hallucinated-imports/python-resolver.d.ts +24 -0
  31. package/dist/gates/hallucinated-imports/python-resolver.js +306 -0
  32. package/dist/gates/hallucinated-imports-lang.d.ts +2 -2
  33. package/dist/gates/hallucinated-imports-lang.js +269 -34
  34. package/dist/gates/hallucinated-imports.test.js +1 -2
  35. package/dist/gates/inconsistent-error-handling.d.ts +0 -5
  36. package/dist/gates/inconsistent-error-handling.js +15 -144
  37. package/dist/gates/language-adapters/csharp-adapter.d.ts +16 -0
  38. package/dist/gates/language-adapters/csharp-adapter.js +211 -0
  39. package/dist/gates/language-adapters/go-adapter.d.ts +26 -0
  40. package/dist/gates/language-adapters/go-adapter.js +195 -0
  41. package/dist/gates/language-adapters/index.d.ts +15 -0
  42. package/dist/gates/language-adapters/index.js +16 -0
  43. package/dist/gates/language-adapters/java-adapter.d.ts +16 -0
  44. package/dist/gates/language-adapters/java-adapter.js +237 -0
  45. package/dist/gates/language-adapters/js-adapter.d.ts +26 -0
  46. package/dist/gates/language-adapters/js-adapter.js +279 -0
  47. package/dist/gates/language-adapters/python-adapter.d.ts +25 -0
  48. package/dist/gates/language-adapters/python-adapter.js +183 -0
  49. package/dist/gates/language-adapters/registry.d.ts +26 -0
  50. package/dist/gates/language-adapters/registry.js +65 -0
  51. package/dist/gates/language-adapters/ruby-adapter.d.ts +25 -0
  52. package/dist/gates/language-adapters/ruby-adapter.js +217 -0
  53. package/dist/gates/language-adapters/rust-adapter.d.ts +27 -0
  54. package/dist/gates/language-adapters/rust-adapter.js +235 -0
  55. package/dist/gates/language-adapters/types.d.ts +60 -0
  56. package/dist/gates/language-adapters/types.js +22 -0
  57. package/dist/gates/logic-drift-extractors.d.ts +15 -0
  58. package/dist/gates/logic-drift-extractors.js +34 -0
  59. package/dist/gates/logic-drift.d.ts +0 -30
  60. package/dist/gates/logic-drift.js +39 -129
  61. package/dist/gates/phantom-apis.d.ts +0 -2
  62. package/dist/gates/phantom-apis.js +49 -20
  63. package/dist/gates/promise-safety.d.ts +0 -1
  64. package/dist/gates/promise-safety.js +14 -2
  65. package/dist/gates/runner.js +51 -22
  66. package/dist/gates/security-patterns-data.d.ts +14 -0
  67. package/dist/gates/security-patterns-data.js +235 -0
  68. package/dist/gates/security-patterns.d.ts +17 -3
  69. package/dist/gates/security-patterns.js +80 -211
  70. package/dist/gates/side-effect-analysis/categorizer.d.ts +32 -0
  71. package/dist/gates/side-effect-analysis/categorizer.js +83 -0
  72. package/dist/gates/{side-effect-analysis.d.ts → side-effect-analysis/index.d.ts} +3 -5
  73. package/dist/gates/{side-effect-analysis.js → side-effect-analysis/index.js} +33 -45
  74. package/dist/gates/side-effect-analysis/scope-tracker.d.ts +37 -0
  75. package/dist/gates/side-effect-analysis/scope-tracker.js +40 -0
  76. package/dist/gates/side-effect-helpers/index.d.ts +4 -0
  77. package/dist/gates/side-effect-helpers/index.js +4 -0
  78. package/dist/gates/side-effect-helpers/pattern-detection.d.ts +123 -0
  79. package/dist/gates/{side-effect-helpers.js → side-effect-helpers/pattern-detection.js} +22 -468
  80. package/dist/gates/side-effect-helpers/resource-tracking.d.ts +80 -0
  81. package/dist/gates/side-effect-helpers/resource-tracking.js +281 -0
  82. package/dist/gates/side-effect-helpers/scope-analysis.d.ts +21 -0
  83. package/dist/gates/side-effect-helpers/scope-analysis.js +146 -0
  84. package/dist/gates/side-effect-helpers/types.d.ts +38 -0
  85. package/dist/gates/side-effect-helpers/types.js +41 -0
  86. package/dist/gates/side-effect-rules.d.ts +0 -1
  87. package/dist/gates/side-effect-rules.js +0 -1
  88. package/dist/gates/style-drift-rules.d.ts +86 -0
  89. package/dist/gates/style-drift-rules.js +103 -0
  90. package/dist/gates/style-drift.d.ts +7 -16
  91. package/dist/gates/style-drift.js +101 -119
  92. package/dist/gates/test-quality-matchers.d.ts +53 -0
  93. package/dist/gates/test-quality-matchers.js +86 -0
  94. package/dist/gates/test-quality.d.ts +0 -3
  95. package/dist/gates/test-quality.js +47 -44
  96. package/dist/hooks/checker.d.ts +0 -1
  97. package/dist/hooks/checker.js +0 -2
  98. package/dist/hooks/dlp-templates.d.ts +0 -1
  99. package/dist/hooks/dlp-templates.js +0 -4
  100. package/dist/hooks/index.d.ts +0 -2
  101. package/dist/hooks/index.js +0 -2
  102. package/dist/hooks/input-validator.d.ts +0 -1
  103. package/dist/hooks/input-validator.js +0 -1
  104. package/dist/hooks/input-validator.test.js +0 -1
  105. package/dist/hooks/standalone-checker.d.ts +0 -1
  106. package/dist/hooks/standalone-checker.js +0 -1
  107. package/dist/hooks/standalone-dlp-checker.d.ts +0 -1
  108. package/dist/hooks/standalone-dlp-checker.js +0 -1
  109. package/dist/hooks/templates.d.ts +0 -1
  110. package/dist/hooks/templates.js +0 -1
  111. package/dist/hooks/types.d.ts +0 -1
  112. package/dist/hooks/types.js +0 -1
  113. package/dist/index.d.ts +1 -1
  114. package/dist/index.js +1 -1
  115. package/dist/services/adaptive-thresholds.d.ts +0 -2
  116. package/dist/services/adaptive-thresholds.js +0 -2
  117. package/dist/services/filesystem-cache.d.ts +0 -1
  118. package/dist/services/filesystem-cache.js +0 -1
  119. package/dist/services/score-history.d.ts +0 -1
  120. package/dist/services/score-history.js +0 -1
  121. package/dist/services/temporal-drift.d.ts +1 -2
  122. package/dist/services/temporal-drift.js +7 -8
  123. package/dist/storage/db.d.ts +23 -7
  124. package/dist/storage/db.js +116 -55
  125. package/dist/storage/findings.d.ts +4 -3
  126. package/dist/storage/findings.js +13 -20
  127. package/dist/storage/local-memory.d.ts +4 -4
  128. package/dist/storage/local-memory.js +20 -22
  129. package/dist/storage/patterns.d.ts +5 -5
  130. package/dist/storage/patterns.js +20 -26
  131. package/dist/storage/scans.d.ts +6 -6
  132. package/dist/storage/scans.js +12 -21
  133. package/dist/types/index.d.ts +1 -0
  134. package/dist/utils/scanner.js +1 -1
  135. package/package.json +7 -8
  136. package/dist/gates/duplication-drift.d.ts +0 -128
  137. package/dist/gates/duplication-drift.js +0 -585
  138. package/dist/gates/hallucinated-imports.js +0 -641
  139. package/dist/gates/side-effect-helpers.d.ts +0 -260
@@ -2,27 +2,31 @@
2
2
  * SQLite storage layer for Rigour Brain.
3
3
  * Single file at ~/.rigour/rigour.db stores all scan history, findings,
4
4
  * learned patterns, and feedback. ACID-safe, portable, queryable.
5
+ *
6
+ * Uses node-sqlite3 (async, widely supported, no native build issues).
7
+ * All public APIs are async. Graceful degradation if sqlite3 not installed.
5
8
  */
6
9
  import path from 'path';
7
10
  import os from 'os';
8
11
  import fs from 'fs-extra';
9
12
  import { createRequire } from 'module';
10
- // better-sqlite3 is optional — graceful degradation if not installed.
11
- // It's a native C++ addon that uses require() semantics, so we use createRequire.
12
- let Database = null;
13
- let _dbResolved = false;
14
- function loadDatabase() {
15
- if (_dbResolved)
16
- return Database;
17
- _dbResolved = true;
13
+ // ---------------------------------------------------------------------------
14
+ // Optional dynamic import of sqlite3
15
+ // ---------------------------------------------------------------------------
16
+ let sqlite3Module = null;
17
+ let _resolved = false;
18
+ function loadSqlite3() {
19
+ if (_resolved)
20
+ return sqlite3Module;
21
+ _resolved = true;
18
22
  try {
19
23
  const require = createRequire(import.meta.url);
20
- Database = require('better-sqlite3');
24
+ sqlite3Module = require('sqlite3');
21
25
  }
22
26
  catch {
23
- Database = null;
27
+ sqlite3Module = null;
24
28
  }
25
- return Database;
29
+ return sqlite3Module;
26
30
  }
27
31
  const RIGOUR_DIR = path.join(os.homedir(), '.rigour');
28
32
  const DB_PATH = path.join(RIGOUR_DIR, 'rigour.db');
@@ -108,87 +112,144 @@ CREATE INDEX IF NOT EXISTS idx_findings_category ON findings(category);
108
112
  CREATE INDEX IF NOT EXISTS idx_patterns_repo ON patterns(repo);
109
113
  CREATE INDEX IF NOT EXISTS idx_patterns_strength ON patterns(strength);
110
114
  `;
115
+ function wrapDatabase(raw) {
116
+ const db = {
117
+ run(sql, ...params) {
118
+ return new Promise((resolve, reject) => {
119
+ raw.run(sql, ...params, function (err) {
120
+ if (err)
121
+ return reject(err);
122
+ resolve({ changes: this.changes, lastID: this.lastID });
123
+ });
124
+ });
125
+ },
126
+ get(sql, ...params) {
127
+ return new Promise((resolve, reject) => {
128
+ raw.get(sql, ...params, (err, row) => {
129
+ if (err)
130
+ return reject(err);
131
+ resolve(row);
132
+ });
133
+ });
134
+ },
135
+ all(sql, ...params) {
136
+ return new Promise((resolve, reject) => {
137
+ raw.all(sql, ...params, (err, rows) => {
138
+ if (err)
139
+ return reject(err);
140
+ resolve(rows || []);
141
+ });
142
+ });
143
+ },
144
+ exec(sql) {
145
+ return new Promise((resolve, reject) => {
146
+ raw.exec(sql, (err) => {
147
+ if (err)
148
+ return reject(err);
149
+ resolve();
150
+ });
151
+ });
152
+ },
153
+ close() {
154
+ return new Promise((resolve, reject) => {
155
+ raw.close((err) => {
156
+ if (err)
157
+ return reject(err);
158
+ resolve();
159
+ });
160
+ });
161
+ },
162
+ async transaction(fn) {
163
+ await db.exec('BEGIN TRANSACTION');
164
+ try {
165
+ const result = await fn(db);
166
+ await db.exec('COMMIT');
167
+ return result;
168
+ }
169
+ catch (err) {
170
+ await db.exec('ROLLBACK');
171
+ throw err;
172
+ }
173
+ },
174
+ };
175
+ return db;
176
+ }
111
177
  /**
112
178
  * Open (or create) the Rigour SQLite database.
113
- * Returns null if better-sqlite3 is not available.
179
+ * Returns null if sqlite3 is not available.
114
180
  */
115
- export function openDatabase(dbPath) {
116
- const Db = loadDatabase();
117
- if (!Db)
181
+ export async function openDatabase(dbPath) {
182
+ const sqlite3 = loadSqlite3();
183
+ if (!sqlite3)
118
184
  return null;
119
185
  const resolvedPath = dbPath || DB_PATH;
120
186
  fs.ensureDirSync(path.dirname(resolvedPath));
121
- const db = new Db(resolvedPath);
187
+ const raw = await new Promise((resolve, reject) => {
188
+ const instance = new sqlite3.Database(resolvedPath, (err) => {
189
+ if (err)
190
+ return reject(err);
191
+ resolve(instance);
192
+ });
193
+ });
194
+ const db = wrapDatabase(raw);
122
195
  // WAL mode for better concurrent read performance
123
- db.pragma('journal_mode = WAL');
124
- db.pragma('foreign_keys = ON');
196
+ await db.exec('PRAGMA journal_mode = WAL');
197
+ await db.exec('PRAGMA foreign_keys = ON');
125
198
  // Run schema creation + migrations
126
- db.exec(SCHEMA_SQL);
127
- runMigrations(db);
128
- return {
129
- db,
130
- close() {
131
- db.close();
132
- },
133
- };
199
+ await db.exec(SCHEMA_SQL);
200
+ await runMigrations(db);
201
+ return db;
134
202
  }
135
203
  /**
136
204
  * Run incremental schema migrations based on stored version.
137
205
  */
138
- function runMigrations(db) {
139
- const row = db.prepare("SELECT value FROM meta WHERE key = 'schema_version'").get();
206
+ async function runMigrations(db) {
207
+ const row = await db.get("SELECT value FROM meta WHERE key = 'schema_version'");
140
208
  const current = row ? parseInt(row.value, 10) : 0;
141
209
  if (current < 1) {
142
- // v1: base schema (already created by SCHEMA_SQL)
143
- db.prepare("INSERT OR REPLACE INTO meta (key, value) VALUES ('schema_version', '1')").run();
210
+ await db.run("INSERT OR REPLACE INTO meta (key, value) VALUES ('schema_version', '1')");
144
211
  }
145
212
  if (current < 2) {
146
- // v2: retention indexes for compaction queries
147
- db.exec(`
213
+ await db.exec(`
148
214
  CREATE INDEX IF NOT EXISTS idx_findings_file ON findings(file);
149
215
  CREATE INDEX IF NOT EXISTS idx_scans_repo_ts ON scans(repo, timestamp);
150
216
  `);
151
- db.prepare("INSERT OR REPLACE INTO meta (key, value) VALUES ('schema_version', '2')").run();
217
+ await db.run("INSERT OR REPLACE INTO meta (key, value) VALUES ('schema_version', '2')");
152
218
  }
153
219
  // Future: if (current < 3) { ... ALTER TABLE ... }
154
220
  }
155
221
  /**
156
222
  * Compact the database — prune old data, reclaim disk space.
157
- * Retention policy: keep last `retainDays` of findings, merge old patterns.
158
223
  */
159
- export function compactDatabase(retainDays = 90) {
160
- const Db = loadDatabase();
161
- if (!Db)
224
+ export async function compactDatabase(retainDays = 90) {
225
+ const sqlite3 = loadSqlite3();
226
+ if (!sqlite3)
162
227
  return { pruned: 0, patternsDecayed: 0, sizeBefore: 0, sizeAfter: 0 };
163
228
  const resolvedPath = DB_PATH;
164
229
  const sizeBefore = fs.existsSync(resolvedPath) ? fs.statSync(resolvedPath).size : 0;
165
- const db = new Db(resolvedPath);
166
- db.pragma('journal_mode = WAL');
230
+ const db = await openDatabase(resolvedPath);
231
+ if (!db)
232
+ return { pruned: 0, patternsDecayed: 0, sizeBefore, sizeAfter: sizeBefore };
167
233
  const cutoff = Date.now() - (retainDays * 24 * 60 * 60 * 1000);
168
234
  let pruned = 0;
169
235
  let patternsDecayed = 0;
170
236
  try {
171
- db.transaction(() => {
172
- // 1. Delete old findings (keep scan records for trend lines)
173
- const r1 = db.prepare(`
237
+ await db.transaction(async (tx) => {
238
+ const r1 = await tx.run(`
174
239
  DELETE FROM findings WHERE scan_id IN (
175
240
  SELECT id FROM scans WHERE timestamp < ?
176
241
  )
177
- `).run(cutoff);
242
+ `, cutoff);
178
243
  pruned += r1.changes;
179
- // 2. Prune weak patterns (never grew, seen < 3 times)
180
- const r2 = db.prepare("DELETE FROM patterns WHERE strength < 0.3 AND times_seen < 3").run();
244
+ const r2 = await tx.run("DELETE FROM patterns WHERE strength < 0.3 AND times_seen < 3");
181
245
  patternsDecayed += r2.changes;
182
- // 3. Prune orphaned feedback
183
- db.prepare("DELETE FROM feedback WHERE finding_id NOT IN (SELECT id FROM findings)").run();
184
- // 4. Prune old codebase index entries
185
- db.prepare("DELETE FROM codebase WHERE last_indexed < ?").run(cutoff);
186
- })();
187
- // 5. Reclaim disk space
188
- db.exec('VACUUM');
246
+ await tx.run("DELETE FROM feedback WHERE finding_id NOT IN (SELECT id FROM findings)");
247
+ await tx.run("DELETE FROM codebase WHERE last_indexed < ?", cutoff);
248
+ });
249
+ await db.exec('VACUUM');
189
250
  }
190
251
  finally {
191
- db.close();
252
+ await db.close();
192
253
  }
193
254
  const sizeAfter = fs.existsSync(resolvedPath) ? fs.statSync(resolvedPath).size : 0;
194
255
  return { pruned, patternsDecayed, sizeBefore, sizeAfter };
@@ -211,9 +272,9 @@ export function resetDatabase() {
211
272
  fs.removeSync(DB_PATH + '-shm');
212
273
  }
213
274
  /**
214
- * Check if SQLite is available (better-sqlite3 installed)
275
+ * Check if SQLite is available (sqlite3 installed)
215
276
  */
216
277
  export function isSQLiteAvailable() {
217
- return loadDatabase() !== null;
278
+ return loadSqlite3() !== null;
218
279
  }
219
280
  export { RIGOUR_DIR, DB_PATH };
@@ -2,14 +2,15 @@ import type { RigourDB } from './db.js';
2
2
  import type { Failure } from '../types/index.js';
3
3
  /**
4
4
  * Insert findings from a scan report into SQLite.
5
+ * Uses a transaction for atomicity on bulk inserts.
5
6
  */
6
- export declare function insertFindings(store: RigourDB, scanId: string, failures: Failure[]): void;
7
+ export declare function insertFindings(store: RigourDB, scanId: string, failures: Failure[]): Promise<void>;
7
8
  /**
8
9
  * Get findings for a specific scan.
9
10
  */
10
- export declare function getFindingsForScan(store: RigourDB, scanId: string): any[];
11
+ export declare function getFindingsForScan(store: RigourDB, scanId: string): Promise<any[]>;
11
12
  /**
12
13
  * Get deep analysis and high-confidence AST findings for a repo.
13
14
  * Used by local memory to match known patterns against new scans.
14
15
  */
15
- export declare function getDeepFindings(store: RigourDB, repo: string, limit?: number): any[];
16
+ export declare function getDeepFindings(store: RigourDB, repo: string, limit?: number): Promise<any[]>;
@@ -4,36 +4,29 @@
4
4
  import { randomUUID } from 'crypto';
5
5
  /**
6
6
  * Insert findings from a scan report into SQLite.
7
+ * Uses a transaction for atomicity on bulk inserts.
7
8
  */
8
- export function insertFindings(store, scanId, failures) {
9
- const stmt = store.db.prepare(`
10
- INSERT INTO findings (id, scan_id, file, line, category, severity, source, provenance, description, suggestion, confidence, verified)
11
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
12
- `);
13
- const insertMany = store.db.transaction((items) => {
14
- for (const f of items) {
15
- stmt.run(randomUUID(), scanId, f.files?.[0] || 'unknown', f.line ?? null, f.category || f.id, f.severity || 'medium', f.source || 'ast', f.provenance || 'traditional', f.details, f.hint ?? null, f.confidence ?? null, f.verified ? 1 : 0);
9
+ export async function insertFindings(store, scanId, failures) {
10
+ await store.transaction(async (tx) => {
11
+ for (const f of failures) {
12
+ await tx.run(`INSERT INTO findings (id, scan_id, file, line, category, severity, source, provenance, description, suggestion, confidence, verified)
13
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, randomUUID(), scanId, f.files?.[0] || 'unknown', f.line ?? null, f.category || f.id, f.severity || 'medium', f.source || 'ast', f.provenance || 'traditional', f.details, f.hint ?? null, f.confidence ?? null, f.verified ? 1 : 0);
16
14
  }
17
15
  });
18
- insertMany(failures);
19
16
  }
20
17
  /**
21
18
  * Get findings for a specific scan.
22
19
  */
23
- export function getFindingsForScan(store, scanId) {
24
- const stmt = store.db.prepare('SELECT * FROM findings WHERE scan_id = ? ORDER BY severity ASC');
25
- return stmt.all(scanId);
20
+ export async function getFindingsForScan(store, scanId) {
21
+ return store.all('SELECT * FROM findings WHERE scan_id = ? ORDER BY severity ASC', scanId);
26
22
  }
27
23
  /**
28
24
  * Get deep analysis and high-confidence AST findings for a repo.
29
25
  * Used by local memory to match known patterns against new scans.
30
26
  */
31
- export function getDeepFindings(store, repo, limit = 50) {
32
- const stmt = store.db.prepare(`
33
- SELECT f.* FROM findings f
34
- JOIN scans s ON f.scan_id = s.id
35
- WHERE s.repo = ? AND (f.source = 'llm' OR f.source = 'hybrid' OR f.confidence >= 0.7)
36
- ORDER BY f.confidence DESC LIMIT ?
37
- `);
38
- return stmt.all(repo, limit);
27
+ export async function getDeepFindings(store, repo, limit = 50) {
28
+ return store.all(`SELECT f.* FROM findings f
29
+ JOIN scans s ON f.scan_id = s.id
30
+ WHERE s.repo = ? AND (f.source = 'llm' OR f.source = 'hybrid' OR f.confidence >= 0.7)
31
+ ORDER BY f.confidence DESC LIMIT ?`, repo, limit);
39
32
  }
@@ -7,10 +7,10 @@ import type { DeepFinding } from '../inference/types.js';
7
7
  * @param cwd - Absolute project root path
8
8
  * @param fileList - Relative file paths (from FileFacts.path or globby)
9
9
  */
10
- export declare function checkLocalPatterns(cwd: string, fileList: string[]): DeepFinding[];
10
+ export declare function checkLocalPatterns(cwd: string, fileList: string[]): Promise<DeepFinding[]>;
11
11
  /**
12
12
  * Post-scan: persist findings and reinforce patterns.
13
- * Wrapped in a single transaction for atomicity.
13
+ * Uses a transaction for atomicity.
14
14
  * Called after every scan (check, scan, scan --deep).
15
15
  */
16
16
  export declare function persistAndReinforce(cwd: string, report: {
@@ -20,11 +20,11 @@ export declare function persistAndReinforce(cwd: string, report: {
20
20
  }, meta?: {
21
21
  deepTier?: string;
22
22
  deepModel?: string;
23
- }): void;
23
+ }): Promise<void>;
24
24
  /**
25
25
  * Get project learning stats (for display in scan output).
26
26
  */
27
- export declare function getProjectStats(cwd: string): ProjectStats | null;
27
+ export declare function getProjectStats(cwd: string): Promise<ProjectStats | null>;
28
28
  export interface ProjectStats {
29
29
  totalScans: number;
30
30
  learnedPatterns: number;
@@ -36,18 +36,18 @@ function buildFileIssueMap(recentFindings) {
36
36
  * @param cwd - Absolute project root path
37
37
  * @param fileList - Relative file paths (from FileFacts.path or globby)
38
38
  */
39
- export function checkLocalPatterns(cwd, fileList) {
40
- const db = openDatabase();
39
+ export async function checkLocalPatterns(cwd, fileList) {
40
+ const db = await openDatabase();
41
41
  if (!db)
42
42
  return [];
43
43
  const repoName = path.basename(cwd);
44
44
  const findings = [];
45
45
  try {
46
- const patterns = getStrongPatterns(db, repoName, INSTANT_MATCH_THRESHOLD);
46
+ const patterns = await getStrongPatterns(db, repoName, INSTANT_MATCH_THRESHOLD);
47
47
  if (patterns.length === 0)
48
48
  return [];
49
49
  // Include both AST and LLM findings from history
50
- const recentFindings = getDeepFindings(db, repoName, 200);
50
+ const recentFindings = await getDeepFindings(db, repoName, 200);
51
51
  if (recentFindings.length === 0)
52
52
  return [];
53
53
  const fileIssueMap = buildFileIssueMap(recentFindings);
@@ -82,35 +82,33 @@ export function checkLocalPatterns(cwd, fileList) {
82
82
  Logger.warn(`Local memory check failed: ${error}`);
83
83
  }
84
84
  finally {
85
- db?.db.close();
85
+ await db?.close();
86
86
  }
87
87
  return findings;
88
88
  }
89
89
  /**
90
90
  * Post-scan: persist findings and reinforce patterns.
91
- * Wrapped in a single transaction for atomicity.
91
+ * Uses a transaction for atomicity.
92
92
  * Called after every scan (check, scan, scan --deep).
93
93
  */
94
- export function persistAndReinforce(cwd, report, meta) {
95
- const db = openDatabase();
94
+ export async function persistAndReinforce(cwd, report, meta) {
95
+ const db = await openDatabase();
96
96
  if (!db)
97
97
  return;
98
98
  const repoName = path.basename(cwd);
99
99
  try {
100
- // Wrap all writes in a single transaction for atomicity
101
- const persist = db.db.transaction(() => {
102
- const scanId = insertScan(db, repoName, report, meta);
100
+ await db.transaction(async (tx) => {
101
+ const scanId = await insertScan(tx, repoName, report, meta);
103
102
  if (report.failures.length > 0) {
104
- insertFindings(db, scanId, report.failures);
103
+ await insertFindings(tx, scanId, report.failures);
105
104
  }
106
105
  for (const f of report.failures) {
107
106
  const category = f.category || f.id;
108
107
  const source = f.source === 'llm' ? 'llm' : 'ast';
109
- reinforcePattern(db, repoName, category, `${f.title}: ${f.details?.substring(0, 120)}`, source);
108
+ await reinforcePattern(tx, repoName, category, `${f.title}: ${f.details?.substring(0, 120)}`, source);
110
109
  }
111
- decayPatterns(db, 30);
110
+ await decayPatterns(tx, 30);
112
111
  });
113
- persist();
114
112
  Logger.info(`Local memory: stored ${report.failures.length} findings, ` +
115
113
  `reinforced ${report.failures.length} patterns for ${repoName}`);
116
114
  }
@@ -118,21 +116,21 @@ export function persistAndReinforce(cwd, report, meta) {
118
116
  Logger.warn(`Local memory persist failed: ${error}`);
119
117
  }
120
118
  finally {
121
- db?.db.close();
119
+ await db?.close();
122
120
  }
123
121
  }
124
122
  /**
125
123
  * Get project learning stats (for display in scan output).
126
124
  */
127
- export function getProjectStats(cwd) {
128
- const db = openDatabase();
125
+ export async function getProjectStats(cwd) {
126
+ const db = await openDatabase();
129
127
  if (!db)
130
128
  return null;
131
129
  const repoName = path.basename(cwd);
132
130
  try {
133
- const scans = getRecentScans(db, repoName, 100);
134
- const patterns = getStrongPatterns(db, repoName, 0.3);
135
- const hardRules = getHardRules(db, repoName);
131
+ const scans = await getRecentScans(db, repoName, 100);
132
+ const patterns = await getStrongPatterns(db, repoName, 0.3);
133
+ const hardRules = await getHardRules(db, repoName);
136
134
  return {
137
135
  totalScans: scans.length,
138
136
  learnedPatterns: patterns.length,
@@ -148,6 +146,6 @@ export function getProjectStats(cwd) {
148
146
  return null;
149
147
  }
150
148
  finally {
151
- db?.db.close();
149
+ await db?.close();
152
150
  }
153
151
  }
@@ -15,21 +15,21 @@ export interface PatternRecord {
15
15
  * If the pattern already exists for this repo, increase strength.
16
16
  * Otherwise, create a new pattern.
17
17
  */
18
- export declare function reinforcePattern(store: RigourDB, repo: string, pattern: string, description: string, source: 'ast' | 'llm' | 'human_feedback'): void;
18
+ export declare function reinforcePattern(store: RigourDB, repo: string, pattern: string, description: string, source: 'ast' | 'llm' | 'human_feedback'): Promise<void>;
19
19
  /**
20
20
  * Decay patterns not seen in the last N days.
21
21
  */
22
- export declare function decayPatterns(store: RigourDB, daysThreshold?: number): number;
22
+ export declare function decayPatterns(store: RigourDB, daysThreshold?: number): Promise<number>;
23
23
  /**
24
24
  * Get strong patterns for a repo (strength > threshold).
25
25
  */
26
- export declare function getStrongPatterns(store: RigourDB, repo: string, threshold?: number): PatternRecord[];
26
+ export declare function getStrongPatterns(store: RigourDB, repo: string, threshold?: number): Promise<PatternRecord[]>;
27
27
  /**
28
28
  * Get all patterns for a repo.
29
29
  */
30
- export declare function getPatterns(store: RigourDB, repo: string): PatternRecord[];
30
+ export declare function getPatterns(store: RigourDB, repo: string): Promise<PatternRecord[]>;
31
31
  /**
32
32
  * Get patterns promoted to hard rules (strength > 0.9).
33
33
  * These can be used as AST-level checks without LLM inference.
34
34
  */
35
- export declare function getHardRules(store: RigourDB, repo: string): PatternRecord[];
35
+ export declare function getHardRules(store: RigourDB, repo: string): Promise<PatternRecord[]>;
@@ -8,55 +8,49 @@ import { randomUUID } from 'crypto';
8
8
  * If the pattern already exists for this repo, increase strength.
9
9
  * Otherwise, create a new pattern.
10
10
  */
11
- export function reinforcePattern(store, repo, pattern, description, source) {
11
+ export async function reinforcePattern(store, repo, pattern, description, source) {
12
12
  const now = Date.now();
13
- const existing = store.db.prepare('SELECT * FROM patterns WHERE repo = ? AND pattern = ?').get(repo, pattern);
13
+ const existing = await store.get('SELECT * FROM patterns WHERE repo = ? AND pattern = ?', repo, pattern);
14
14
  if (existing) {
15
- store.db.prepare(`
16
- UPDATE patterns
17
- SET strength = MIN(strength + 0.15, 1.0),
18
- times_seen = times_seen + 1,
19
- last_seen = ?,
20
- description = COALESCE(?, description)
21
- WHERE id = ?
22
- `).run(now, description, existing.id);
15
+ await store.run(`UPDATE patterns
16
+ SET strength = MIN(strength + 0.15, 1.0),
17
+ times_seen = times_seen + 1,
18
+ last_seen = ?,
19
+ description = COALESCE(?, description)
20
+ WHERE id = ?`, now, description, existing.id);
23
21
  }
24
22
  else {
25
- store.db.prepare(`
26
- INSERT INTO patterns (id, repo, pattern, description, strength, times_seen, first_seen, last_seen, source)
27
- VALUES (?, ?, ?, ?, 0.3, 1, ?, ?, ?)
28
- `).run(randomUUID(), repo, pattern, description, now, now, source);
23
+ await store.run(`INSERT INTO patterns (id, repo, pattern, description, strength, times_seen, first_seen, last_seen, source)
24
+ VALUES (?, ?, ?, ?, 0.3, 1, ?, ?, ?)`, randomUUID(), repo, pattern, description, now, now, source);
29
25
  }
30
26
  }
31
27
  /**
32
28
  * Decay patterns not seen in the last N days.
33
29
  */
34
- export function decayPatterns(store, daysThreshold = 30) {
30
+ export async function decayPatterns(store, daysThreshold = 30) {
35
31
  const cutoff = Date.now() - (daysThreshold * 24 * 60 * 60 * 1000);
36
- const result = store.db.prepare(`
37
- UPDATE patterns SET strength = MAX(strength - 0.05, 0.0)
38
- WHERE last_seen < ?
39
- `).run(cutoff);
32
+ const result = await store.run(`UPDATE patterns SET strength = MAX(strength - 0.05, 0.0)
33
+ WHERE last_seen < ?`, cutoff);
40
34
  // Prune dead patterns
41
- store.db.prepare('DELETE FROM patterns WHERE strength < 0.1').run();
35
+ await store.run('DELETE FROM patterns WHERE strength < 0.1');
42
36
  return result.changes;
43
37
  }
44
38
  /**
45
39
  * Get strong patterns for a repo (strength > threshold).
46
40
  */
47
- export function getStrongPatterns(store, repo, threshold = 0.7) {
48
- return store.db.prepare('SELECT * FROM patterns WHERE repo = ? AND strength >= ? ORDER BY strength DESC').all(repo, threshold);
41
+ export async function getStrongPatterns(store, repo, threshold = 0.7) {
42
+ return store.all('SELECT * FROM patterns WHERE repo = ? AND strength >= ? ORDER BY strength DESC', repo, threshold);
49
43
  }
50
44
  /**
51
45
  * Get all patterns for a repo.
52
46
  */
53
- export function getPatterns(store, repo) {
54
- return store.db.prepare('SELECT * FROM patterns WHERE repo = ? ORDER BY strength DESC').all(repo);
47
+ export async function getPatterns(store, repo) {
48
+ return store.all('SELECT * FROM patterns WHERE repo = ? ORDER BY strength DESC', repo);
55
49
  }
56
50
  /**
57
51
  * Get patterns promoted to hard rules (strength > 0.9).
58
52
  * These can be used as AST-level checks without LLM inference.
59
53
  */
60
- export function getHardRules(store, repo) {
61
- return store.db.prepare('SELECT * FROM patterns WHERE repo = ? AND strength >= 0.9 ORDER BY times_seen DESC').all(repo);
54
+ export async function getHardRules(store, repo) {
55
+ return store.all('SELECT * FROM patterns WHERE repo = ? AND strength >= 0.9 ORDER BY times_seen DESC', repo);
62
56
  }
@@ -21,22 +21,22 @@ export declare function insertScan(store: RigourDB, repo: string, report: Report
21
21
  filesScanned?: number;
22
22
  deepTier?: string;
23
23
  deepModel?: string;
24
- }): string;
24
+ }): Promise<string>;
25
25
  /**
26
26
  * Get recent scans for a repo (newest first).
27
27
  */
28
- export declare function getRecentScans(store: RigourDB, repo: string, limit?: number): ScanRecord[];
28
+ export declare function getRecentScans(store: RigourDB, repo: string, limit?: number): Promise<ScanRecord[]>;
29
29
  /**
30
30
  * Get score trend for a repo.
31
31
  */
32
- export declare function getScoreTrendFromDB(store: RigourDB, repo: string, limit?: number): {
32
+ export declare function getScoreTrendFromDB(store: RigourDB, repo: string, limit?: number): Promise<{
33
33
  scores: number[];
34
34
  direction: 'improving' | 'degrading' | 'stable';
35
- };
35
+ }>;
36
36
  /**
37
37
  * Get most common issue categories for a repo.
38
38
  */
39
- export declare function getTopIssues(store: RigourDB, repo: string, limit?: number): {
39
+ export declare function getTopIssues(store: RigourDB, repo: string, limit?: number): Promise<{
40
40
  category: string;
41
41
  count: number;
42
- }[];
42
+ }[]>;
@@ -5,29 +5,23 @@ import { randomUUID } from 'crypto';
5
5
  /**
6
6
  * Insert a scan record from a Rigour report.
7
7
  */
8
- export function insertScan(store, repo, report, meta) {
8
+ export async function insertScan(store, repo, report, meta) {
9
9
  const id = randomUUID();
10
- const stmt = store.db.prepare(`
11
- INSERT INTO scans (id, repo, commit_hash, timestamp, ai_health_score, code_quality_score, overall_score, files_scanned, duration_ms, deep_tier, deep_model)
12
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
13
- `);
14
- stmt.run(id, repo, meta?.commitHash || null, Date.now(), report.stats.ai_health_score ?? null, report.stats.code_quality_score ?? null, report.stats.score ?? null, meta?.filesScanned ?? null, report.stats.duration_ms, meta?.deepTier ?? null, meta?.deepModel ?? null);
10
+ await store.run(`INSERT INTO scans (id, repo, commit_hash, timestamp, ai_health_score, code_quality_score, overall_score, files_scanned, duration_ms, deep_tier, deep_model)
11
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, id, repo, meta?.commitHash || null, Date.now(), report.stats.ai_health_score ?? null, report.stats.code_quality_score ?? null, report.stats.score ?? null, meta?.filesScanned ?? null, report.stats.duration_ms, meta?.deepTier ?? null, meta?.deepModel ?? null);
15
12
  return id;
16
13
  }
17
14
  /**
18
15
  * Get recent scans for a repo (newest first).
19
16
  */
20
- export function getRecentScans(store, repo, limit = 10) {
21
- const stmt = store.db.prepare(`
22
- SELECT * FROM scans WHERE repo = ? ORDER BY timestamp DESC LIMIT ?
23
- `);
24
- return stmt.all(repo, limit);
17
+ export async function getRecentScans(store, repo, limit = 10) {
18
+ return store.all('SELECT * FROM scans WHERE repo = ? ORDER BY timestamp DESC LIMIT ?', repo, limit);
25
19
  }
26
20
  /**
27
21
  * Get score trend for a repo.
28
22
  */
29
- export function getScoreTrendFromDB(store, repo, limit = 10) {
30
- const scans = getRecentScans(store, repo, limit);
23
+ export async function getScoreTrendFromDB(store, repo, limit = 10) {
24
+ const scans = await getRecentScans(store, repo, limit);
31
25
  const scores = scans
32
26
  .filter(s => s.overall_score != null)
33
27
  .map(s => s.overall_score)
@@ -44,12 +38,9 @@ export function getScoreTrendFromDB(store, repo, limit = 10) {
44
38
  /**
45
39
  * Get most common issue categories for a repo.
46
40
  */
47
- export function getTopIssues(store, repo, limit = 10) {
48
- const stmt = store.db.prepare(`
49
- SELECT f.category, COUNT(*) as count FROM findings f
50
- JOIN scans s ON f.scan_id = s.id
51
- WHERE s.repo = ?
52
- GROUP BY f.category ORDER BY count DESC LIMIT ?
53
- `);
54
- return stmt.all(repo, limit);
41
+ export async function getTopIssues(store, repo, limit = 10) {
42
+ return store.all(`SELECT f.category, COUNT(*) as count FROM findings f
43
+ JOIN scans s ON f.scan_id = s.id
44
+ WHERE s.repo = ?
45
+ GROUP BY f.category ORDER BY count DESC LIMIT ?`, repo, limit);
55
46
  }
@@ -3109,4 +3109,5 @@ export interface DeepOptions {
3109
3109
  apiBaseUrl?: string;
3110
3110
  modelName?: string;
3111
3111
  agents?: number;
3112
+ maxFiles?: number;
3112
3113
  }
@@ -2,7 +2,7 @@ import { globby } from 'globby';
2
2
  import fs from 'fs-extra';
3
3
  import path from 'path';
4
4
  export class FileScanner {
5
- static DEFAULT_PATTERNS = ['**/*.{ts,js,py,css,html,md}'];
5
+ static DEFAULT_PATTERNS = ['**/*.{ts,tsx,js,jsx,mjs,cjs,py,go,rs,rb,cs,java,kt,css,html,md,yaml,yml,toml,json}'];
6
6
  static DEFAULT_IGNORE = [
7
7
  '**/node_modules/**',
8
8
  '**/dist/**',