@rigstate/mcp 0.5.6 → 0.5.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@rigstate/mcp",
-  "version": "0.5.6",
+  "version": "0.5.7",
   "description": "Rigstate MCP Server - Model Context Protocol for AI Editors",
   "type": "module",
   "main": "./dist/index.js",
@@ -38,7 +38,7 @@ class ToolRegistry {
    */
   register<T extends z.ZodType>(tool: ToolDefinition<T>) {
     if (this.tools.has(tool.name)) {
-      console.warn(`Tool '${tool.name}' is already registered. Overwriting.`);
+      console.error(`Tool '${tool.name}' is already registered. Overwriting.`);
     }
     this.tools.set(tool.name, tool);
   }
@@ -130,7 +130,7 @@ export async function analyzeDatabasePerformance(
       }
 
     } catch (e) {
-      console.warn(`Skipping file ${filePath}: ${e}`);
+      console.error(`Skipping file ${filePath}: ${e}`);
     }
   }
 
@@ -1,5 +1,5 @@
-import { promises as fs } from 'fs';
-import * as path from 'path';
+import { promises as fs, existsSync, statSync } from 'node:fs';
+import * as path from 'node:path';
 import { AnalyzeDependencyGraphInput } from '../lib/types.js';
 import { registry } from '../lib/tool-registry.js';
 import { AnalyzeDependencyGraphInputSchema } from '../lib/schemas.js';
@@ -24,7 +24,6 @@ registry.register({
  */
 export async function analyzeDependencyGraph(input: AnalyzeDependencyGraphInput) {
   // Determine root. Input path is relative to CWD usually.
-  // If input.path is absolute, use it. Otherwise resolve from CWD.
   const searchPath = path.isAbsolute(input.path)
     ? input.path
     : path.resolve(process.cwd(), input.path);
@@ -37,33 +36,54 @@ export async function analyzeDependencyGraph(input: AnalyzeDependencyGraphInput)
     };
   }
 
-  // 1. Scan files
+  // 1. Scan package.json for External Deps
+  let externalDeps: Record<string, string> = {};
+  const pkgPath = path.join(process.cwd(), 'package.json');
+  if (existsSync(pkgPath)) {
+    try {
+      const pkgContent = await fs.readFile(pkgPath, 'utf-8');
+      const pkg = JSON.parse(pkgContent);
+      externalDeps = { ...pkg.dependencies, ...pkg.devDependencies };
+    } catch (e) {
+      console.error('Failed to parse package.json', e);
+    }
+  }
+
+  // 2. Scan TypeScripts files for Internal Deps
   const allFiles = await getAllFiles(searchPath);
-  const tsFiles = allFiles.filter(f => /\.(ts|tsx|js|jsx)$/.test(f) && !f.includes('node_modules') && !f.includes('.next') && !f.includes('dist'));
+  const tsFiles = allFiles.filter(f => /\.(ts|tsx|js|jsx)$/.test(f) && !f.includes('node_modules') && !f.includes('dist') && !f.includes('.next'));
 
-  // 2. Build Graph
+  // 3. Build Graph
   const graph: Record<string, string[]> = {};
-  const fileSet = new Set(tsFiles);
 
   for (const file of tsFiles) {
     const content = await fs.readFile(file, 'utf-8');
     const imports = extractImports(content);
 
+    // Resolve imports to file paths relative to searchPath
     const validDeps: string[] = [];
+    const fileDir = path.dirname(file);
 
     for (const imp of imports) {
-      const resolved = resolveImport(file, imp, searchPath);
-      if (resolved && fileSet.has(resolved)) {
-        validDeps.push(resolved);
+      // Check if it's an external dep
+      if (Object.keys(externalDeps).some(d => imp === d || imp.startsWith(d + '/'))) {
+        continue; // Skip external deps in internal graph for now
+      }
+
+      // Local import resolution logic (String based only, no require!)
+      if (imp.startsWith('.') || imp.startsWith('@/')) {
+        const resolved = resolveImportString(file, imp, searchPath);
+        if (resolved && tsFiles.includes(resolved)) {
+          validDeps.push(path.relative(searchPath, resolved));
+        }
       }
     }
 
-    // Use relative paths for the graph to make it readable
     const relFile = path.relative(searchPath, file);
-    graph[relFile] = validDeps.map(d => path.relative(searchPath, d));
+    graph[relFile] = validDeps;
   }
 
-  // 3. Detect Cycles
+  // 4. Detect Cycles
   const cycles = detectCycles(graph);
 
   return {
@@ -71,13 +91,14 @@ export async function analyzeDependencyGraph(input: AnalyzeDependencyGraphInput)
     analyzedPath: searchPath,
     metrics: {
       totalFiles: tsFiles.length,
-      circularDependencies: cycles.length
+      circularDependencies: cycles.length,
+      externalDependencies: Object.keys(externalDeps).length
     },
     cycles: cycles,
     status: cycles.length > 0 ? 'VIOLATION' : 'PASS',
     summary: cycles.length > 0
-      ? `FAILED. Detected ${cycles.length} circular dependencies. These must be resolved to maintain architectural integrity.`
-      : `PASSED. No circular dependencies detected in ${tsFiles.length} files.`
+      ? `FAILED. Detected ${cycles.length} circular dependencies. Einar demands resolution!`
+      : `PASSED. Architecture is sound. No circular dependencies in ${tsFiles.length} files.`
   };
 }
 
@@ -92,59 +113,53 @@ async function getAllFiles(dir: string): Promise<string[]> {
   return files.flat();
 }
 
-/**
- * Simple regex extraction of import statements.
- * Matches: import ... from '...'
- * Matches: import '...'
- * Matches: export ... from '...'
- */
 function extractImports(content: string): string[] {
+  // Regex to match import ... from "..." or import "..."
   const regex = /from\s+['"]([^'"]+)['"]|import\s+['"]([^'"]+)['"]/g;
   const imports: string[] = [];
   let match;
   while ((match = regex.exec(content)) !== null) {
-    // match[1] is 'from "..."; match[2] is import "...";
     imports.push(match[1] || match[2]);
   }
   return imports;
 }
 
-/**
- * Naive resolver.
- * Handles:
- * - Relative: ./foo, ../bar
- * - Alias: @/ -> searchPath/ (Assumes Next.js style)
- * - Extensions: tries .ts, .tsx, .js, index.ts, etc.
- */
-function resolveImport(importer: string, importPath: string, root: string): string | null {
-  if (!importPath.startsWith('.') && !importPath.startsWith('@/')) {
-    return null; // Ignore node_modules
-  }
-
-  let searchDir = path.dirname(importer);
+function resolveImportString(importer: string, importPath: string, root: string): string | null {
+  let targetDir = path.dirname(importer);
   let target = importPath;
 
   if (importPath.startsWith('@/')) {
     target = importPath.replace('@/', '');
-    searchDir = root; // Assume root is where @/ points to (src or project root)
-    // Adjust for src if root includes src, ensuring we don't double dip?
-    // Actually, usually @/ maps to src/ or root. We'll try relative to 'root'.
+    targetDir = root; // Assume @ maps to root of scan path
   }
 
-  const startPath = path.resolve(searchDir, target);
+  // Construct potential path
+  const naivePath = path.resolve(targetDir, target);
+
+  // Check extensions
+  const extensions = ['.ts', '.tsx', '.js', '.jsx', '/index.ts', '/index.tsx'];
+
+  // We already have the full file list in memory in the main function,
+  // but here we do a quick disk check since we are inside a helper.
+  // For a REALLY robust solution, we should pass the fileSet to this function,
+  // but checking disk is fine for this tool.
 
-  // Try extensions
-  const extensions = ['.ts', '.tsx', '.js', '.jsx', '/index.ts', '/index.tsx', '/index.js', ''];
   for (const ext of extensions) {
-    const candidate = startPath + ext;
-    if (require('fs').existsSync(candidate) && !require('fs').statSync(candidate).isDirectory()) {
+    const candidate = naivePath + ext;
+    if (existsSync(candidate) && !statSync(candidate).isDirectory()) {
       return candidate;
     }
   }
 
+  // Check if it matches exactly (e.g. file.ts was imported as file.ts)
+  if (existsSync(naivePath) && !statSync(naivePath).isDirectory()) {
+    return naivePath;
+  }
+
   return null;
 }
 
+
 function detectCycles(graph: Record<string, string[]>): string[][] {
   const visited = new Set<string>();
   const recursionStack = new Set<string>();
@@ -160,7 +175,7 @@ function detectCycles(graph: Record<string, string[]>): string[][] {
       if (!visited.has(dep)) {
         dfs(dep, path);
       } else if (recursionStack.has(dep)) {
-        // Cycle detected
+        // Cycle found
         const cycleStart = path.indexOf(dep);
         if (cycleStart !== -1) {
           cycles.push([...path.slice(cycleStart), dep]);
@@ -110,7 +110,7 @@ export async function completeRoadmapTask(
   });
 
   if (reportError) {
-    console.warn('Failed to save mission report:', reportError.message);
+    console.error('Failed to save mission report:', reportError.message);
   }
 
   // 5. SOVEREIGN HARVESTING: Trigger Reflection & Skill Extraction (Brynjar)
@@ -17,10 +17,10 @@ export const listFeaturesTool: ToolDefinition<typeof InputSchema> = {
 Useful for understanding the strategic context and major milestones.`,
   schema: InputSchema,
   handler: async ({ projectId }, { supabase, userId }) => {
-    // 1. Verify project ownership
+    // 1. Fetch project to verify access and get fallback spec
     const { data: project, error: projectError } = await supabase
       .from('projects')
-      .select('id')
+      .select('id, functional_spec')
       .eq('id', projectId)
       .eq('owner_id', userId)
       .single();
@@ -29,38 +29,51 @@ Useful for understanding the strategic context and major milestones.`,
       throw new Error('Project not found or access denied');
     }
 
-    // 2. Fetch features
-    const { data: features, error } = await supabase
-      .from('features')
-      .select('id, name, description, priority, status')
+    // 2. Primary Strategy: Fetch from 'project_features'
+    const { data: dbFeatures, error: dbError } = await supabase
+      .from('project_features')
+      .select('id, name, description, status')
       .eq('project_id', projectId)
-      .neq('status', 'ARCHIVED')
-      .order('created_at', { ascending: false }); // Sort by newest (or priority if column existed properly typed)
+      .neq('status', 'shadow') // Exclude shadow features by default unless asked
+      .order('created_at', { ascending: false });
 
-    // Note: 'horizon' was not in my simple schema mental model, removed to be safe or use 'status' as proxy.
-    // Assuming strict schema adherence.
-    // If priority column is text, sorting by it strictly might be weird if not enum.
-    // Let's trust 'created_at' for stability for now.
+    let featuresList: any[] = [];
+    let source = 'DB';
 
-    if (error) {
-      throw new Error(`Failed to fetch features: ${error.message}`);
+    if (!dbError && dbFeatures && dbFeatures.length > 0) {
+      featuresList = dbFeatures.map(f => ({
+        ...f,
+        title: f.name // Map back to title specifically for uniform handling below
+      }));
+    } else {
+      // 3. Fallback Strategy: Extract from 'functional_spec'
+      source = 'FALLBACK_SPEC';
+      // Log warning (In a real system, use a structured logger. Here we console.error to stderr)
+      console.error(`[WARN] Project ${projectId}: 'project_features' empty or missing. Falling back to 'functional_spec'.`);
+
+      const spec = project.functional_spec as any;
+      if (spec && typeof spec === 'object' && Array.isArray(spec.features)) {
+        featuresList = spec.features.map((f: any) => ({
+          id: 'legacy',
+          title: f.name || f.title,
+          description: f.description,
+          status: f.status || 'proposed'
+        }));
+      }
+    }
+
+    // 4. Format Response
+    if (featuresList.length === 0) {
+      return { content: [{ type: 'text', text: 'No active features found (checked DB and Spec).' }] };
     }
 
-    // 3. Format response
-    const formatted = (features || []).length > 0
-      ? (features || []).map(f => {
-          const priorityStr = f.priority === 'MVP' ? '[MVP] ' : '';
-          return `- ${priorityStr}${f.name} (${f.status})`;
-        }).join('\n')
-      : 'No active features found.';
+    const formatted = `=== PROJECT FEATURES (Source: ${source}) ===\n` +
+      featuresList.map(f => {
+        return `- ${f.title} [${f.status}]`;
+      }).join('\n');
 
     return {
-      content: [
-        {
-          type: 'text',
-          text: formatted
-        }
-      ]
+      content: [{ type: 'text', text: formatted }]
     };
   }
 };
@@ -34,37 +34,35 @@ architecture rules, decisions, and constraints.`,
   }
 });
 
-// Generate embedding using the preferred provider (OpenRouter or Google)
+// Generate embedding using the Rigstate Intelligence API (Proxy)
 async function generateQueryEmbedding(query: string): Promise<number[] | null> {
-  const openRouterKey = process.env.OPENROUTER_API_KEY;
-  const googleKey = process.env.GOOGLE_GENERATIVE_AI_API_KEY;
+  const apiKey = process.env.RIGSTATE_API_KEY;
+  const apiUrl = process.env.RIGSTATE_API_URL || 'http://localhost:3000/api/v1';
 
-  if (!openRouterKey && !googleKey) {
+  if (!apiKey) {
     return null;
   }
 
   try {
-    const { embed } = await import('ai');
-
-    if (openRouterKey) {
-      const { createOpenRouter } = await import('@openrouter/ai-sdk-provider');
-      const openrouter = createOpenRouter({ apiKey: openRouterKey });
-
-      // Use Gemini embedding via OpenRouter to maintain 768 dimensions
-      const { embedding } = await embed({
-        model: openrouter.embedding('google/text-embedding-004'),
-        value: query.replace(/\n/g, ' '),
-      });
-      return embedding;
-    } else {
-      const { google } = await import('@ai-sdk/google');
-      const { embedding } = await embed({
-        model: google.embedding('text-embedding-004'),
-        value: query.replace(/\n/g, ' '),
-      });
-      return embedding;
+    const response = await fetch(`${apiUrl}/intelligence/embed`, {
+      method: 'POST',
+      headers: {
+        'Content-Type': 'application/json',
+        'Authorization': `Bearer ${apiKey}`
+      },
+      body: JSON.stringify({ text: query })
+    });
+
+    if (!response.ok) {
+      const errorText = await response.text();
+      console.error(`Embedding API error (${response.status}):`, errorText);
+      return null;
     }
+
+    const result = await response.json() as any;
+    return result.data?.embedding || null;
   } catch (error) {
+    console.error('Failed to generate embedding via Proxy:', error);
     return null;
   }
 }
@@ -136,6 +134,22 @@ export async function queryBrain(
     }));
   }
 
+  // --- NEW: FETCH RELEVANT FEATURES ---
+  // Simple fuzzy search until we vector embedding for features.
+  let relevantFeatures: any[] = [];
+  try {
+    const { data: features } = await supabase
+      .from('project_features')
+      .select('name, status, description')
+      .eq('project_id', projectId)
+      .or(`name.ilike.%${query}%,description.ilike.%${query}%`)
+      .limit(3);
+
+    if (features) relevantFeatures = features;
+  } catch (e) {
+    console.warn('Feature fetch failed in brain query', e);
+  }
+
   // Format memories into a readable context block
   const contextLines = memories.map((m) => {
     const voteIndicator = m.netVotes && m.netVotes < 0 ? ` [⚠️ POORLY RATED: ${m.netVotes}]` : '';
@@ -146,19 +160,23 @@ export async function queryBrain(
 
   const searchType = embedding ? 'TRIPLE-HYBRID (Vector + FTS + Fuzzy)' : 'HYBRID (FTS + Fuzzy)';
 
-  const formatted = memories.length > 0
-    ? `=== PROJECT BRAIN: RELEVANT MEMORIES ===
+  let formatted = `=== PROJECT BRAIN: RELEVANT MEMORIES ===
 Search Mode: ${searchType}
-Query: "${query}"
-Found ${memories.length} relevant memories:
+Query: "${query}"`;
 
-${contextLines.join('\n')}
+  if (relevantFeatures.length > 0) {
+    formatted += `\n\n=== RELATED FEATURES ===\n` +
+      relevantFeatures.map((f: any) => `- ${f.name} [${f.status}]`).join('\n');
+  }
+
+  formatted += `\n\nFound ${memories.length} relevant memories:\n\n${contextLines.join('\n')}\n\n==========================================`;
 
-==========================================`
-    : `=== PROJECT BRAIN ===
+  if (memories.length === 0 && relevantFeatures.length === 0) {
+    formatted = `=== PROJECT BRAIN ===
 Query: "${query}"
-No relevant memories found for this query.
+No relevant memories or features found.
 =======================`;
+  }
 
   return {
     query,
@@ -0,0 +1,85 @@
+
+export interface SecurityViolation {
+  id: string;
+  type: string;
+  severity: 'LOW' | 'MEDIUM' | 'HIGH' | 'FATAL';
+  title: string;
+  description: string;
+  recommendation: string;
+}
+
+/**
+ * Checks for Architectural Integrity (SEC-ARCH-01, SEC-ARCH-02, SEC-ARCH-03)
+ */
+export function checkArchitectureIntegrity(filePath: string, content: string): SecurityViolation[] {
+  const violations: SecurityViolation[] = [];
+
+  // Define UI context: Components, Hooks, and client-side pages (rough heuristic)
+  // We exclude 'api', 'actions', 'lib', 'utils' from this restriction generally,
+  // though 'lib'/ 'utils' should also be clean ideally. Let's focus on strict UI layers.
+  const isUI = filePath.includes('/components/') || filePath.includes('/hooks/') || (filePath.includes('/app/') && !filePath.includes('/api/') && !filePath.includes('actions.ts') && !filePath.includes('route.ts'));
+
+  // SEC-ARCH-01: Illegal Supabase Client in UI
+  // Rule: /(import.*from\s+['"]@supabase\/supabase-js['"])/g
+  if (isUI) {
+    const illegalImportRegex = /(import.*from\s+['"]@supabase\/supabase-js['"])/g;
+    if (illegalImportRegex.test(content)) {
+      violations.push({
+        id: 'SEC-ARCH-01',
+        type: 'ARCHITECTURE_VIOLATION',
+        severity: 'FATAL',
+        title: 'Illegal Supabase Client in UI',
+        description: 'Direct import of @supabase/supabase-js in a UI component/hook is strictly forbidden. It bypasses the server boundary.',
+        recommendation: 'Use the `createClient` helper from our utils or Server Actions for data access.'
+      });
+    }
+  }
+
+  // SEC-ARCH-02: Direct Query Pattern in UI
+  // Rule: /\.(from|select|insert|update|delete|rpc)\s*\(/g
+  // Refinement: We specifically target 'supabase.from' effectively or chained calls.
+  // To avoid false positives like Array.from, we can perform a simple check.
+  if (isUI) {
+    const dbActionRegex = /\.(from|select|insert|update|delete|rpc)\s*\(/g;
+    const matches = content.match(dbActionRegex);
+    if (matches) {
+      // Filter out 'Array.from' specifically to permit standard JS patterns
+      const suspicious = matches.some(m => !m.includes('.from') || (m.includes('.from') && content.includes('supabase.from')));
+
+      if (suspicious || (matches.length > 0 && content.includes('supabase'))) {
+        violations.push({
+          id: 'SEC-ARCH-02',
+          type: 'ARCHITECTURE_VIOLATION',
+          severity: 'FATAL',
+          title: 'Direct Database Query in UI',
+          description: 'Detected direct database query pattern (select/insert/update/delete) in a UI component. This exposes logic to the client.',
+          recommendation: 'Move all data fetching logic to Server Components or Server Actions.'
+        });
+      }
+    }
+  }
+
+  // SEC-ARCH-03: Missing 'use server'
+  // Heuristic: If file is in an 'actions' folder OR exports functions ending in 'Action'/'Mutation',
+  // it MUST have 'use server' at the top.
+  const isActionFile = filePath.includes('/actions/') || filePath.includes('actions.ts');
+
+  if (isActionFile) {
+    // Check first 200 chars for 'use server'
+    const header = content.slice(0, 200);
+    const hasUseServer = /['"]use server['"]/.test(header);
+
+    if (!hasUseServer) {
+      violations.push({
+        id: 'SEC-ARCH-03',
+        type: 'ARCHITECTURE_VIOLATION',
+        severity: 'FATAL',
+        title: 'Missing "use server" Directive',
+        description: 'File appears to be a Server Action module but lacks the "use server" directive.',
+        recommendation: 'Add "use server" at the very top of the file.'
+      });
+    }
+  }
+
+  return violations;
+}
@@ -239,3 +239,4 @@ export function checkAntiLazy(filePath: string, content: string): SecurityViolat
 
   return violations;
 }
+
@@ -1,6 +1,7 @@
 import { SupabaseClient } from '@supabase/supabase-js';
 import { AuditRlsStatusInput } from '../lib/types.js';
 import * as Checks from './security-checks.js';
+import * as ArchChecks from './security-checks-arch.js';
 import { registry } from '../lib/tool-registry.js';
 import { AuditRlsStatusInputSchema, AuditSecurityIntegrityInputSchema } from '../lib/schemas.js';
 
@@ -135,6 +136,9 @@ export async function auditSecurityIntegrity(
   const lazyViolations = Checks.checkAntiLazy(filePath, content);
   violations.push(...lazyViolations);
 
+  const archViolations = ArchChecks.checkArchitectureIntegrity(filePath, content);
+  violations.push(...archViolations);
+
   const score = Math.max(0, 100 - (violations.length * 10));
   const passed = !violations.some((v: any) => v.severity === 'HIGH' || v.severity === 'FATAL');