@rigstate/mcp 0.7.4 → 0.7.6

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between versions as they appear in the public registry.
@@ -1,52 +1,19 @@
-
  import { SupabaseClient } from '@supabase/supabase-js';
+ import {
+ GitCommit,
+ DiscoveredFeature,
+ ArchaeologicalReport
+ } from '../lib/types.js';
+ import {
+ parseGitLog,
+ identifyMilestones,
+ analyzeFilesystem,
+ generateDiscoveredFeatures
+ } from '../lib/arch-analysis.js';
 
  /**
  * Brynjar's Archaeological Scan Tool
- *
- * Analyzes Git history and file structure to reconstruct
- * a project's historical context and generate "Ghost Features"
- * that represent completed work.
- */
-
- export interface GitCommit {
- hash: string;
- message: string;
- date: string;
- author: string;
- }
-
- export interface DiscoveredFeature {
- id: string;
- title: string;
- description: string;
- status: 'COMPLETED';
- source: 'git' | 'filesystem' | 'combined';
- evidence: string[];
- estimatedCompletionDate: string;
- priority: number;
- }
-
- export interface ArchaeologicalReport {
- projectId: string;
- scanDate: string;
- gitAnalysis: {
- totalCommits: number;
- analyzedCommits: number;
- milestones: { date: string; summary: string; commits: string[] }[];
- };
- filesystemAnalysis: {
- totalDirectories: number;
- featureDirectories: string[];
- configFiles: string[];
- };
- discoveredFeatures: DiscoveredFeature[];
- recommendations: string[];
- }
-
- /**
- * Fetch Brynjar's persona from the system_prompts table
- */
+ ...
  export async function getBrynjarPersona(supabase: SupabaseClient) {
  const { data: persona, error } = await supabase
  .from('system_prompts')
@@ -66,185 +33,30 @@ export async function getBrynjarPersona(supabase: SupabaseClient) {
  }
 
  /**
- * Parse git log output into structured commits
- */
- function parseGitLog(logOutput: string): GitCommit[] {
- const commits: GitCommit[] = [];
- const entries = logOutput.split('\n---COMMIT---\n').filter(Boolean);
-
- for (const entry of entries) {
- const lines = entry.trim().split('\n');
- if (lines.length >= 3) {
- commits.push({
- hash: lines[0]?.replace('hash:', '').trim() || '',
- date: lines[1]?.replace('date:', '').trim() || '',
- author: lines[2]?.replace('author:', '').trim() || '',
- message: lines.slice(3).join('\n').trim()
- });
- }
- }
-
- return commits;
- }
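For reference, the relocated parseGitLog (now imported from '../lib/arch-analysis.js' above) splits its input on '---COMMIT---' and reads 'hash:', 'date:' and 'author:' prefixes from the first three lines of each entry. A minimal usage sketch; the sample input is illustrative, and the git invocation that produces it is not shown in this diff:

    import { parseGitLog } from '../lib/arch-analysis.js';

    // Hypothetical log text in the format the parser expects.
    const sampleLog = [
      'hash: a1b2c3d',
      'date: 2024-05-01T10:00:00Z',
      'author: Jane Doe',
      'feat: add OAuth login flow'
    ].join('\n') + '\n---COMMIT---\n' + [
      'hash: d4e5f6a',
      'date: 2024-05-03T09:30:00Z',
      'author: Jane Doe',
      'fix: resolve failing Supabase migration'
    ].join('\n');

    const commits = parseGitLog(sampleLog);
    // commits[0] => { hash: 'a1b2c3d', date: '2024-05-01T10:00:00Z', author: 'Jane Doe', message: 'feat: add OAuth login flow' }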
89
-
90
- /**
91
- * Analyze commits to identify major milestones
92
- */
93
- function identifyMilestones(commits: GitCommit[]): { date: string; summary: string; commits: string[] }[] {
94
- const milestones: { date: string; summary: string; commits: string[] }[] = [];
95
-
96
- // Keywords that indicate significant features
97
- const featurePatterns = [
98
- { pattern: /\b(auth|authentication|login|signup|oauth)\b/i, category: 'Authentication System' },
99
- { pattern: /\b(database|schema|migration|supabase|postgres)\b/i, category: 'Database Setup' },
100
- { pattern: /\b(api|endpoint|route)\b/i, category: 'API Development' },
101
- { pattern: /\b(ui|component|layout|design|tailwind)\b/i, category: 'UI/Component Development' },
102
- { pattern: /\b(test|spec|jest|vitest)\b/i, category: 'Testing Infrastructure' },
103
- { pattern: /\b(deploy|ci|cd|github|vercel|docker)\b/i, category: 'DevOps & Deployment' },
104
- { pattern: /\b(feature|implement|add|create|build)\b/i, category: 'Feature Implementation' },
105
- { pattern: /\b(fix|bug|patch|resolve)\b/i, category: 'Bug Fixes & Patches' },
106
- { pattern: /\b(refactor|clean|optimize|improve)\b/i, category: 'Code Quality Improvements' },
107
- { pattern: /\b(docs|readme|documentation)\b/i, category: 'Documentation' },
108
- { pattern: /\b(config|setup|init|scaffold)\b/i, category: 'Project Configuration' },
109
- { pattern: /\b(agent|mcp|ai|llm|openai)\b/i, category: 'AI/Agent Integration' },
110
- { pattern: /\b(roadmap|milestone|chunk)\b/i, category: 'Roadmap System' },
111
- { pattern: /\b(report|pdf|manifest|governance)\b/i, category: 'Reporting & Governance' },
112
- ];
113
-
114
- // Group commits by category
115
- const categoryMap = new Map<string, { commits: GitCommit[]; latestDate: string }>();
116
-
117
- for (const commit of commits) {
118
- for (const { pattern, category } of featurePatterns) {
119
- if (pattern.test(commit.message)) {
120
- if (!categoryMap.has(category)) {
121
- categoryMap.set(category, { commits: [], latestDate: commit.date });
122
- }
123
- const entry = categoryMap.get(category)!;
124
- entry.commits.push(commit);
125
- if (new Date(commit.date) > new Date(entry.latestDate)) {
126
- entry.latestDate = commit.date;
127
- }
128
- break; // Only categorize each commit once
129
- }
130
- }
131
- }
132
-
133
- // Convert to milestones (only include categories with 2+ commits)
134
- for (const [category, data] of categoryMap.entries()) {
135
- if (data.commits.length >= 2) {
136
- milestones.push({
137
- date: data.latestDate,
138
- summary: `${category} (${data.commits.length} commits)`,
139
- commits: data.commits.slice(0, 5).map(c => c.message.split('\n')[0].substring(0, 80))
140
- });
141
- }
142
- }
143
-
144
- // Sort by date descending
145
- milestones.sort((a, b) => new Date(b.date).getTime() - new Date(a.date).getTime());
146
-
147
- return milestones;
148
- }
149
-
150
- /**
151
- * Map directory structure to potential features
152
- */
153
- function analyzeFilesystem(tree: string[]): { featureDirectories: string[]; configFiles: string[] } {
154
- const featurePatterns = [
155
- /^(apps|packages)\/[^/]+\/src\/(components|features|modules)\/[^/]+$/,
156
- /^(apps|packages)\/[^/]+\/src\/app\/[^/]+$/,
157
- /^src\/(components|features|modules|pages)\/[^/]+$/,
158
- /^(apps|packages)\/[^/]+$/,
159
- ];
160
-
161
- const configPatterns = [
162
- /package\.json$/,
163
- /tsconfig.*\.json$/,
164
- /\.env.*$/,
165
- /next\.config\./,
166
- /tailwind\.config\./,
167
- /supabase.*\.toml$/,
168
- ];
169
-
170
- const featureDirectories = tree.filter(path =>
171
- featurePatterns.some(pattern => pattern.test(path))
172
- );
173
-
174
- const configFiles = tree.filter(path =>
175
- configPatterns.some(pattern => pattern.test(path))
176
- );
177
-
178
- return {
179
- featureDirectories: [...new Set(featureDirectories)].slice(0, 20),
180
- configFiles: [...new Set(configFiles)].slice(0, 10)
181
- };
182
- }
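The directory and config patterns above can be exercised directly. A small sketch with hypothetical monorepo paths, assuming analyzeFilesystem is exported from '../lib/arch-analysis.js' as the new imports indicate:

    import { analyzeFilesystem } from '../lib/arch-analysis.js';

    // Hypothetical file tree; only paths matching the regexes above survive the filters.
    const tree = [
      'apps/web/src/components/roadmap',
      'packages/mcp/src/features/audit',
      'apps/web/next.config.mjs',
      'packages/mcp/tsconfig.build.json',
      'README.md'
    ];

    const { featureDirectories, configFiles } = analyzeFilesystem(tree);
    // featureDirectories => ['apps/web/src/components/roadmap', 'packages/mcp/src/features/audit']
    // configFiles        => ['apps/web/next.config.mjs', 'packages/mcp/tsconfig.build.json']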
-
- /**
- * Generate discovered features from milestones and filesystem
- */
- function generateDiscoveredFeatures(
- milestones: { date: string; summary: string; commits: string[] }[],
- filesystemAnalysis: { featureDirectories: string[]; configFiles: string[] }
- ): DiscoveredFeature[] {
- const features: DiscoveredFeature[] = [];
- let priority = 1;
-
- // From Git milestones
- for (const milestone of milestones.slice(0, 10)) {
- const id = `ghost-${Date.now()}-${priority}`;
- features.push({
- id,
- title: milestone.summary.split('(')[0].trim(),
- description: `Reconstructed from ${milestone.commits.length} commits. Last activity: ${new Date(milestone.date).toLocaleDateString()}`,
- status: 'COMPLETED',
- source: 'git',
- evidence: milestone.commits,
- estimatedCompletionDate: milestone.date,
- priority: priority++
- });
- }
-
- // From filesystem (major directories as features)
- const directoryFeatures = filesystemAnalysis.featureDirectories
- .filter(dir => dir.includes('src/') || dir.startsWith('apps/') || dir.startsWith('packages/'))
- .slice(0, 5);
-
- for (const dir of directoryFeatures) {
- const name = dir.split('/').pop() || dir;
- const id = `ghost-fs-${Date.now()}-${priority}`;
-
- // Skip if we already have a similar feature from git
- if (features.some(f => f.title.toLowerCase().includes(name.toLowerCase()))) {
- continue;
- }
-
- features.push({
- id,
- title: `${name.charAt(0).toUpperCase() + name.slice(1)} Module`,
- description: `Detected from directory structure: ${dir}`,
- status: 'COMPLETED',
- source: 'filesystem',
- evidence: [dir],
- estimatedCompletionDate: new Date().toISOString(),
- priority: priority++
- });
- }
-
- return features;
- }
-
+ * Main archaeological scan function
+ ...
  /**
  * Main archaeological scan function
  * Called by the MCP server when Brynjar is invoked
  */
  export async function performArchaeologicalScan(
  supabase: SupabaseClient,
+ userId: string,
  projectId: string,
  gitLog: string,
  fileTree: string[]
  ): Promise<ArchaeologicalReport> {
+ // 0. Verify project access
+ const { data: hasAccess, error: accessError } = await supabase
+ .rpc('check_project_access_secure', {
+ p_project_id: projectId,
+ p_user_id: userId
+ });
+
+ if (accessError || !hasAccess) {
+ throw new Error('Project not found or access denied');
+ }
+
  console.error(`🏛️ Brynjar is performing an archaeological scan for project ${projectId}...`);
 
  // Parse git history
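The access check added here reappears verbatim in every tool touched by this release. A minimal sketch of a shared helper it could be factored into; verifyProjectAccess is hypothetical and not part of the package, while the check_project_access_secure RPC, its parameters, and the error message are taken from the diff:

    import { SupabaseClient } from '@supabase/supabase-js';

    // Hypothetical wrapper around the access check repeated throughout 0.7.6.
    export async function verifyProjectAccess(
      supabase: SupabaseClient,
      projectId: string,
      userId: string
    ): Promise<void> {
      const { data: hasAccess, error: accessError } = await supabase
        .rpc('check_project_access_secure', {
          p_project_id: projectId,
          p_user_id: userId
        });

      if (accessError || !hasAccess) {
        throw new Error('Project not found or access denied');
      }
    }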
@@ -299,9 +111,20 @@ export async function performArchaeologicalScan(
  */
  export async function importGhostFeatures(
  supabase: SupabaseClient,
+ userId: string,
  projectId: string,
  features: DiscoveredFeature[]
  ): Promise<{ success: boolean; imported: number; errors: string[] }> {
+ // 0. Verify project access
+ const { data: hasAccess, error: accessError } = await supabase
+ .rpc('check_project_access_secure', {
+ p_project_id: projectId,
+ p_user_id: userId
+ });
+
+ if (accessError || !hasAccess) {
+ throw new Error('Project not found or access denied');
+ }
  console.error(`🏛️ Brynjar is importing ${features.length} ghost features into the roadmap...`);
 
  const errors: string[] = [];
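Callers of both functions now pass the authenticated user ahead of the project id. An illustrative call-site sketch; the variable names and the module path are assumptions, and gitLog and fileTree are assumed to be collected by the MCP server beforehand:

    import { performArchaeologicalScan, importGhostFeatures } from './archaeological-scan.js';

    // report.discoveredFeatures comes from the ArchaeologicalReport returned by the scan.
    const report = await performArchaeologicalScan(supabase, userId, projectId, gitLog, fileTree);
    const result = await importGhostFeatures(supabase, userId, projectId, report.discoveredFeatures);
    console.error(`Imported ${result.imported} ghost features (${result.errors.length} errors)`);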
@@ -16,21 +16,33 @@ registry.register({
  Can trigger a SOFT LOCK if critical issues are found.`,
  schema: AuditIntegrityGateInputSchema,
  handler: async (args, context) => {
- const result = await runAuditIntegrityGate(context.supabase, args);
+ const result = await runAuditIntegrityGate(context.supabase, context.userId, args);
  return { content: [{ type: 'text', text: result.summary }] };
  }
  });
 
  export async function runAuditIntegrityGate(
  supabase: SupabaseClient,
+ userId: string,
  input: AuditIntegrityGateInput
  ): Promise<IntegrityGateResponse> {
+ // 0. Verify project access
+ const { data: hasAccess, error: accessError } = await supabase
+ .rpc('check_project_access_secure', {
+ p_project_id: input.projectId,
+ p_user_id: userId
+ });
+
+ if (accessError || !hasAccess) {
+ throw new Error('Project not found or access denied');
+ }
+
  const checks: IntegrityCheckResult[] = [];
  let isSoftLocked = false;
 
  // 1. Run Guardian Security Audit (RLS)
  try {
- const rlsResult = await auditRlsStatus(supabase, { projectId: input.projectId });
+ const rlsResult = await auditRlsStatus(supabase, userId, { projectId: input.projectId });
  const unsecuredTables = rlsResult.unsecuredTables || [];
 
  if (unsecuredTables.length > 0) {
@@ -73,7 +85,7 @@ export async function runAuditIntegrityGate(
  const fs = await import('fs/promises');
  const content = await fs.readFile(path, 'utf-8');
 
- const securityResult = await auditSecurityIntegrity(supabase, {
+ const securityResult = await auditSecurityIntegrity(supabase, userId, {
  projectId: input.projectId,
  filePath: path,
  content: content
@@ -108,7 +120,7 @@ export async function runAuditIntegrityGate(
  // 3. Run Sindre Performance Audit (Indexes & N+1)
  if (input.filePaths && input.filePaths.length > 0) {
  try {
- const perfResult = await analyzeDatabasePerformance(supabase, {
+ const perfResult = await analyzeDatabasePerformance(supabase, userId, {
  projectId: input.projectId,
  filePaths: input.filePaths
  });
@@ -5,6 +5,7 @@ import { getProjectMorals } from '../resources/project-morals.js';
 
  export async function checkAgentBridge(
  supabase: SupabaseClient,
+ userId: string,
  projectId: string,
  action: 'check' | 'update' | 'submit_for_review' = 'check',
  bridgeId?: string,
@@ -13,6 +14,16 @@ export async function checkAgentBridge(
  execution_summary?: string, // NEW: required for completion
  proposal?: string
  ): Promise<CheckAgentBridgeResponse> {
+ // 0. Verify project access
+ const { data: hasAccess, error: accessError } = await supabase
+ .rpc('check_project_access_secure', {
+ p_project_id: projectId,
+ p_user_id: userId
+ });
+
+ if (accessError || !hasAccess) {
+ throw new Error('Project not found or access denied');
+ }
 
  // 4. SUBMIT FOR REVIEW MODE
  if (action === 'submit_for_review') {
@@ -15,6 +15,7 @@ registry.register({
  handler: async (args, context) => {
  const result = await checkRulesSync(
  context.supabase,
+ context.userId,
  args.projectId,
  args.currentRulesContent
  );
@@ -37,6 +38,7 @@ const SAFETY_CACHE_RULES = `
 
  export async function checkRulesSync(
  supabase: SupabaseClient,
+ userId: string,
  projectId: string,
  currentRulesContent?: string
  ): Promise<CheckRulesSyncResponse> {
@@ -68,6 +70,17 @@ export async function checkRulesSync(
 
  // Resilience: Wrap DB check in try/catch to handle network failures
  try {
+ // 1. Verify project access
+ const { data: hasAccess, error: accessError } = await supabase
+ .rpc('check_project_access_secure', {
+ p_project_id: projectId,
+ p_user_id: userId
+ });
+
+ if (accessError || !hasAccess) {
+ throw new Error('Project not found or access denied');
+ }
+
  const { data: project, error } = await supabase
  .from('projects')
  .select('name')
@@ -15,6 +15,7 @@ registry.register({
  handler: async (args, context) => {
  const result = await completeRoadmapTask(
  context.supabase,
+ context.userId,
  args.projectId,
  args.summary,
  args.taskId,
@@ -33,12 +34,23 @@ export interface CompleteRoadmapTaskResponse {
 
  export async function completeRoadmapTask(
  supabase: SupabaseClient,
+ userId: string,
  projectId: string,
  summary: string,
  taskId?: string,
  gitDiff?: string,
  integrityGate?: IntegrityGateResponse
  ): Promise<CompleteRoadmapTaskResponse> {
+ // 0. Verify project access
+ const { data: hasAccess, error: accessError } = await supabase
+ .rpc('check_project_access_secure', {
+ p_project_id: projectId,
+ p_user_id: userId
+ });
+
+ if (accessError || !hasAccess) {
+ throw new Error('Project not found or access denied');
+ }
 
  // 1. Identify the task
  let targetTaskId = taskId;
@@ -10,16 +10,31 @@ export async function generateProfessionalPdf(
  ) {
  console.error(`🖋️ The Scribe is preparing a ${reportType} briefing for project ${projectId}...`);
 
- // 1. Fetch persona from Prompt CMS (via Scribe Adapter)
+ // 1. Verify project access
+ const { data: hasAccess, error: accessError } = await supabase
+ .rpc('check_project_access_secure', {
+ p_project_id: projectId,
+ p_user_id: userId
+ });
+
+ if (accessError || !hasAccess) {
+ throw new Error('Project not found or access denied');
+ }
+
+ // 1.5 Fetch persona from Prompt CMS (via Scribe Adapter)
  const persona = await getScribePersona(supabase);
 
  // 2. Fetch Project Metadata (expanded)
- const { data: project } = await supabase
+ const { data: project, error: projectError } = await supabase
  .from('projects')
  .select('name, description, project_type, detected_stack')
  .eq('id', projectId)
  .single();
 
+ if (projectError || !project) {
+ throw new Error('Project details not found');
+ }
+
  const projectName = project?.name || 'Rigstate Project';
  const projectDescription = project?.description || 'A cutting-edge software project built with modern architecture.';
  const projectType = project?.project_type || 'Web Application';
@@ -96,7 +96,11 @@ export async function getNextRoadmapStep(
  }
 
  return {
- nextStep: nextStep as RoadmapChunk,
+ nextStep: {
+ ...nextStep,
+ architectural_brief: nextStep.architectural_brief,
+ context_summary: nextStep.context_summary
+ } as RoadmapChunk,
  message: `Next step found: [Step ${nextStep.step_number}] ${nextStep.title}`
  };
  }
@@ -10,6 +10,7 @@ import type { ProjectContextResponse, TechStackInfo } from '../lib/types.js';
  import { injectGlobalContext } from '../lib/context-engine.js';
  import { registry } from '../lib/tool-registry.js';
  import { GetProjectContextInputSchema } from '../lib/schemas.js';
+ import { buildProjectSummary } from '../lib/project-context-utils.js';
 
  // ============================================
  // Tool Registration
@@ -60,6 +61,7 @@ export async function getProjectContext(
  detected_stack: any;
  repository_tree: any;
  architectural_dna: any;
+ functional_spec: any;
  }
 
  // Fetch project securely via RPC (bypasses RLS with strict checks)
@@ -86,16 +88,15 @@ export async function getProjectContext(
  created_at: projectRow.created_at,
  last_indexed_at: projectRow.last_indexed_at,
  detected_stack: projectRow.detected_stack,
- repository_tree: projectRow.repository_tree
+ repository_tree: projectRow.repository_tree,
+ functional_spec: projectRow.functional_spec
  };
 
  // DNA is now directly available
  const stackDef = (projectRow.architectural_dna as any)?.stack_definition;
 
-
-
  // Continuity Loop: Fetch Active & Next Tasks via secure RPC
- const { data: allChunks, error: chunksError } = await supabase
+ const { data: allChunks } = await supabase
  .rpc('get_roadmap_chunks_secure', {
  p_project_id: projectId,
  p_user_id: userId
@@ -164,80 +165,17 @@ export async function getProjectContext(
  )].slice(0, 10) as string[];
  }
 
- // Build summary
- const summaryParts: string[] = [];
-
- summaryParts.push(`Project Type: ${project.project_type?.toUpperCase() || 'UNKNOWN'}`);
-
- // Add DNA Stack Info if available (Truth)
- if (stackDef) {
- summaryParts.push('\n=== ACTIVE MISSION PARAMETERS ===');
- if (activeTask) {
- summaryParts.push(`⚠️ CURRENT OBJECTIVE: T-${activeTask.step_number}: ${activeTask.title}`);
- summaryParts.push(` Role: ${activeTask.role || 'Developer'}`);
- if (activeTask.instruction_set) {
- summaryParts.push(` Instructions: ${activeTask.instruction_set.substring(0, 200)}...`);
- }
- summaryParts.push(' ACTION: Focus ALL coding efforts on completing this task.');
- } else if (nextTask) {
- summaryParts.push(`⏸ SYSTEM IDLE (Waiting for command)`);
- summaryParts.push(` Suggested Next Mission: T-${nextTask.step_number}: ${nextTask.title}`);
- summaryParts.push(` ACTION: Ask the user "Shall we start T-${nextTask.step_number}?"`);
- } else {
- summaryParts.push('✅ ALL MISSIONS COMPLETE. Awaiting new roadmap items.');
- }
-
- summaryParts.push('\n=== AI BEHAVIORAL INSTRUCTIONS ===');
- if (activeTask) {
- summaryParts.push(`1. FOCUS: The user is working on T-${activeTask.step_number}. Help them complete it.`);
- summaryParts.push(`2. COMPLIANCE: Ensure all code follows project standards.`);
- } else if (nextTask) {
- summaryParts.push(`1. NUDGE: No active task found. Suggest starting T-${nextTask.step_number} (${nextTask.title}).`);
- summaryParts.push(`2. PROACTIVE: Instead of asking "How can I help?", ask "Shall we start on T-${nextTask.step_number}?"`);
- }
-
- summaryParts.push('\n=== CURRENT STACK ===');
- if (stackDef.frontend) summaryParts.push(`Frontend: ${stackDef.frontend.framework} (${stackDef.frontend.language})`);
- if (stackDef.backend) summaryParts.push(`Backend: ${stackDef.backend.service} (${stackDef.backend.database})`);
- if (stackDef.styling) summaryParts.push(`Styling: ${stackDef.styling.framework} ${stackDef.styling.library || ''}`);
- if (stackDef.hosting) summaryParts.push(`Infrastructure: ${stackDef.hosting.provider}`);
- } else {
- // Fallback to detected stack
- if (techStack.framework) summaryParts.push(`Framework: ${techStack.framework}`);
- if (techStack.orm) summaryParts.push(`ORM: ${techStack.orm}`);
- }
-
- if (project.description) {
- summaryParts.push(`\nDescription: ${project.description}`);
- }
-
- // Add Tooling Guidelines (Critical for Universal IDE Support)
- summaryParts.push('\n=== RIGSTATE TOOLING GUIDELINES ===');
- summaryParts.push('You have access to specialized MCP tools. USE THEM TO SUCCEED:');
- summaryParts.push('1. NEVER guess about architecture. Use `query_brain` to search project documentation.');
- summaryParts.push('2. BEFORE coding, check `get_learned_instructions` to see if you have been corrected before.');
- summaryParts.push('3. When finishing a task, ALWAYS update the roadmap using `update_roadmap`.');
- summaryParts.push('4. If you discover a reusable pattern, submit it with `submit_curator_signal`.');
- summaryParts.push('5. For large refactors, use `run_architecture_audit` to check against rules.');
- summaryParts.push('6. Store major decisions using `save_decision` (ADR).');
-
- // Add Digest to Summary
- summaryParts.push('\n=== RECENT ACTIVITY DIGEST ===');
-
- if (agentTasks && agentTasks.length > 0) {
- summaryParts.push('\nLatest AI Executions:');
- agentTasks.forEach((t: any) => {
- const time = t.completed_at ? new Date(t.completed_at).toLocaleString() : 'Recently';
- summaryParts.push(`- [${time}] ${t.roadmap_title || 'Task'}: ${t.execution_summary || 'Completed'}`);
- });
- }
-
- if (roadmapItems && roadmapItems.length > 0) {
- summaryParts.push('\nRoadmap Updates:');
- roadmapItems.forEach((i: any) => {
- summaryParts.push(`- ${i.title} is now ${i.status}`);
- });
- }
+ // Build summary via utility
+ const summary = await buildProjectSummary(
+ project,
+ techStack,
+ activeTask,
+ nextTask,
+ agentTasks || [],
+ roadmapItems || [],
+ stackDef,
+ supabase
+ );
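The inline summary builder has moved to '../lib/project-context-utils.js'. Its signature is not shown in this diff; inferred from the call above, it is roughly the following (parameter names and types are assumptions):

    import type { SupabaseClient } from '@supabase/supabase-js';
    import type { TechStackInfo } from '../lib/types.js';

    // Inferred declaration only; the real implementation lives in project-context-utils.
    export declare function buildProjectSummary(
      project: Record<string, any>,
      techStack: TechStackInfo,
      activeTask: Record<string, any> | null,
      nextTask: Record<string, any> | null,
      agentTasks: Record<string, any>[],
      roadmapItems: Record<string, any>[],
      stackDef: Record<string, any> | undefined,
      supabase: SupabaseClient
    ): Promise<string>;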
 
  const response: ProjectContextResponse = {
  project: {
@@ -249,7 +187,7 @@ export async function getProjectContext(
  lastIndexedAt: project.last_indexed_at
  },
  techStack,
- summary: summaryParts.join('\n') || 'No project context available.'
+ summary
  };
 
  // Phase 8.5.5: Inject Global Context (Curator)
@@ -17,16 +17,26 @@ export const listFeaturesTool: ToolDefinition<typeof InputSchema> = {
  Useful for understanding the strategic context and major milestones.`,
  schema: InputSchema,
  handler: async ({ projectId }, { supabase, userId }) => {
- // 1. Fetch project to verify access and get fallback spec
+ // 1. Verify project access
+ const { data: hasAccess, error: accessError } = await supabase
+ .rpc('check_project_access_secure', {
+ p_project_id: projectId,
+ p_user_id: userId
+ });
+
+ if (accessError || !hasAccess) {
+ throw new Error('Project not found or access denied');
+ }
+
+ // 2. Fetch project details
  const { data: project, error: projectError } = await supabase
  .from('projects')
  .select('id, functional_spec')
  .eq('id', projectId)
- .eq('owner_id', userId)
  .single();
 
  if (projectError || !project) {
- throw new Error('Project not found or access denied');
+ throw new Error('Project details not found');
  }
 
  // 2. Primary Strategy: Fetch from 'project_features'
@@ -74,7 +74,12 @@ export async function listRoadmapTasks(
  priority: t.priority,
  status: t.status,
  step_number: t.step_number,
- prompt_content: t.prompt_content
+ prompt_content: t.prompt_content,
+ architectural_brief: t.architectural_brief,
+ context_summary: t.context_summary,
+ metadata: t.metadata,
+ checklist: t.checklist,
+ tags: t.tags
  })),
  formatted
  };