@rigstate/mcp 0.4.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. package/.env.example +8 -0
  2. package/README.md +352 -0
  3. package/dist/index.d.ts +2 -0
  4. package/dist/index.js +3445 -0
  5. package/dist/index.js.map +1 -0
  6. package/package.json +43 -0
  7. package/roadmap.json +531 -0
  8. package/src/agents/the-scribe.ts +122 -0
  9. package/src/index.ts +1792 -0
  10. package/src/lib/supabase.ts +120 -0
  11. package/src/lib/tool-registry.ts +134 -0
  12. package/src/lib/types.ts +415 -0
  13. package/src/lib/utils.ts +10 -0
  14. package/src/resources/project-morals.ts +92 -0
  15. package/src/tools/arch-tools.ts +166 -0
  16. package/src/tools/archaeological-scan.ts +335 -0
  17. package/src/tools/check-agent-bridge.ts +169 -0
  18. package/src/tools/check-rules-sync.ts +85 -0
  19. package/src/tools/complete-roadmap-task.ts +96 -0
  20. package/src/tools/generate-professional-pdf.ts +232 -0
  21. package/src/tools/get-latest-decisions.ts +130 -0
  22. package/src/tools/get-next-roadmap-step.ts +76 -0
  23. package/src/tools/get-project-context.ts +163 -0
  24. package/src/tools/index.ts +17 -0
  25. package/src/tools/list-features.ts +67 -0
  26. package/src/tools/list-roadmap-tasks.ts +61 -0
  27. package/src/tools/pending-tasks.ts +228 -0
  28. package/src/tools/planning-tools.ts +123 -0
  29. package/src/tools/query-brain.ts +125 -0
  30. package/src/tools/research-tools.ts +149 -0
  31. package/src/tools/run-architecture-audit.ts +203 -0
  32. package/src/tools/save-decision.ts +77 -0
  33. package/src/tools/security-tools.ts +82 -0
  34. package/src/tools/submit-idea.ts +66 -0
  35. package/src/tools/sync-ide-rules.ts +76 -0
  36. package/src/tools/teacher-mode.ts +171 -0
  37. package/src/tools/ui-tools.ts +191 -0
  38. package/src/tools/update-roadmap.ts +105 -0
  39. package/tsconfig.json +29 -0
  40. package/tsup.config.ts +16 -0
package/src/resources/project-morals.ts
@@ -0,0 +1,92 @@
+ /**
+  * Rigstate MCP Server - Local Rules Resource
+  *
+  * Reads local .rigstate/rules.md files and injects them as high-priority rules.
+  */
+
+ import * as fs from 'fs';
+ import * as path from 'path';
+
+ export interface LocalRule {
+   source: string;
+   content: string;
+   priority: 'high' | 'medium';
+ }
+
+ export interface ProjectMoralsResponse {
+   rules: LocalRule[];
+   formatted: string;
+ }
+
+ const LOCAL_RULE_FILES = [
+   '.rigstate/rules.md',
+   '.rigstate/morals.md',
+   '.rigstate/standards.md',
+   '.cursor/rules.md',
+   '.cursorrules'
+ ];
+
+ /**
+  * Read local project morals/rules from the filesystem
+  */
+ export function getProjectMorals(workspacePath?: string): ProjectMoralsResponse {
+   const basePath = workspacePath || process.cwd();
+   const rules: LocalRule[] = [];
+
+   for (const ruleFile of LOCAL_RULE_FILES) {
+     const fullPath = path.join(basePath, ruleFile);
+
+     try {
+       if (fs.existsSync(fullPath)) {
+         const content = fs.readFileSync(fullPath, 'utf-8');
+
+         if (content.trim()) {
+           rules.push({
+             source: ruleFile,
+             content: content.trim(),
+             priority: ruleFile.includes('.rigstate') ? 'high' : 'medium'
+           });
+         }
+       }
+     } catch {
+       // File not readable; skip it
+     }
+   }
+
+   // Format for context injection
+   let formatted = '';
+
+   if (rules.length > 0) {
+     formatted = `## LOCAL PROJECT MORALS (HIGH PRIORITY)\n`;
+     formatted += `These rules are defined in the local workspace and MUST be followed:\n\n`;
+
+     for (const rule of rules) {
+       formatted += `### Source: ${rule.source}\n`;
+       formatted += rule.content;
+       formatted += '\n\n';
+     }
+   } else {
+     formatted = `No local rules found. You can create .rigstate/rules.md to define project-specific guidelines.`;
+   }
+
+   return {
+     rules,
+     formatted
+   };
+ }
+
+ /**
+  * Check if local rules exist
+  */
+ export function hasLocalRules(workspacePath?: string): boolean {
+   const basePath = workspacePath || process.cwd();
+
+   for (const ruleFile of LOCAL_RULE_FILES) {
+     const fullPath = path.join(basePath, ruleFile);
+     if (fs.existsSync(fullPath)) {
+       return true;
+     }
+   }
+
+   return false;
+ }
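
A brief usage sketch (not part of the published diff) of how a caller might consume this resource; the workspace path and call site are hypothetical:

// Hypothetical call site (sketch): inject local rules into an agent's context.
import { getProjectMorals, hasLocalRules } from './resources/project-morals.js';

const workspace = '/path/to/workspace'; // assumption: caller knows the workspace root
if (hasLocalRules(workspace)) {
  const { rules, formatted } = getProjectMorals(workspace);
  // `formatted` is a markdown block ready to prepend to a system prompt.
  console.error(`Loaded ${rules.length} local rule file(s) from ${workspace}`);
}
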
package/src/tools/arch-tools.ts
@@ -0,0 +1,166 @@
+ import { promises as fs, existsSync, statSync } from 'fs';
+ import * as path from 'path';
+ import { AnalyzeDependencyGraphInput } from '../lib/types.js';
+
+ /**
+  * Einar's Tool: Architecture Integrity Scanner
+  * Scans the codebase for circular dependencies and structural violations.
+  */
+ export async function analyzeDependencyGraph(input: AnalyzeDependencyGraphInput) {
+   // Determine the root: if input.path is absolute, use it as-is;
+   // otherwise resolve it relative to the current working directory.
+   const searchPath = path.isAbsolute(input.path)
+     ? input.path
+     : path.resolve(process.cwd(), input.path);
+
+   try {
+     await fs.access(searchPath);
+   } catch {
+     return {
+       error: `Directory not found: ${searchPath}. Ensure you are running the MCP server in the project root or provide an absolute path.`
+     };
+   }
+
+   // 1. Scan files
+   const allFiles = await getAllFiles(searchPath);
+   const tsFiles = allFiles.filter(f => /\.(ts|tsx|js|jsx)$/.test(f) && !f.includes('node_modules') && !f.includes('.next') && !f.includes('dist'));
+
+   // 2. Build the dependency graph
+   const graph: Record<string, string[]> = {};
+   const fileSet = new Set(tsFiles);
+
+   for (const file of tsFiles) {
+     const content = await fs.readFile(file, 'utf-8');
+     const imports = extractImports(content);
+
+     const validDeps: string[] = [];
+
+     for (const imp of imports) {
+       const resolved = resolveImport(file, imp, searchPath);
+       if (resolved && fileSet.has(resolved)) {
+         validDeps.push(resolved);
+       }
+     }
+
+     // Use relative paths for the graph to keep it readable
+     const relFile = path.relative(searchPath, file);
+     graph[relFile] = validDeps.map(d => path.relative(searchPath, d));
+   }
+
+   // 3. Detect cycles
+   const cycles = detectCycles(graph);
+
+   return {
+     timestamp: new Date().toISOString(),
+     analyzedPath: searchPath,
+     metrics: {
+       totalFiles: tsFiles.length,
+       circularDependencies: cycles.length
+     },
+     cycles,
+     status: cycles.length > 0 ? 'VIOLATION' : 'PASS',
+     summary: cycles.length > 0
+       ? `FAILED. Detected ${cycles.length} circular dependencies. These must be resolved to maintain architectural integrity.`
+       : `PASSED. No circular dependencies detected in ${tsFiles.length} files.`
+   };
+ }
+
+ // --- Helpers ---
+
+ async function getAllFiles(dir: string): Promise<string[]> {
+   const entries = await fs.readdir(dir, { withFileTypes: true });
+   const files = await Promise.all(entries.map(async (entry) => {
+     const res = path.resolve(dir, entry.name);
+     return entry.isDirectory() ? getAllFiles(res) : res;
+   }));
+   return files.flat();
+ }
+
+ /**
+  * Simple regex extraction of import statements.
+  * Matches: import ... from '...'
+  * Matches: import '...'
+  * Matches: export ... from '...'
+  */
+ function extractImports(content: string): string[] {
+   const regex = /from\s+['"]([^'"]+)['"]|import\s+['"]([^'"]+)['"]/g;
+   const imports: string[] = [];
+   let match;
+   while ((match = regex.exec(content)) !== null) {
+     // match[1] captures `from '...'` specifiers; match[2] captures bare `import '...'` specifiers
+     imports.push(match[1] || match[2]);
+   }
+   return imports;
+ }
+
+ /**
+  * Naive resolver.
+  * Handles:
+  * - Relative: ./foo, ../bar
+  * - Alias: @/ -> searchPath/ (assumes Next.js-style path mapping)
+  * - Extensions: tries .ts, .tsx, .js, index.ts, etc.
+  */
+ function resolveImport(importer: string, importPath: string, root: string): string | null {
+   if (!importPath.startsWith('.') && !importPath.startsWith('@/')) {
+     return null; // Bare specifier (node_modules); ignore
+   }
+
+   let searchDir = path.dirname(importer);
+   let target = importPath;
+
+   if (importPath.startsWith('@/')) {
+     // '@/' usually maps to src/ or the project root; resolving against
+     // the scanned root covers both common setups.
+     target = importPath.replace('@/', '');
+     searchDir = root;
+   }
+
+   const startPath = path.resolve(searchDir, target);
+
+   // Try extensions (the trailing '' handles imports that already include one)
+   const extensions = ['.ts', '.tsx', '.js', '.jsx', '/index.ts', '/index.tsx', '/index.js', ''];
+   for (const ext of extensions) {
+     const candidate = startPath + ext;
+     if (existsSync(candidate) && !statSync(candidate).isDirectory()) {
+       return candidate;
+     }
+   }
+
+   return null;
+ }
+
+ function detectCycles(graph: Record<string, string[]>): string[][] {
+   const visited = new Set<string>();
+   const recursionStack = new Set<string>();
+   const cycles: string[][] = [];
+
+   function dfs(node: string, path: string[]) {
+     visited.add(node);
+     recursionStack.add(node);
+     path.push(node);
+
+     const deps = graph[node] || [];
+     for (const dep of deps) {
+       if (!visited.has(dep)) {
+         dfs(dep, path);
+       } else if (recursionStack.has(dep)) {
+         // Cycle detected: slice from the first occurrence of dep back to itself
+         const cycleStart = path.indexOf(dep);
+         if (cycleStart !== -1) {
+           cycles.push([...path.slice(cycleStart), dep]);
+         }
+       }
+     }
+
+     recursionStack.delete(node);
+     path.pop();
+   }
+
+   for (const node of Object.keys(graph)) {
+     if (!visited.has(node)) {
+       dfs(node, []);
+     }
+   }
+
+   return cycles;
+ }
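
A hedged sketch of invoking the scanner (it assumes AnalyzeDependencyGraphInput has at least a `path` field, which is all this file reads from it; the call site is illustrative):

// Hypothetical invocation (sketch), e.g. from an MCP tool handler:
import { analyzeDependencyGraph } from './tools/arch-tools.js';

const result = await analyzeDependencyGraph({ path: 'src' });
if ('error' in result) {
  console.error(result.error);
} else {
  // Each cycle is a chain of workspace-relative paths that ends where it
  // started, e.g. ['a.ts', 'b.ts', 'a.ts'].
  console.error(result.summary);
}
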
package/src/tools/archaeological-scan.ts
@@ -0,0 +1,335 @@
+
+ import { SupabaseClient } from '@supabase/supabase-js';
+
+ /**
+  * Brynjar's Archaeological Scan Tool
+  *
+  * Analyzes Git history and file structure to reconstruct
+  * a project's historical context and generate "Ghost Features"
+  * that represent completed work.
+  */
+
+ export interface GitCommit {
+   hash: string;
+   message: string;
+   date: string;
+   author: string;
+ }
+
+ export interface DiscoveredFeature {
+   id: string;
+   title: string;
+   description: string;
+   status: 'COMPLETED';
+   source: 'git' | 'filesystem' | 'combined';
+   evidence: string[];
+   estimatedCompletionDate: string;
+   priority: number;
+ }
+
+ export interface ArchaeologicalReport {
+   projectId: string;
+   scanDate: string;
+   gitAnalysis: {
+     totalCommits: number;
+     analyzedCommits: number;
+     milestones: { date: string; summary: string; commits: string[] }[];
+   };
+   filesystemAnalysis: {
+     totalDirectories: number;
+     featureDirectories: string[];
+     configFiles: string[];
+   };
+   discoveredFeatures: DiscoveredFeature[];
+   recommendations: string[];
+ }
+
+ /**
+  * Fetch Brynjar's persona from the system_prompts table.
+  * Falls back to a built-in persona if the lookup fails.
+  */
+ export async function getBrynjarPersona(supabase: SupabaseClient) {
+   const { data: persona, error } = await supabase
+     .from('system_prompts')
+     .select('*')
+     .eq('slug', 'brynjar')
+     .single();
+
+   if (error || !persona) {
+     return {
+       display_name: 'Brynjar',
+       job_title: 'The Librarian / Project Archivist',
+       content: 'You are Brynjar, the Project Archivist specialized in reconstructing project history.'
+     };
+   }
+
+   return persona;
+ }
+
+ /**
+  * Parse git log output into structured commits.
+  * Expects entries separated by '---COMMIT---', each with 'hash:',
+  * 'date:' and 'author:' header lines followed by the message.
+  */
+ function parseGitLog(logOutput: string): GitCommit[] {
+   const commits: GitCommit[] = [];
+   const entries = logOutput.split('\n---COMMIT---\n').filter(Boolean);
+
+   for (const entry of entries) {
+     const lines = entry.trim().split('\n');
+     if (lines.length >= 3) {
+       commits.push({
+         hash: lines[0]?.replace('hash:', '').trim() || '',
+         date: lines[1]?.replace('date:', '').trim() || '',
+         author: lines[2]?.replace('author:', '').trim() || '',
+         message: lines.slice(3).join('\n').trim()
+       });
+     }
+   }
+
+   return commits;
+ }
+
+ /**
+  * Analyze commits to identify major milestones
+  */
+ function identifyMilestones(commits: GitCommit[]): { date: string; summary: string; commits: string[] }[] {
+   const milestones: { date: string; summary: string; commits: string[] }[] = [];
+
+   // Keywords that indicate significant features
+   const featurePatterns = [
+     { pattern: /\b(auth|authentication|login|signup|oauth)\b/i, category: 'Authentication System' },
+     { pattern: /\b(database|schema|migration|supabase|postgres)\b/i, category: 'Database Setup' },
+     { pattern: /\b(api|endpoint|route)\b/i, category: 'API Development' },
+     { pattern: /\b(ui|component|layout|design|tailwind)\b/i, category: 'UI/Component Development' },
+     { pattern: /\b(test|spec|jest|vitest)\b/i, category: 'Testing Infrastructure' },
+     { pattern: /\b(deploy|ci|cd|github|vercel|docker)\b/i, category: 'DevOps & Deployment' },
+     { pattern: /\b(feature|implement|add|create|build)\b/i, category: 'Feature Implementation' },
+     { pattern: /\b(fix|bug|patch|resolve)\b/i, category: 'Bug Fixes & Patches' },
+     { pattern: /\b(refactor|clean|optimize|improve)\b/i, category: 'Code Quality Improvements' },
+     { pattern: /\b(docs|readme|documentation)\b/i, category: 'Documentation' },
+     { pattern: /\b(config|setup|init|scaffold)\b/i, category: 'Project Configuration' },
+     { pattern: /\b(agent|mcp|ai|llm|openai)\b/i, category: 'AI/Agent Integration' },
+     { pattern: /\b(roadmap|milestone|chunk)\b/i, category: 'Roadmap System' },
+     { pattern: /\b(report|pdf|manifest|governance)\b/i, category: 'Reporting & Governance' },
+   ];
+
+   // Group commits by category
+   const categoryMap = new Map<string, { commits: GitCommit[]; latestDate: string }>();
+
+   for (const commit of commits) {
+     for (const { pattern, category } of featurePatterns) {
+       if (pattern.test(commit.message)) {
+         if (!categoryMap.has(category)) {
+           categoryMap.set(category, { commits: [], latestDate: commit.date });
+         }
+         const entry = categoryMap.get(category)!;
+         entry.commits.push(commit);
+         if (new Date(commit.date) > new Date(entry.latestDate)) {
+           entry.latestDate = commit.date;
+         }
+         break; // Only categorize each commit once
+       }
+     }
+   }
+
+   // Convert to milestones (only include categories with 2+ commits)
+   for (const [category, data] of categoryMap.entries()) {
+     if (data.commits.length >= 2) {
+       milestones.push({
+         date: data.latestDate,
+         summary: `${category} (${data.commits.length} commits)`,
+         commits: data.commits.slice(0, 5).map(c => c.message.split('\n')[0].substring(0, 80))
+       });
+     }
+   }
+
+   // Sort by date descending
+   milestones.sort((a, b) => new Date(b.date).getTime() - new Date(a.date).getTime());
+
+   return milestones;
+ }
+
+ /**
+  * Map directory structure to potential features
+  */
+ function analyzeFilesystem(tree: string[]): { featureDirectories: string[]; configFiles: string[] } {
+   const featurePatterns = [
+     /^(apps|packages)\/[^/]+\/src\/(components|features|modules)\/[^/]+$/,
+     /^(apps|packages)\/[^/]+\/src\/app\/[^/]+$/,
+     /^src\/(components|features|modules|pages)\/[^/]+$/,
+     /^(apps|packages)\/[^/]+$/,
+   ];
+
+   const configPatterns = [
+     /package\.json$/,
+     /tsconfig.*\.json$/,
+     /\.env.*$/,
+     /next\.config\./,
+     /tailwind\.config\./,
+     /supabase.*\.toml$/,
+   ];
+
+   const featureDirectories = tree.filter(path =>
+     featurePatterns.some(pattern => pattern.test(path))
+   );
+
+   const configFiles = tree.filter(path =>
+     configPatterns.some(pattern => pattern.test(path))
+   );
+
+   return {
+     featureDirectories: [...new Set(featureDirectories)].slice(0, 20),
+     configFiles: [...new Set(configFiles)].slice(0, 10)
+   };
+ }
+
+ /**
+  * Generate discovered features from milestones and filesystem analysis
+  */
+ function generateDiscoveredFeatures(
+   milestones: { date: string; summary: string; commits: string[] }[],
+   filesystemAnalysis: { featureDirectories: string[]; configFiles: string[] }
+ ): DiscoveredFeature[] {
+   const features: DiscoveredFeature[] = [];
+   let priority = 1;
+
+   // From Git milestones (evidence is capped at five commits per milestone)
+   for (const milestone of milestones.slice(0, 10)) {
+     const id = `ghost-${Date.now()}-${priority}`;
+     features.push({
+       id,
+       title: milestone.summary.split('(')[0].trim(),
+       description: `Reconstructed from ${milestone.commits.length} commits. Last activity: ${new Date(milestone.date).toLocaleDateString()}`,
+       status: 'COMPLETED',
+       source: 'git',
+       evidence: milestone.commits,
+       estimatedCompletionDate: milestone.date,
+       priority: priority++
+     });
+   }
+
+   // From filesystem (major directories as features)
+   const directoryFeatures = filesystemAnalysis.featureDirectories
+     .filter(dir => dir.includes('src/') || dir.startsWith('apps/') || dir.startsWith('packages/'))
+     .slice(0, 5);
+
+   for (const dir of directoryFeatures) {
+     const name = dir.split('/').pop() || dir;
+     const id = `ghost-fs-${Date.now()}-${priority}`;
+
+     // Skip if we already have a similar feature from git
+     if (features.some(f => f.title.toLowerCase().includes(name.toLowerCase()))) {
+       continue;
+     }
+
+     features.push({
+       id,
+       title: `${name.charAt(0).toUpperCase() + name.slice(1)} Module`,
+       description: `Detected from directory structure: ${dir}`,
+       status: 'COMPLETED',
+       source: 'filesystem',
+       evidence: [dir],
+       estimatedCompletionDate: new Date().toISOString(),
+       priority: priority++
+     });
+   }
+
+   return features;
+ }
+
+ /**
+  * Main archaeological scan function.
+  * Called by the MCP server when Brynjar is invoked.
+  */
+ export async function performArchaeologicalScan(
+   supabase: SupabaseClient,
+   projectId: string,
+   gitLog: string,
+   fileTree: string[]
+ ): Promise<ArchaeologicalReport> {
+   // Log to stderr so stdout stays free for the MCP protocol
+   console.error(`🏛️ Brynjar is performing an archaeological scan for project ${projectId}...`);
+
+   // Parse git history
+   const commits = parseGitLog(gitLog);
+   const milestones = identifyMilestones(commits);
+
+   // Analyze filesystem
+   const filesystemAnalysis = analyzeFilesystem(fileTree);
+
+   // Generate ghost features
+   const discoveredFeatures = generateDiscoveredFeatures(milestones, filesystemAnalysis);
+
+   // Generate recommendations
+   const recommendations: string[] = [];
+
+   if (milestones.length === 0) {
+     recommendations.push('No significant milestones detected. Consider adding more descriptive commit messages.');
+   }
+   if (filesystemAnalysis.featureDirectories.length < 3) {
+     recommendations.push('Project structure appears simple. This may be intentional or early-stage.');
+   }
+   if (discoveredFeatures.length > 0) {
+     recommendations.push(`${discoveredFeatures.length} ghost features ready for import into the roadmap.`);
+   }
+   if (!filesystemAnalysis.configFiles.some(f => f.includes('supabase'))) {
+     recommendations.push('No Supabase configuration detected. Database integration may be pending.');
+   }
+
+   const report: ArchaeologicalReport = {
+     projectId,
+     scanDate: new Date().toISOString(),
+     gitAnalysis: {
+       totalCommits: commits.length,
+       analyzedCommits: Math.min(commits.length, 100),
+       milestones
+     },
+     filesystemAnalysis: {
+       // Heuristic: paths without a dot are treated as directories
+       totalDirectories: fileTree.filter(f => !f.includes('.')).length,
+       ...filesystemAnalysis
+     },
+     discoveredFeatures,
+     recommendations
+   };
+
+   console.error(`🏛️ Brynjar found ${discoveredFeatures.length} ghost features and ${milestones.length} milestones.`);
+
+   return report;
+ }
+
+ /**
+  * Import ghost features into the roadmap as COMPLETED chunks
+  */
+ export async function importGhostFeatures(
+   supabase: SupabaseClient,
+   projectId: string,
+   features: DiscoveredFeature[]
+ ): Promise<{ success: boolean; imported: number; errors: string[] }> {
+   console.error(`🏛️ Brynjar is importing ${features.length} ghost features into the roadmap...`);
+
+   const errors: string[] = [];
+   let imported = 0;
+
+   for (const feature of features) {
+     const { error } = await supabase
+       .from('roadmap_chunks')
+       .insert({
+         project_id: projectId,
+         title: feature.title,
+         description: feature.description,
+         status: 'COMPLETED',
+         priority: feature.priority,
+         is_legacy: true, // Mark as reconstructed historical feature
+         completed_at: feature.estimatedCompletionDate, // Original completion date from Git
+         created_at: new Date().toISOString(), // Import timestamp
+         updated_at: new Date().toISOString()
+       });
+
+     if (error) {
+       errors.push(`Failed to import "${feature.title}": ${error.message}`);
+     } else {
+       imported++;
+     }
+   }
+
+   console.error(`🏛️ Brynjar imported ${imported}/${features.length} ghost features.`);
+
+   return { success: errors.length === 0, imported, errors };
+ }
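
Finally, an end-to-end sketch of the scan-then-import flow. The git log format string, the directory-list derivation, the project id, and the Supabase client setup are all illustrative assumptions; only performArchaeologicalScan and importGhostFeatures come from this file:

// Hypothetical driver (sketch): gather inputs, scan, then import ghost features.
import { execSync } from 'child_process';
import { createClient } from '@supabase/supabase-js';
import { performArchaeologicalScan, importGhostFeatures } from './tools/archaeological-scan.js';

const supabase = createClient(process.env.SUPABASE_URL!, process.env.SUPABASE_KEY!);

// Format chosen to match parseGitLog's '---COMMIT---' delimiter; the leading
// delimiter is stripped because parseGitLog splits on '\n---COMMIT---\n'.
const rawLog = execSync(
  'git log -n 100 --pretty=format:"---COMMIT---%nhash:%H%ndate:%aI%nauthor:%an%n%s"',
  { encoding: 'utf-8' }
);
const gitLog = rawLog.replace(/^---COMMIT---\n/, '');

// analyzeFilesystem matches both files and directories, so include parent
// directories derived from the tracked file list.
const files = execSync('git ls-files', { encoding: 'utf-8' }).split('\n').filter(Boolean);
const dirs = [...new Set(files.map(f => f.split('/').slice(0, -1).join('/')).filter(Boolean))];

const report = await performArchaeologicalScan(supabase, 'project-123', gitLog, [...files, ...dirs]);
const { imported, errors } = await importGhostFeatures(supabase, 'project-123', report.discoveredFeatures);
console.error(`Imported ${imported} ghost features`, errors);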