@cpretzinger/boss-claude 1.0.0 → 1.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +304 -1
- package/bin/boss-claude.js +1138 -0
- package/bin/commands/mode.js +250 -0
- package/bin/onyx-guard.js +259 -0
- package/bin/onyx-guard.sh +251 -0
- package/bin/prompts.js +284 -0
- package/bin/rollback.js +85 -0
- package/bin/setup-wizard.js +492 -0
- package/config/.env.example +17 -0
- package/lib/README.md +83 -0
- package/lib/agent-logger.js +61 -0
- package/lib/agents/memory-engineers/github-memory-engineer.js +251 -0
- package/lib/agents/memory-engineers/postgres-memory-engineer.js +633 -0
- package/lib/agents/memory-engineers/qdrant-memory-engineer.js +358 -0
- package/lib/agents/memory-engineers/redis-memory-engineer.js +383 -0
- package/lib/agents/memory-supervisor.js +526 -0
- package/lib/agents/registry.js +135 -0
- package/lib/auto-monitor.js +131 -0
- package/lib/checkpoint-hook.js +112 -0
- package/lib/checkpoint.js +319 -0
- package/lib/commentator.js +213 -0
- package/lib/context-scribe.js +120 -0
- package/lib/delegation-strategies.js +326 -0
- package/lib/hierarchy-validator.js +643 -0
- package/lib/index.js +15 -0
- package/lib/init-with-mode.js +261 -0
- package/lib/init.js +44 -6
- package/lib/memory-result-aggregator.js +252 -0
- package/lib/memory.js +35 -7
- package/lib/mode-enforcer.js +473 -0
- package/lib/onyx-banner.js +169 -0
- package/lib/onyx-identity.js +214 -0
- package/lib/onyx-monitor.js +381 -0
- package/lib/onyx-reminder.js +188 -0
- package/lib/onyx-tool-interceptor.js +341 -0
- package/lib/onyx-wrapper.js +315 -0
- package/lib/orchestrator-gate.js +334 -0
- package/lib/output-formatter.js +296 -0
- package/lib/postgres.js +1 -1
- package/lib/prompt-injector.js +220 -0
- package/lib/prompts.js +532 -0
- package/lib/session.js +153 -6
- package/lib/setup/README.md +187 -0
- package/lib/setup/env-manager.js +785 -0
- package/lib/setup/error-recovery.js +630 -0
- package/lib/setup/explain-scopes.js +385 -0
- package/lib/setup/github-instructions.js +333 -0
- package/lib/setup/github-repo.js +254 -0
- package/lib/setup/import-credentials.js +498 -0
- package/lib/setup/index.js +62 -0
- package/lib/setup/init-postgres.js +785 -0
- package/lib/setup/init-redis.js +456 -0
- package/lib/setup/integration-test.js +652 -0
- package/lib/setup/progress.js +357 -0
- package/lib/setup/rollback.js +670 -0
- package/lib/setup/rollback.test.js +452 -0
- package/lib/setup/setup-with-rollback.example.js +351 -0
- package/lib/setup/summary.js +400 -0
- package/lib/setup/test-github-setup.js +10 -0
- package/lib/setup/test-postgres-init.js +98 -0
- package/lib/setup/verify-setup.js +102 -0
- package/lib/task-agent-worker.js +235 -0
- package/lib/token-monitor.js +466 -0
- package/lib/tool-wrapper-integration.js +369 -0
- package/lib/tool-wrapper.js +387 -0
- package/lib/validators/README.md +497 -0
- package/lib/validators/config.js +583 -0
- package/lib/validators/config.test.js +175 -0
- package/lib/validators/github.js +310 -0
- package/lib/validators/github.test.js +61 -0
- package/lib/validators/index.js +15 -0
- package/lib/validators/postgres.js +525 -0
- package/package.json +98 -13
- package/scripts/benchmark-memory.js +433 -0
- package/scripts/check-secrets.sh +12 -0
- package/scripts/fetch-todos.mjs +148 -0
- package/scripts/graceful-shutdown.sh +156 -0
- package/scripts/install-onyx-hooks.js +373 -0
- package/scripts/install.js +119 -18
- package/scripts/redis-monitor.js +284 -0
- package/scripts/redis-setup.js +412 -0
- package/scripts/test-memory-retrieval.js +201 -0
- package/scripts/validate-exports.js +68 -0
- package/scripts/validate-package.js +120 -0
- package/scripts/verify-onyx-deployment.js +309 -0
- package/scripts/verify-redis-deployment.js +354 -0
- package/scripts/verify-redis-init.js +219 -0
|
@@ -0,0 +1,526 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* MEMORY SUPERVISOR AGENT
|
|
3
|
+
*
|
|
4
|
+
* Boss-level coordinator that queries 4 specialized engineer agents in PARALLEL
|
|
5
|
+
* to retrieve comprehensive memory data across multiple domains.
|
|
6
|
+
*
|
|
7
|
+
* Architecture:
|
|
8
|
+
* - Redis cache-first strategy (5-minute TTL)
|
|
9
|
+
* - Parallel queries to 4 domain engineers
|
|
10
|
+
* - 5-second timeout for all operations
|
|
11
|
+
* - Result aggregation and ranking
|
|
12
|
+
* - Automatic cache invalidation on updates
|
|
13
|
+
*
|
|
14
|
+
* Engineers:
|
|
15
|
+
* 1. PostgresMemoryEngineer - Database sessions with full-text search
|
|
16
|
+
* 2. RedisMemoryEngineer - Fast cache layer for recent sessions
|
|
17
|
+
* 3. QdrantMemoryEngineer - Vector similarity search (semantic)
|
|
18
|
+
* 4. GitHubMemoryEngineer - Long-term archive in GitHub Issues
|
|
19
|
+
*/
|
|
20
|
+
|
|
21
|
+
import Redis from 'ioredis';
|
|
22
|
+
import crypto from 'crypto';
|
|
23
|
+
import dotenv from 'dotenv';
|
|
24
|
+
import { fileURLToPath } from 'url';
|
|
25
|
+
import { dirname, join } from 'path';
|
|
26
|
+
import { existsSync } from 'fs';
|
|
27
|
+
import os from 'os';
|
|
28
|
+
|
|
29
|
+
// Import real memory engineers
|
|
30
|
+
import { PostgresMemoryEngineer } from './memory-engineers/postgres-memory-engineer.js';
|
|
31
|
+
import { RedisMemoryEngineer } from './memory-engineers/redis-memory-engineer.js';
|
|
32
|
+
import { getQdrantMemoryEngineer } from './memory-engineers/qdrant-memory-engineer.js';
|
|
33
|
+
import { GitHubMemoryEngineer } from './memory-engineers/github-memory-engineer.js';
|
|
34
|
+
|
|
35
|
+
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename); // NOTE(review): not referenced elsewhere in this chunk — confirm before removing

// Load environment variables from the user's boss-claude config
// (~/.boss-claude/.env) when it exists; otherwise rely on the inherited
// process environment (e.g. REDIS_URL set by the shell).
const envPath = join(os.homedir(), '.boss-claude', '.env');
if (existsSync(envPath)) {
  dotenv.config({ path: envPath });
}

// Redis client singleton — created lazily by getRedis() so the module can
// be imported without a live Redis connection.
let redis = null;
|
|
46
|
+
|
|
47
|
+
/**
 * Return the shared ioredis client, creating it on first use.
 * Throws if REDIS_URL is not configured (init has not been run).
 */
function getRedis() {
  if (redis) {
    return redis;
  }
  const url = process.env.REDIS_URL;
  if (!url) {
    throw new Error('REDIS_URL not found. Please run: boss-claude init');
  }
  redis = new Redis(url);
  return redis;
}
|
|
56
|
+
|
|
57
|
+
/**
 * Build a deterministic Redis cache key for a query + options pair.
 * The key is `boss:cache:memory:` followed by the first 16 hex chars of
 * the SHA-256 of the JSON-serialized `{ query, ...options }` object.
 */
function generateCacheKey(query, options = {}) {
  const serialized = JSON.stringify({ query, ...options });
  const digest = crypto
    .createHash('sha256')
    .update(serialized)
    .digest('hex');
  return `boss:cache:memory:${digest.slice(0, 16)}`;
}
|
|
65
|
+
|
|
66
|
+
/**
 * Engineer singletons — created lazily on first call to getEngineers()
 * to avoid circular-import issues at module load time.
 */
let postgresEngineer = null;
let redisEngineer = null;
let qdrantEngineer = null;
let githubEngineer = null;

/**
 * Return the four engineer instances, constructing them on first use.
 * `postgresEngineer` acts as the initialization sentinel for the group.
 */
function getEngineers() {
  if (postgresEngineer) {
    return { postgresEngineer, redisEngineer, qdrantEngineer, githubEngineer };
  }
  postgresEngineer = new PostgresMemoryEngineer();
  redisEngineer = new RedisMemoryEngineer();
  qdrantEngineer = getQdrantMemoryEngineer();
  githubEngineer = new GitHubMemoryEngineer();
  return { postgresEngineer, redisEngineer, qdrantEngineer, githubEngineer };
}
|
|
84
|
+
|
|
85
|
+
/**
 * Resolve the current user id: explicit USER_ID env var, then the shell's
 * USER, then a fixed fallback.
 */
function getUserId() {
  const { USER_ID, USER } = process.env;
  return USER_ID || USER || 'default-user';
}
|
|
91
|
+
|
|
92
|
+
/**
 * Query the Postgres engineer (full-text search over session summaries)
 * with a hard timeout. Never rejects: any error or timeout collapses into
 * an `{ error, results: [] }` response so Promise.all in the supervisor
 * still resolves.
 *
 * Fixes:
 * - Initialize the engineer via getEngineers(); previously this read the
 *   module-level `postgresEngineer`, which is null until getEngineers()
 *   runs, so the first call threw a TypeError.
 * - Clear the timeout timer once the race settles so a fast response does
 *   not leave a dangling timer keeping the event loop alive.
 *
 * @param {string} query - Search query text
 * @param {number} [timeout=5000] - Max time in ms before giving up
 * @returns {Promise<object>} { engineer, domain, results, error? }
 */
async function queryPostgresEngineer(query, timeout = 5000) {
  const { postgresEngineer } = getEngineers();
  let timer;
  const timeoutPromise = new Promise((_, reject) => {
    timer = setTimeout(() => reject(new Error('Postgres engineer timeout')), timeout);
  });

  return Promise.race([
    (async () => {
      try {
        const userId = getUserId();
        // Full-text search on session summaries and context data
        const results = await postgresEngineer.fullTextSearch(userId, query, { limit: 5 });

        return {
          engineer: 'postgres-n8n-specialist',
          domain: 'postgres',
          results: results.map(r => ({
            id: r.id,
            title: `Session: ${r.project || 'Unknown'}`,
            summary: r.summary || '',
            url: `postgres://session/${r.id}`,
            created_at: r.start_time,
            labels: r.context_data?.tags || [],
            relevance: calculateRelevance(query, {
              title: r.project || '',
              summary: r.summary || '',
              labels: r.context_data?.tags || [],
              created_at: r.start_time
            }),
            source: 'postgres-engineer',
            metadata: {
              xp_earned: r.xp_earned,
              tokens_saved: r.tokens_saved,
              duration_seconds: r.duration_seconds
            }
          }))
        };
      } catch (err) {
        console.error('[Postgres Engineer] Error:', err.message);
        return {
          engineer: 'postgres-n8n-specialist',
          domain: 'postgres',
          error: err.message,
          results: []
        };
      }
    })(),
    timeoutPromise
  ])
    .finally(() => clearTimeout(timer))
    .catch(err => ({
      engineer: 'postgres-n8n-specialist',
      domain: 'postgres',
      error: err.message,
      results: []
    }));
}
|
|
144
|
+
|
|
145
|
+
/**
 * Query the Redis engineer (recent-session cache layer) with a hard
 * timeout. Never rejects: errors and timeouts collapse into an
 * `{ error, results: [] }` response.
 *
 * Fixes:
 * - Initialize the engineer via getEngineers(); previously this read the
 *   module-level `redisEngineer`, which is null until getEngineers()
 *   runs, so the first call threw a TypeError.
 * - Clear the timeout timer once the race settles (no dangling timers).
 *
 * @param {string} query - Search query text
 * @param {number} [timeout=5000] - Max time in ms before giving up
 * @returns {Promise<object>} { engineer, domain, results, error? }
 */
async function queryRedisEngineer(query, timeout = 5000) {
  const { redisEngineer } = getEngineers();
  let timer;
  const timeoutPromise = new Promise((_, reject) => {
    timer = setTimeout(() => reject(new Error('Redis engineer timeout')), timeout);
  });

  return Promise.race([
    (async () => {
      try {
        // Query Redis cache for recent sessions
        const response = await redisEngineer.query(query, {}, 5);

        return {
          engineer: 'redis-architect',
          domain: 'redis',
          results: response.results.map(r => ({
            id: r.id || r.session_id,
            title: `Session: ${r.repo?.name || r.project || 'Unknown'}`,
            summary: r.summary || '',
            url: `redis://session/${r.id || r.session_id}`,
            created_at: r.started_at || r.timestamp,
            labels: r.tags || [],
            relevance: calculateRelevance(query, {
              title: r.repo?.name || r.project || '',
              summary: r.summary || '',
              labels: r.tags || [],
              created_at: r.started_at || r.timestamp
            }),
            source: 'redis-engineer',
            cache_source: response.source
          }))
        };
      } catch (err) {
        console.error('[Redis Engineer] Error:', err.message);
        return {
          engineer: 'redis-architect',
          domain: 'redis',
          error: err.message,
          results: []
        };
      }
    })(),
    timeoutPromise
  ])
    .finally(() => clearTimeout(timer))
    .catch(err => ({
      engineer: 'redis-architect',
      domain: 'redis',
      error: err.message,
      results: []
    }));
}
|
|
192
|
+
|
|
193
|
+
/**
 * Query the Qdrant engineer (vector similarity search) with a hard
 * timeout. Never rejects: errors and timeouts collapse into an
 * `{ error, results: [] }` response.
 *
 * Fixes:
 * - Initialize the engineer via getEngineers(); previously this read the
 *   module-level `qdrantEngineer`, which is null until getEngineers()
 *   runs, so the first call threw a TypeError.
 * - Guard `r.metadata` with optional chaining — a hit without metadata
 *   previously crashed the mapping and failed the whole engineer.
 * - Clear the timeout timer once the race settles (no dangling timers).
 *
 * @param {string} query - Search query text
 * @param {number} [timeout=5000] - Max time in ms before giving up
 * @returns {Promise<object>} { engineer, domain, results, error? }
 */
async function queryQdrantEngineer(query, timeout = 5000) {
  const { qdrantEngineer } = getEngineers();
  let timer;
  const timeoutPromise = new Promise((_, reject) => {
    timer = setTimeout(() => reject(new Error('Qdrant engineer timeout')), timeout);
  });

  return Promise.race([
    (async () => {
      try {
        // Vector similarity search: top 5 hits above a 0.7 score threshold
        const results = await qdrantEngineer.searchSimilar(query, 5, 0.7);

        return {
          engineer: 'qdrant-vector-specialist',
          domain: 'qdrant',
          results: results.map(r => ({
            id: r.id,
            title: `Vector Match: ${r.metadata?.title || 'Memory'}`,
            summary: r.text || '',
            url: `qdrant://vector/${r.id}`,
            created_at: r.metadata?.stored_at,
            labels: r.metadata?.tags || [],
            relevance: Math.round(r.score * 10), // Scale 0-1 to 0-10
            source: 'qdrant-engineer',
            vector_score: r.score
          }))
        };
      } catch (err) {
        console.error('[Qdrant Engineer] Error:', err.message);
        return {
          engineer: 'qdrant-vector-specialist',
          domain: 'qdrant',
          error: err.message,
          results: []
        };
      }
    })(),
    timeoutPromise
  ])
    .finally(() => clearTimeout(timer))
    .catch(err => ({
      engineer: 'qdrant-vector-specialist',
      domain: 'qdrant',
      error: err.message,
      results: []
    }));
}
|
|
235
|
+
|
|
236
|
+
/**
 * Query the GitHub engineer (long-term archive in GitHub Issues) with a
 * hard timeout. Never rejects: errors and timeouts collapse into an
 * `{ error, results: [] }` response.
 *
 * Fixes:
 * - Initialize the engineer via getEngineers(); previously this read the
 *   module-level `githubEngineer`, which is null until getEngineers()
 *   runs, so the first call threw a TypeError.
 * - Pass a normalized record to calculateRelevance (matching the other
 *   engineers) — the raw memory object could have an undefined title,
 *   which crashed scoring and failed the whole engineer.
 * - Clear the timeout timer once the race settles (no dangling timers).
 *
 * @param {string} query - Search query text
 * @param {number} [timeout=5000] - Max time in ms before giving up
 * @returns {Promise<object>} { engineer, domain, results, error? }
 */
async function queryGitHubEngineer(query, timeout = 5000) {
  const { githubEngineer } = getEngineers();
  let timer;
  const timeoutPromise = new Promise((_, reject) => {
    timer = setTimeout(() => reject(new Error('GitHub engineer timeout')), timeout);
  });

  return Promise.race([
    (async () => {
      try {
        // Search GitHub Issues archive
        const response = await githubEngineer.searchArchive(query, 5);

        if (!response.success) {
          throw new Error(response.error);
        }

        return {
          engineer: 'github-expert',
          domain: 'github',
          results: response.memories.map(r => ({
            id: r.issue_number,
            title: r.title || '',
            summary: r.summary || '',
            url: r.url,
            created_at: r.created_at,
            labels: r.labels || [],
            relevance: calculateRelevance(query, {
              title: r.title || '',
              summary: r.summary || '',
              labels: r.labels || [],
              created_at: r.created_at
            }),
            source: 'github-engineer'
          }))
        };
      } catch (err) {
        console.error('[GitHub Engineer] Error:', err.message);
        return {
          engineer: 'github-expert',
          domain: 'github',
          error: err.message,
          results: []
        };
      }
    })(),
    timeoutPromise
  ])
    .finally(() => clearTimeout(timer))
    .catch(err => ({
      engineer: 'github-expert',
      domain: 'github',
      error: err.message,
      results: []
    }));
}
|
|
281
|
+
|
|
282
|
+
/**
 * Calculate a heuristic relevance score for a memory result.
 *
 * Scoring:
 * - title contains query:   +10
 * - summary contains query: +5
 * - each matching label:    +3
 * - recency: < 7 days +5, < 30 days +3, < 90 days +1
 *
 * Fixes:
 * - Guard `result.title` — callers sometimes pass records without a
 *   title, which previously threw on `.toLowerCase()` and failed the
 *   whole engineer query.
 * - Only iterate `labels` when it is actually an array, and coerce each
 *   label to a string before matching.
 *
 * @param {string} query - Search query text (case-insensitive match)
 * @param {object} result - { title?, summary?, labels?, created_at? }
 * @returns {number} Non-negative relevance score
 */
function calculateRelevance(query, result) {
  const queryLower = query.toLowerCase();
  const titleLower = (result.title || '').toLowerCase();
  const summaryLower = (result.summary || '').toLowerCase();

  let score = 0;

  // Title match: +10
  if (titleLower.includes(queryLower)) {
    score += 10;
  }

  // Summary match: +5
  if (summaryLower.includes(queryLower)) {
    score += 5;
  }

  // Label match: +3 per label
  if (Array.isArray(result.labels)) {
    for (const label of result.labels) {
      if (String(label).toLowerCase().includes(queryLower)) {
        score += 3;
      }
    }
  }

  // Recency bonus (newer = higher score); an unparseable date yields NaN
  // comparisons, which are all false, so no bonus is applied.
  if (result.created_at) {
    const ageInDays = (Date.now() - new Date(result.created_at).getTime()) / (1000 * 60 * 60 * 24);
    if (ageInDays < 7) score += 5;
    else if (ageInDays < 30) score += 3;
    else if (ageInDays < 90) score += 1;
  }

  return score;
}
|
|
321
|
+
|
|
322
|
+
/**
 * Merge results from all engineer responses into one ranked list.
 * Results are deduplicated by URL (first occurrence wins, keeping that
 * engineer/domain attribution) and sorted by relevance, highest first.
 */
function aggregateResults(engineerResponses) {
  const byUrl = new Map();

  for (const response of engineerResponses) {
    const { engineer, domain } = response;
    for (const result of response.results ?? []) {
      if (byUrl.has(result.url)) {
        continue;
      }
      byUrl.set(result.url, { ...result, engineer, domain });
    }
  }

  // Highest relevance first; missing scores rank as 0.
  return [...byUrl.values()].sort(
    (a, b) => (b.relevance || 0) - (a.relevance || 0)
  );
}
|
|
350
|
+
|
|
351
|
+
/**
 * Main supervisor query function.
 *
 * Cache-first: returns a cached aggregate when available, otherwise fans
 * out to all 4 engineers in parallel, aggregates/ranks their results, and
 * caches the response.
 *
 * Fix: call getEngineers() before spawning the parallel queries — the
 * per-domain query helpers read the module-level engineer singletons,
 * which are only created by getEngineers(); without this the first
 * uncached query hit null engineers.
 *
 * @param {string} query - Search query
 * @param {object} options - Query options
 * @param {boolean} options.useCache - Use Redis cache (default: true)
 * @param {number} options.timeout - Engineer timeout in ms (default: 5000)
 * @param {number} options.cacheTtl - Cache TTL in seconds (default: 300 = 5 min)
 * @param {number} options.limit - Max results to return (default: 10)
 * @returns {object} Aggregated memory results with metadata
 */
export async function queryMemorySupervisor(query, options = {}) {
  const {
    useCache = true,
    timeout = 5000,
    cacheTtl = 300, // 5 minutes
    limit = 10
  } = options;

  const startTime = Date.now();
  const client = getRedis();
  const cacheKey = generateCacheKey(query, { limit });

  // Step 1: Check Redis cache (read errors fall through to a fresh query)
  if (useCache) {
    try {
      const cached = await client.get(cacheKey);
      if (cached) {
        const cachedData = JSON.parse(cached);
        return {
          ...cachedData,
          cache_hit: true,
          query_time_ms: Date.now() - startTime,
          cached_at: cachedData.timestamp
        };
      }
    } catch (err) {
      console.warn('Cache read error:', err.message);
      // Continue with fresh query on cache error
    }
  }

  // Ensure the engineer singletons exist before the per-domain query
  // helpers run — they read the module-level instances that only
  // getEngineers() initializes.
  getEngineers();

  // Step 2: Cache miss - spawn parallel queries to all 4 engineers
  console.log(`[Memory Supervisor] Cache miss for: "${query}"`);
  console.log('[Memory Supervisor] Querying 4 engineers in parallel...');

  const engineerPromises = [
    queryPostgresEngineer(query, timeout),
    queryRedisEngineer(query, timeout),
    queryQdrantEngineer(query, timeout),
    queryGitHubEngineer(query, timeout)
  ];

  // Wait for all engineers (each helper enforces its own timeout and
  // never rejects, so Promise.all cannot fail here)
  const engineerResponses = await Promise.all(engineerPromises);

  // Step 3: Aggregate results (dedupe by URL, rank by relevance)
  const aggregated = aggregateResults(engineerResponses);
  const topResults = aggregated.slice(0, limit);

  // Step 4: Build response
  const response = {
    query,
    results: topResults,
    total_results: aggregated.length,
    engineers_queried: engineerResponses.map(r => ({
      name: r.engineer,
      domain: r.domain,
      result_count: r.results?.length || 0,
      error: r.error || null
    })),
    query_time_ms: Date.now() - startTime,
    timestamp: new Date().toISOString(),
    cache_hit: false
  };

  // Step 5: Cache the results (best-effort; failures are non-fatal)
  if (useCache) {
    try {
      await client.setex(cacheKey, cacheTtl, JSON.stringify(response));
      console.log(`[Memory Supervisor] Cached results (TTL: ${cacheTtl}s)`);
    } catch (err) {
      console.warn('Cache write error:', err.message);
      // Continue even if caching fails
    }
  }

  return response;
}
|
|
440
|
+
|
|
441
|
+
/**
 * Invalidate cache for a specific query, or all memory caches.
 *
 * Fixes:
 * - Targeted invalidation: entries are written under
 *   generateCacheKey(query, { limit }) but this previously deleted
 *   generateCacheKey(query) — a different hash, so the delete never
 *   matched. We now delete the default-limit key (limit: 10, the common
 *   case) as well as the bare key. Non-default limits still require a
 *   full invalidation.
 * - Full invalidation uses SCAN instead of KEYS — KEYS blocks the Redis
 *   server on large keyspaces.
 *
 * @param {string|null} query - Query to invalidate, or null for all
 */
export async function invalidateMemoryCache(query = null) {
  const client = getRedis();

  if (query) {
    // Invalidate specific query: cover both the default-limit key used by
    // queryMemorySupervisor and the option-less key.
    await client.del(
      generateCacheKey(query, { limit: 10 }),
      generateCacheKey(query)
    );
    console.log(`[Memory Supervisor] Invalidated cache for: "${query}"`);
    return;
  }

  // Invalidate all memory caches via non-blocking cursor iteration
  let cursor = '0';
  let removed = 0;
  do {
    const [next, batch] = await client.scan(
      cursor, 'MATCH', 'boss:cache:memory:*', 'COUNT', 100
    );
    cursor = next;
    if (batch.length > 0) {
      removed += await client.del(...batch);
    }
  } while (cursor !== '0');

  if (removed > 0) {
    console.log(`[Memory Supervisor] Invalidated ${removed} cache entries`);
  }
}
|
|
461
|
+
|
|
462
|
+
/**
 * Get cache statistics: number of cached queries plus per-entry details
 * (query text, result count, remaining TTL, cache timestamp).
 *
 * Fixes:
 * - Enumerate keys with SCAN instead of KEYS (KEYS blocks the server).
 * - Fetch TTL + payload for all keys in parallel instead of one awaited
 *   round-trip per key.
 * - Guard JSON.parse so one corrupt entry no longer fails the whole call.
 *
 * @returns {Promise<{total_cached_queries: number, cache_keys: object[]}>}
 */
export async function getMemoryCacheStats() {
  const client = getRedis();

  // Collect matching keys via non-blocking cursor iteration
  let cursor = '0';
  const keys = [];
  do {
    const [next, batch] = await client.scan(
      cursor, 'MATCH', 'boss:cache:memory:*', 'COUNT', 100
    );
    cursor = next;
    keys.push(...batch);
  } while (cursor !== '0');

  const stats = {
    total_cached_queries: keys.length,
    cache_keys: []
  };

  // Fetch TTL and payload for every key concurrently
  const entries = await Promise.all(
    keys.map(async (key) => {
      const [ttl, data] = await Promise.all([client.ttl(key), client.get(key)]);
      return { key, ttl, data };
    })
  );

  for (const { key, ttl, data } of entries) {
    if (!data) continue; // expired between SCAN and GET
    try {
      const parsed = JSON.parse(data);
      stats.cache_keys.push({
        key: key.replace('boss:cache:memory:', ''),
        query: parsed.query,
        result_count: parsed.total_results,
        ttl_remaining: ttl,
        cached_at: parsed.timestamp
      });
    } catch {
      // Skip corrupt entries rather than failing the whole stats call
    }
  }

  return stats;
}
|
|
491
|
+
|
|
492
|
+
/**
 * Close the shared Redis connection and any engineer connections that
 * expose a close() method. Best-effort: errors are logged, not rethrown.
 */
export async function closeConnections() {
  try {
    if (redis) {
      await redis.quit();
      redis = null;
      console.log('[Memory Supervisor] Closed Redis connection');
    }

    // Close engineer connections, in the same order as before:
    // redis, postgres, qdrant, github. Engineers without a close()
    // method (or not yet created) are skipped.
    const engineers = [redisEngineer, postgresEngineer, qdrantEngineer, githubEngineer];
    for (const engineer of engineers) {
      if (engineer && engineer.close) {
        await engineer.close();
      }
    }
  } catch (err) {
    console.warn('[Memory Supervisor] Cleanup error:', err.message);
  }
}
|
|
520
|
+
|
|
521
|
+
// Aggregate default export; each member is also available as a named export.
export default {
  queryMemorySupervisor,
  invalidateMemoryCache,
  getMemoryCacheStats,
  closeConnections
};
|
|
@@ -0,0 +1,135 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* AGENT HIERARCHY REGISTRY
|
|
3
|
+
*
|
|
4
|
+
* Defines the canonical mapping of worker agents to their boss agents.
|
|
5
|
+
* This ensures proper delegation and review chain enforcement.
|
|
6
|
+
*
|
|
7
|
+
* Source: AGENT-HIERARCHY-CANON.md
|
|
8
|
+
*/
|
|
9
|
+
|
|
10
|
+
export const AGENT_HIERARCHY = {
  /**
   * Tier 1: Worker Agents (Domain Specialists).
   * Each worker names the boss agent responsible for reviewing its work.
   */
  workers: {
    'n8n-workflow-architect': {
      boss: 'Boss N8N',
      domain: 'n8n',
      description: 'Creates n8n workflows and automations'
    },
    'postgres-n8n-specialist': {
      boss: 'Boss Postgres',
      domain: 'postgres',
      description: 'Database queries and PostgreSQL schema'
    },
    'redis-architect': {
      boss: 'Boss Redis',
      domain: 'redis',
      description: 'Redis architecture and optimization'
    },
    'github-expert': {
      boss: 'Boss GitHub',
      domain: 'github',
      description: 'GitHub operations and CI/CD'
    },
    'automation-architect': {
      boss: 'Boss GitHub',
      domain: 'automation',
      description: 'CI/CD and automation infrastructure'
    },
    'openai-streaming-expert': {
      boss: 'Boss OpenAI',
      domain: 'openai',
      description: 'OpenAI API integration and streaming'
    },
    'data-science-pipeline': {
      boss: 'Boss Postgres',
      domain: 'data-science',
      description: 'ML and data analysis pipelines'
    }
  },

  /**
   * Tier 2: Boss Agents (Domain + Security Reviewers).
   * Each boss lists the workers it reviews and reports to the meta-boss.
   */
  bosses: {
    'Boss N8N': {
      metaBoss: 'Boss Claude',
      domain: 'n8n',
      workers: ['n8n-workflow-architect'],
      description: 'Reviews n8n workflows for security and correctness'
    },
    'Boss Postgres': {
      metaBoss: 'Boss Claude',
      domain: 'postgres',
      workers: ['postgres-n8n-specialist', 'data-science-pipeline'],
      description: 'Reviews database queries and schema changes'
    },
    'Boss Redis': {
      metaBoss: 'Boss Claude',
      domain: 'redis',
      workers: ['redis-architect'],
      description: 'Reviews Redis configuration and optimization'
    },
    'Boss GitHub': {
      metaBoss: 'Boss Claude',
      domain: 'github',
      workers: ['github-expert', 'automation-architect'],
      description: 'Reviews GitHub operations and automation'
    },
    'Boss OpenAI': {
      metaBoss: 'Boss Claude',
      domain: 'openai',
      workers: ['openai-streaming-expert'],
      description: 'Reviews OpenAI integrations for correctness'
    }
  },

  /**
   * Tier 3: Meta-Boss (Final Security Gate).
   * Cross-domain security review and final approval.
   */
  metaBoss: 'Boss Claude',

  /**
   * Look up the boss config responsible for a worker agent.
   * Throws for unknown worker names.
   */
  getBossForWorker(workerName) {
    if (!(workerName in this.workers)) {
      throw new Error(`Unknown worker agent: ${workerName}`);
    }
    return this.bosses[this.workers[workerName].boss];
  },

  /**
   * List the worker configs supervised by a boss agent.
   * Throws for unknown boss names.
   */
  getWorkersUnderBoss(bossName) {
    const boss = this.bosses[bossName];
    if (boss === undefined) {
      throw new Error(`Unknown boss agent: ${bossName}`);
    }
    return boss.workers.map((name) => this.workers[name]);
  },

  /**
   * Check that a worker -> boss -> meta-boss chain is valid.
   * Returns false (never throws) for unknown names or mismatched links.
   */
  validateChain(workerName, bossName, metaBossName = 'Boss Claude') {
    const worker = this.workers[workerName];
    const boss = this.bosses[bossName];
    return Boolean(
      worker &&
      boss &&
      boss.metaBoss === metaBossName &&
      worker.boss === bossName
    );
  }
};
|
|
134
|
+
|
|
135
|
+
export default AGENT_HIERARCHY;
|