@ruvector/edge-net 0.1.3 → 0.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +9 -1
- package/real-agents.js +706 -0
- package/sync.js +799 -0
package/real-agents.js
ADDED
|
@@ -0,0 +1,706 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @ruvector/edge-net REAL Agent System
|
|
3
|
+
*
|
|
4
|
+
* Actually functional distributed agents with:
|
|
5
|
+
* - Real LLM API calls (Anthropic Claude, OpenAI)
|
|
6
|
+
* - Real embeddings via ruvector AdaptiveEmbedder
|
|
7
|
+
* - Real relay server sync
|
|
8
|
+
* - Real task execution
|
|
9
|
+
*
|
|
10
|
+
* @module @ruvector/edge-net/real-agents
|
|
11
|
+
*/
|
|
12
|
+
|
|
13
|
+
import { EventEmitter } from 'events';
|
|
14
|
+
import { createHash, randomBytes } from 'crypto';
|
|
15
|
+
import { readFileSync, writeFileSync, existsSync } from 'fs';
|
|
16
|
+
import { join } from 'path';
|
|
17
|
+
|
|
18
|
+
// ============================================
|
|
19
|
+
// LLM PROVIDER CONFIGURATION
|
|
20
|
+
// ============================================
|
|
21
|
+
|
|
22
|
+
// Registry of supported LLM providers.
// Each entry supplies the REST base URL, a tier -> model-id map
// (fast | balanced | powerful), and a headers() factory that builds the
// provider-specific auth headers from an API key. LLMClient resolves a
// provider name against this table in its constructor.
const LLM_PROVIDERS = {
  anthropic: {
    name: 'Anthropic Claude',
    baseUrl: 'https://api.anthropic.com/v1',
    // Tier names are what callers pass around; raw model ids stay here.
    models: {
      fast: 'claude-3-5-haiku-20241022',
      balanced: 'claude-3-5-sonnet-20241022',
      powerful: 'claude-3-opus-20240229',
    },
    // Anthropic authenticates via the x-api-key header plus a pinned API version.
    headers: (apiKey) => ({
      'Content-Type': 'application/json',
      'x-api-key': apiKey,
      'anthropic-version': '2023-06-01',
    }),
  },
  openai: {
    name: 'OpenAI',
    baseUrl: 'https://api.openai.com/v1',
    models: {
      fast: 'gpt-4o-mini',
      balanced: 'gpt-4o',
      powerful: 'gpt-4-turbo',
    },
    // OpenAI uses standard Bearer-token authorization.
    headers: (apiKey) => ({
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${apiKey}`,
    }),
  },
};
|
|
51
|
+
|
|
52
|
+
// Agent type to system prompt mapping
|
|
53
|
+
// Agent type to system prompt mapping.
// Each agent type gets a role-defining system prompt sent as the LLM system
// message. Unknown types fall back to the 'coder' prompt (see the RealAgent
// constructor). NOTE: these template literals are runtime strings — their
// exact text (including the embedded newline) is part of the prompt.
const AGENT_PROMPTS = {
  // Research: analysis, search, summarization, extraction.
  researcher: `You are a research agent. Your task is to analyze, search, summarize, and extract information.
Be thorough and cite sources when possible. Structure your findings clearly.`,

  // Implementation: writing, refactoring, debugging, testing code.
  coder: `You are a coding agent. Your task is to write, refactor, debug, and test code.
Write clean, well-documented code. Follow best practices and explain your approach.`,

  // Review: quality, security, best-practice feedback.
  reviewer: `You are a code review agent. Your task is to review code for quality, security, and best practices.
Be constructive and specific. Identify issues and suggest improvements.`,

  // Testing: test authorship and validation.
  tester: `You are a testing agent. Your task is to write tests, validate functionality, and report issues.
Cover edge cases. Write clear test descriptions.`,

  // Analysis: data analysis, metrics, reporting.
  analyst: `You are an analysis agent. Your task is to analyze data, generate metrics, and create reports.
Be data-driven. Present findings with evidence.`,

  // Optimization: profiling and performance work.
  optimizer: `You are an optimization agent. Your task is to profile, identify bottlenecks, and improve performance.
Quantify improvements. Focus on measurable gains.`,

  // Coordination: workflow orchestration and task routing.
  coordinator: `You are a coordination agent. Your task is to orchestrate workflows, route tasks, and manage schedules.
Be organized and clear about task dependencies.`,

  // Embedding: semantic search / vector work. In practice this type is
  // routed to RealEmbedder rather than the LLM (see RealAgent.execute).
  embedder: `You are an embedding agent specialized in semantic search and vector operations.
Generate high-quality embeddings for text. Optimize for similarity matching.`,
};
|
|
78
|
+
|
|
79
|
+
// ============================================
|
|
80
|
+
// REAL LLM CLIENT
|
|
81
|
+
// ============================================
|
|
82
|
+
|
|
83
|
+
/**
|
|
84
|
+
* Real LLM client that makes actual API calls
|
|
85
|
+
*/
|
|
86
|
+
/**
 * Real LLM client that makes actual API calls.
 *
 * Wraps the Anthropic Messages API and the OpenAI Chat Completions API
 * behind a single `complete()` method that returns a normalized
 * `{ content, model, usage, stopReason }` object.
 */
export class LLMClient {
  /**
   * @param {object} [options]
   * @param {string} [options.provider='anthropic'] - 'anthropic' or 'openai'.
   * @param {string} [options.apiKey] - Explicit API key; falls back to the
   *   environment variable matching the chosen provider.
   * @param {string} [options.model='balanced'] - Default model tier
   *   (fast | balanced | powerful).
   * @param {number} [options.maxTokens=4096] - Default completion token budget.
   * @throws {Error} If the provider name is not in LLM_PROVIDERS.
   */
  constructor(options = {}) {
    this.provider = options.provider || 'anthropic';

    // Validate the provider up front so a typo fails fast, before we pick keys.
    this.config = LLM_PROVIDERS[this.provider];
    if (!this.config) {
      throw new Error(`Unknown LLM provider: ${this.provider}`);
    }

    // BUG FIX: the previous fallback chain (ANTHROPIC_API_KEY || OPENAI_API_KEY)
    // could pair an OpenAI key with the Anthropic endpoint (or vice versa),
    // guaranteeing an auth failure. Resolve the env key that matches the provider.
    const envKey = this.provider === 'openai'
      ? process.env.OPENAI_API_KEY
      : process.env.ANTHROPIC_API_KEY;
    this.apiKey = options.apiKey || envKey;

    this.model = options.model || 'balanced';
    this.maxTokens = options.maxTokens || 4096;

    if (!this.apiKey) {
      console.warn('[LLM] No API key found. Set ANTHROPIC_API_KEY or OPENAI_API_KEY');
    }
  }

  /**
   * Call the LLM API.
   *
   * @param {string} systemPrompt - System message defining the agent role.
   * @param {string} userMessage - User content for the request.
   * @param {object} [options]
   * @param {string} [options.model] - Tier name (fast|balanced|powerful) or an
   *   exact model id; a value that is not a known tier is passed through
   *   verbatim so callers may pin a specific model.
   * @param {number} [options.maxTokens] - Override the default token budget.
   * @returns {Promise<{content: string, model: string, usage: object, stopReason: string}>}
   * @throws {Error} If no API key is configured or the API returns non-2xx.
   */
  async complete(systemPrompt, userMessage, options = {}) {
    if (!this.apiKey) {
      throw new Error('No API key configured. Set ANTHROPIC_API_KEY or OPENAI_API_KEY');
    }

    const requested = options.model || this.model;
    // Tier lookup with pass-through: unknown tiers are treated as raw model ids.
    const model = this.config.models[requested] || requested;

    if (this.provider === 'anthropic') {
      return this.callAnthropic(systemPrompt, userMessage, model, options);
    }
    return this.callOpenAI(systemPrompt, userMessage, model, options);
  }

  /**
   * POST to the Anthropic Messages endpoint and normalize the response.
   * @throws {Error} With status and response body text on non-2xx.
   */
  async callAnthropic(systemPrompt, userMessage, model, options = {}) {
    const response = await fetch(`${this.config.baseUrl}/messages`, {
      method: 'POST',
      headers: this.config.headers(this.apiKey),
      body: JSON.stringify({
        model,
        max_tokens: options.maxTokens || this.maxTokens,
        system: systemPrompt,
        messages: [{ role: 'user', content: userMessage }],
      }),
    });

    if (!response.ok) {
      const error = await response.text();
      throw new Error(`Anthropic API error: ${response.status} - ${error}`);
    }

    const data = await response.json();
    return {
      // Anthropic returns content as an array of blocks; take the first text block.
      content: data.content[0]?.text || '',
      model,
      usage: data.usage,
      stopReason: data.stop_reason,
    };
  }

  /**
   * POST to the OpenAI Chat Completions endpoint and normalize the response.
   * @throws {Error} With status and response body text on non-2xx.
   */
  async callOpenAI(systemPrompt, userMessage, model, options = {}) {
    const response = await fetch(`${this.config.baseUrl}/chat/completions`, {
      method: 'POST',
      headers: this.config.headers(this.apiKey),
      body: JSON.stringify({
        model,
        max_tokens: options.maxTokens || this.maxTokens,
        messages: [
          { role: 'system', content: systemPrompt },
          { role: 'user', content: userMessage },
        ],
      }),
    });

    if (!response.ok) {
      const error = await response.text();
      throw new Error(`OpenAI API error: ${response.status} - ${error}`);
    }

    const data = await response.json();
    return {
      content: data.choices[0]?.message?.content || '',
      model,
      usage: data.usage,
      stopReason: data.choices[0]?.finish_reason,
    };
  }

  /**
   * Check if the LLM is configured (an API key is available).
   * @returns {boolean}
   */
  isConfigured() {
    return !!this.apiKey;
  }
}
|
|
181
|
+
|
|
182
|
+
// ============================================
|
|
183
|
+
// REAL EMBEDDER (uses ruvector)
|
|
184
|
+
// ============================================
|
|
185
|
+
|
|
186
|
+
/**
|
|
187
|
+
* Real embedder using ruvector's AdaptiveEmbedder
|
|
188
|
+
*/
|
|
189
|
+
/**
 * Real embedder backed by ruvector's AdaptiveEmbedder, with a deterministic
 * SHA-256-derived stand-in for environments where ruvector is not installed.
 * The fallback is NOT semantically meaningful — it only supports testing.
 */
export class RealEmbedder {
  /** @param {object} [options] - Reserved for future embedder configuration. */
  constructor(options = {}) {
    this.embedder = null;
    this.initialized = false;
    this.options = options;
  }

  /**
   * Try to load and initialize ruvector's AdaptiveEmbedder.
   * @returns {Promise<boolean>} true when ruvector is available and ready.
   */
  async initialize() {
    try {
      const { AdaptiveEmbedder } = await import('ruvector');
      this.embedder = new AdaptiveEmbedder();
      await this.embedder.initialize();
      this.initialized = true;
      console.log('[Embedder] Initialized ruvector AdaptiveEmbedder');
      return true;
    } catch (error) {
      console.warn('[Embedder] ruvector not available, using fallback:', error.message);
      return false;
    }
  }

  /**
   * Embed a single text, warning when the hash fallback is in use.
   * @param {string} text
   * @returns {Promise<Float32Array>}
   */
  async embed(text) {
    const ready = this.initialized && this.embedder;
    if (!ready) {
      console.warn('[Embedder] Using fallback hash embeddings - install ruvector for real embeddings');
      return this.fallbackEmbed(text);
    }
    return this.embedder.embed(text);
  }

  /**
   * Embed several texts at once.
   * @param {string[]} texts
   * @returns {Promise<Float32Array[]>}
   */
  async embedBatch(texts) {
    if (this.initialized && this.embedder) {
      return this.embedder.embedBatch(texts);
    }
    return Promise.all(texts.map((text) => this.fallbackEmbed(text)));
  }

  /**
   * Deterministic pseudo-embedding: 384 dims derived by cycling through the
   * 32 bytes of a SHA-256 digest, scaled into roughly [-1, 1).
   * NOT for production use.
   * @param {string} text
   * @returns {Float32Array}
   */
  fallbackEmbed(text) {
    const digest = createHash('sha256').update(text).digest();
    return Float32Array.from(
      { length: 384 },
      (_, idx) => (digest[idx % 32] - 128) / 128,
    );
  }

  /**
   * Cosine similarity between two equal-length vectors.
   * @returns {Promise<number>} In [-1, 1]; NaN when either vector is all zeros.
   */
  async cosineSimilarity(a, b) {
    let dotProduct = 0;
    let magA = 0;
    let magB = 0;
    for (let idx = 0; idx < a.length; idx += 1) {
      dotProduct += a[idx] * b[idx];
      magA += a[idx] ** 2;
      magB += b[idx] ** 2;
    }
    return dotProduct / (Math.sqrt(magA) * Math.sqrt(magB));
  }
}
|
|
248
|
+
|
|
249
|
+
// ============================================
|
|
250
|
+
// REAL AGENT
|
|
251
|
+
// ============================================
|
|
252
|
+
|
|
253
|
+
/**
|
|
254
|
+
* Real agent that executes tasks via LLM
|
|
255
|
+
*/
|
|
256
|
+
/**
 * Real agent that executes tasks via LLM.
 *
 * Each agent owns an LLMClient and (for type 'embedder') a RealEmbedder.
 * Emits: 'started', 'completed', 'error'.
 * NOTE(review): 'error' is emitted on a Node EventEmitter — if no 'error'
 * listener is attached, Node throws the payload; confirm callers subscribe.
 */
export class RealAgent extends EventEmitter {
  /**
   * @param {string} type - Agent type; selects the system prompt
   *   (unknown types fall back to the 'coder' prompt).
   * @param {object} [options] - Forwarded to LLMClient (provider, apiKey, model).
   */
  constructor(type, options = {}) {
    super();
    // Unique id: type + timestamp + 4 random bytes.
    this.id = `agent-${type}-${Date.now()}-${randomBytes(4).toString('hex')}`;
    this.type = type;
    this.systemPrompt = AGENT_PROMPTS[type] || AGENT_PROMPTS.coder;
    this.llm = new LLMClient(options);
    this.embedder = null;
    // 'idle' | 'executing' | 'error'
    this.status = 'idle';
    // Completed task records: { task, result, duration, timestamp }.
    this.taskHistory = [];
    // Cumulative token usage across all LLM tasks.
    this.cost = { inputTokens: 0, outputTokens: 0 };
  }

  /**
   * Finish setup; only 'embedder' agents need an embedder instance.
   * @returns {Promise<RealAgent>} this, for chaining.
   */
  async initialize() {
    if (this.type === 'embedder') {
      this.embedder = new RealEmbedder();
      await this.embedder.initialize();
    }
    return this;
  }

  /**
   * Execute a task.
   *
   * Routes to the embedder path for 'embedder' agents, otherwise to the LLM.
   * Records the outcome in taskHistory and emits lifecycle events.
   * @param {string} task - Task description / user message.
   * @param {object} [context] - Optional files, additionalContext, model, texts.
   * @returns {Promise<object>} The task result.
   * @throws {Error} If the LLM is unconfigured (non-embedder agents) or the
   *   underlying call fails; status is left as 'error' in the failure case.
   */
  async execute(task, context = {}) {
    if (!this.llm.isConfigured() && this.type !== 'embedder') {
      throw new Error('LLM not configured. Set ANTHROPIC_API_KEY or OPENAI_API_KEY');
    }

    this.status = 'executing';
    this.emit('started', { id: this.id, type: this.type, task });

    const startTime = Date.now();

    try {
      let result;

      if (this.type === 'embedder' && this.embedder) {
        // Embedding task
        result = await this.executeEmbeddingTask(task, context);
      } else {
        // LLM task
        result = await this.executeLLMTask(task, context);
      }

      const duration = Date.now() - startTime;

      this.taskHistory.push({
        task,
        result,
        duration,
        timestamp: new Date().toISOString(),
      });

      this.status = 'idle';
      this.emit('completed', { id: this.id, result, duration });

      return result;

    } catch (error) {
      // Status intentionally stays 'error' until the next execute() call.
      this.status = 'error';
      this.emit('error', { id: this.id, error: error.message });
      throw error;
    }
  }

  /**
   * Build the user message (task + optional file contents + extra context)
   * and run it through the LLM, accumulating token usage.
   * @returns {Promise<{content, model, stopReason, agentId, agentType}>}
   */
  async executeLLMTask(task, context = {}) {
    // Build user message with context
    let userMessage = task;

    if (context.files && context.files.length > 0) {
      userMessage += '\n\n--- FILES ---\n';
      for (const file of context.files) {
        try {
          const content = readFileSync(file, 'utf-8');
          // Cap each file at 10k chars to bound prompt size.
          userMessage += `\n### ${file}\n\`\`\`\n${content.slice(0, 10000)}\n\`\`\`\n`;
        } catch (e) {
          // Unreadable files are reported inline rather than aborting the task.
          userMessage += `\n### ${file}\n(Could not read file: ${e.message})\n`;
        }
      }
    }

    if (context.additionalContext) {
      userMessage += `\n\n--- ADDITIONAL CONTEXT ---\n${context.additionalContext}`;
    }

    const response = await this.llm.complete(this.systemPrompt, userMessage, {
      model: context.model || 'balanced',
    });

    // Track usage — field names differ per provider (Anthropic: input_tokens/
    // output_tokens; OpenAI: prompt_tokens/completion_tokens).
    if (response.usage) {
      this.cost.inputTokens += response.usage.input_tokens || response.usage.prompt_tokens || 0;
      this.cost.outputTokens += response.usage.output_tokens || response.usage.completion_tokens || 0;
    }

    return {
      content: response.content,
      model: response.model,
      stopReason: response.stopReason,
      agentId: this.id,
      agentType: this.type,
    };
  }

  /**
   * Embed context.texts (or the task string itself) and return a summary
   * with a 10-value preview of each vector, not the full embeddings.
   */
  async executeEmbeddingTask(task, context = {}) {
    const texts = context.texts || [task];
    const embeddings = await this.embedder.embedBatch(texts);

    return {
      embeddings: embeddings.map((e, i) => ({
        text: texts[i].slice(0, 100),
        embedding: Array.from(e).slice(0, 10), // Preview
        dimensions: e.length,
      })),
      count: embeddings.length,
      agentId: this.id,
      agentType: this.type,
    };
  }

  /**
   * Snapshot of the agent's identity, status, workload, and token cost.
   * @returns {object}
   */
  getStats() {
    return {
      id: this.id,
      type: this.type,
      status: this.status,
      tasksCompleted: this.taskHistory.length,
      cost: this.cost,
      // Embedder agents count as configured even without an LLM key.
      configured: this.llm.isConfigured() || this.type === 'embedder',
    };
  }
}
|
|
388
|
+
|
|
389
|
+
// ============================================
|
|
390
|
+
// REAL RELAY SYNC CLIENT
|
|
391
|
+
// ============================================
|
|
392
|
+
|
|
393
|
+
/**
|
|
394
|
+
* Real sync client that connects to the actual relay server
|
|
395
|
+
*/
|
|
396
|
+
/**
 * Real sync client that connects to the actual relay server.
 *
 * Maintains a CRDT-style ledger (per-entry earned/spent maps merged with
 * max-wins semantics; balance derived as earned - spent) and mirrors it to
 * the relay over WebSocket.
 * Emits: 'connected', 'disconnected', 'registered', 'peer_state',
 * 'time_crystal', 'ledger_updated', 'message'.
 */
export class RelaySyncClient extends EventEmitter {
  /**
   * @param {object} [options]
   * @param {string} [options.relayUrl='ws://localhost:8080']
   * @param {string} [options.nodeId] - Defaults to a random hex-suffixed id.
   * @param {number} [options.maxReconnects=10] - Reconnect attempt cap.
   */
  constructor(options = {}) {
    super();
    this.relayUrl = options.relayUrl || 'ws://localhost:8080';
    this.nodeId = options.nodeId || `node-${randomBytes(8).toString('hex')}`;
    this.ws = null;
    this.connected = false;
    this.ledgerState = { earned: {}, spent: {}, balance: 0 };
    this.reconnectAttempts = 0;
    this.maxReconnects = options.maxReconnects || 10;
    // BUG FIX: without this flag, close() fired onclose -> scheduleReconnect,
    // so an intentionally closed client kept reconnecting in the background.
    this.intentionallyClosed = false;
  }

  /**
   * Connect to relay server.
   * Resolves true once the socket opens and registration is sent; rejects on
   * socket error or after a 10s timeout.
   * @returns {Promise<boolean>}
   */
  async connect() {
    // A fresh connect() re-arms auto-reconnect after a previous close().
    this.intentionallyClosed = false;
    return new Promise((resolve, reject) => {
      try {
        // Use dynamic import for WebSocket in Node
        this.loadWebSocket().then(WebSocket => {
          this.ws = new WebSocket(this.relayUrl);

          const timeout = setTimeout(() => {
            reject(new Error('Connection timeout'));
          }, 10000);

          this.ws.onopen = () => {
            clearTimeout(timeout);
            this.connected = true;
            this.reconnectAttempts = 0;

            // Register with relay
            this.send({
              type: 'register',
              nodeId: this.nodeId,
              capabilities: ['sync', 'agent', 'compute'],
            });

            this.emit('connected');
            resolve(true);
          };

          this.ws.onmessage = (event) => {
            this.handleMessage(JSON.parse(event.data));
          };

          this.ws.onclose = () => {
            this.connected = false;
            this.emit('disconnected');
            this.scheduleReconnect();
          };

          this.ws.onerror = (error) => {
            clearTimeout(timeout);
            reject(error);
          };

        }).catch(reject);
      } catch (error) {
        reject(error);
      }
    });
  }

  /**
   * Resolve a WebSocket constructor: the global one when present (browsers,
   * modern Node), otherwise the 'ws' package.
   */
  async loadWebSocket() {
    if (typeof WebSocket !== 'undefined') {
      return WebSocket;
    }
    const ws = await import('ws');
    return ws.default || ws.WebSocket;
  }

  /**
   * Schedule a reconnect with exponential backoff (capped at 30s), unless the
   * client was closed on purpose or the attempt budget is exhausted.
   */
  scheduleReconnect() {
    if (this.intentionallyClosed) {
      return;
    }
    if (this.reconnectAttempts < this.maxReconnects) {
      this.reconnectAttempts++;
      const delay = Math.min(1000 * Math.pow(2, this.reconnectAttempts), 30000);
      setTimeout(() => this.connect().catch(() => {}), delay);
    }
  }

  /**
   * Dispatch an incoming relay message to the matching event.
   * Unknown types are re-emitted as generic 'message' events.
   */
  handleMessage(message) {
    switch (message.type) {
      case 'registered':
        console.log(`[Sync] Registered with relay as ${this.nodeId}`);
        this.emit('registered', message);
        break;

      case 'ledger_sync':
        this.mergeLedgerState(message.state);
        break;

      case 'peer_state':
        this.emit('peer_state', message);
        break;

      case 'time_crystal_sync':
        this.emit('time_crystal', message);
        break;

      default:
        this.emit('message', message);
    }
  }

  /**
   * Send message to relay.
   * @returns {boolean} false when not connected (message is dropped, not queued).
   */
  send(message) {
    if (this.connected && this.ws?.readyState === 1) {
      this.ws.send(JSON.stringify(message));
      return true;
    }
    return false;
  }

  /**
   * Sync ledger state with relay.
   * @returns {boolean} Whether the message was actually sent.
   */
  syncLedger(state) {
    return this.send({
      type: 'ledger_sync',
      nodeId: this.nodeId,
      state,
      timestamp: Date.now(),
    });
  }

  /**
   * Merge incoming ledger state (CRDT): per-key max wins for both maps,
   * then the balance is recomputed from scratch.
   */
  mergeLedgerState(remoteState) {
    if (!remoteState) return;

    // Merge earned (max wins)
    for (const [key, value] of Object.entries(remoteState.earned || {})) {
      const current = this.ledgerState.earned[key] || 0;
      this.ledgerState.earned[key] = Math.max(current, value);
    }

    // Merge spent (max wins)
    for (const [key, value] of Object.entries(remoteState.spent || {})) {
      const current = this.ledgerState.spent[key] || 0;
      this.ledgerState.spent[key] = Math.max(current, value);
    }

    // Recalculate balance
    const totalEarned = Object.values(this.ledgerState.earned).reduce((a, b) => a + b, 0);
    const totalSpent = Object.values(this.ledgerState.spent).reduce((a, b) => a + b, 0);
    this.ledgerState.balance = totalEarned - totalSpent;

    this.emit('ledger_updated', this.ledgerState);
  }

  /**
   * Credit rUv.
   * @returns {number} The new balance.
   */
  credit(amount, reason) {
    // BUG FIX: keys were `${Date.now()}-${reason}`, so two credits in the same
    // millisecond with the same reason collided and one was silently lost
    // under max-wins merging. A random suffix makes each entry unique.
    const key = `${Date.now()}-${reason}-${randomBytes(3).toString('hex')}`;
    this.ledgerState.earned[key] = amount;
    this.ledgerState.balance += amount;
    this.syncLedger(this.ledgerState);
    return this.ledgerState.balance;
  }

  /**
   * Spend rUv.
   * @returns {number} The new balance.
   * @throws {Error} When the balance is insufficient.
   */
  spend(amount, reason) {
    if (this.ledgerState.balance < amount) {
      throw new Error('Insufficient balance');
    }
    // Same uniqueness fix as credit() — see above.
    const key = `${Date.now()}-${reason}-${randomBytes(3).toString('hex')}`;
    this.ledgerState.spent[key] = amount;
    this.ledgerState.balance -= amount;
    this.syncLedger(this.ledgerState);
    return this.ledgerState.balance;
  }

  /** @returns {number} Current derived balance. */
  getBalance() {
    return this.ledgerState.balance;
  }

  /** Close the socket and suppress any further auto-reconnects. */
  close() {
    this.intentionallyClosed = true;
    if (this.ws) {
      this.ws.close();
    }
  }
}
|
|
584
|
+
|
|
585
|
+
// ============================================
|
|
586
|
+
// REAL AGENT MANAGER
|
|
587
|
+
// ============================================
|
|
588
|
+
|
|
589
|
+
/**
|
|
590
|
+
* Manager for real agents with actual execution
|
|
591
|
+
*/
|
|
592
|
+
/**
 * Manager for real agents with actual execution.
 *
 * Owns the agent registry, a shared RealEmbedder, and an optional
 * RelaySyncClient used for spawn-cost debits and task-completion credits.
 * Emits: 'agent_spawned'.
 */
export class RealAgentManager extends EventEmitter {
  /**
   * @param {object} [options] - provider/apiKey defaults for spawned agents,
   *   plus relayUrl / enableSync / nodeId for the sync client.
   */
  constructor(options = {}) {
    super();
    this.agents = new Map();
    this.syncClient = null;
    this.embedder = null;
    this.options = options;
  }

  /**
   * Initialize the shared embedder and, when sync is requested, connect to
   * the relay (connection failures are logged, not fatal).
   * @returns {Promise<RealAgentManager>} this, for chaining.
   */
  async initialize() {
    this.embedder = new RealEmbedder();
    await this.embedder.initialize();

    const syncRequested = this.options.relayUrl || this.options.enableSync;
    if (syncRequested) {
      this.syncClient = new RelaySyncClient({
        relayUrl: this.options.relayUrl || 'ws://localhost:8080',
        nodeId: this.options.nodeId,
      });
      try {
        await this.syncClient.connect();
        console.log('[AgentManager] Connected to relay server');
      } catch (error) {
        console.warn('[AgentManager] Relay connection failed:', error.message);
      }
    }

    return this;
  }

  /**
   * Spawn a real agent, register it, and (best-effort) debit its spawn cost.
   * @param {string} type - Agent type (researcher, coder, ...).
   * @param {object} [options] - Per-agent provider/apiKey/model overrides.
   * @returns {Promise<RealAgent>}
   */
  async spawn(type, options = {}) {
    const instance = new RealAgent(type, {
      provider: options.provider || this.options.provider || 'anthropic',
      apiKey: options.apiKey || this.options.apiKey,
      model: options.model || 'balanced',
    });
    await instance.initialize();
    this.agents.set(instance.id, instance);

    if (this.syncClient?.connected) {
      // Per-type spawn prices in rUv; unlisted types default to 1.
      const costTable = { researcher: 1, coder: 2, reviewer: 1.5, tester: 1, analyst: 1, optimizer: 2, coordinator: 3, embedder: 0.5 };
      try {
        this.syncClient.spend(costTable[type] || 1, `spawn-${type}`);
      } catch (e) {
        // Continue even if no credits
      }
    }

    this.emit('agent_spawned', { id: instance.id, type });
    return instance;
  }

  /**
   * Execute a task on a registered agent and credit the completion.
   * @throws {Error} When the agent id is unknown.
   */
  async execute(agentId, task, context = {}) {
    const worker = this.agents.get(agentId);
    if (worker === undefined) {
      throw new Error(`Agent not found: ${agentId}`);
    }

    const outcome = await worker.execute(task, context);

    if (this.syncClient?.connected) {
      this.syncClient.credit(1, `task-${worker.type}`);
    }

    return outcome;
  }

  /**
   * Quick execute - spawn and run in one call.
   * @returns {Promise<object>} The task result.
   */
  async quickExecute(type, task, context = {}) {
    const worker = await this.spawn(type, context);
    return worker.execute(task, context);
  }

  /** Look up a registered agent by id (undefined when unknown). */
  getAgent(id) {
    return this.agents.get(id);
  }

  /** Stats snapshots for every registered agent. */
  listAgents() {
    const summaries = [];
    for (const agent of this.agents.values()) {
      summaries.push(agent.getStats());
    }
    return summaries;
  }

  /** Ledger balance from the sync client, or 0 when sync is disabled. */
  getBalance() {
    return this.syncClient?.getBalance() || 0;
  }

  /** Tear down the relay connection if one exists. */
  async close() {
    this.syncClient?.close();
  }
}
|
|
697
|
+
|
|
698
|
+
// ============================================
// EXPORTS
// ============================================

// Classes are already exported via 'export class' declarations above
// Only export non-class items here
export { AGENT_PROMPTS, LLM_PROVIDERS };

// Default export: the high-level entry point for spawning and running agents.
export default RealAgentManager;
|