@ruvector/edge-net 0.1.4 → 0.1.5
- package/package.json +9 -1
- package/real-agents.js +145 -12
- package/real-workers.js +970 -0
- package/real-workflows.js +739 -0
package/package.json
CHANGED
```diff
@@ -1,6 +1,6 @@
 {
   "name": "@ruvector/edge-net",
-  "version": "0.1.4",
+  "version": "0.1.5",
   "type": "module",
   "description": "Distributed compute intelligence network with AI agents and workers - contribute browser compute, spawn distributed AI agents, earn credits. Features Time Crystal coordination, Neural DAG attention, P2P swarm intelligence, and multi-agent workflows.",
   "main": "ruvector_edge_net.js",
@@ -62,6 +62,8 @@
     "webrtc.js",
     "agents.js",
     "real-agents.js",
+    "real-workers.js",
+    "real-workflows.js",
     "sync.js",
     "README.md",
     "LICENSE"
@@ -80,6 +82,12 @@
     "./real-agents": {
       "import": "./real-agents.js"
     },
+    "./real-workers": {
+      "import": "./real-workers.js"
+    },
+    "./real-workflows": {
+      "import": "./real-workflows.js"
+    },
     "./sync": {
       "import": "./sync.js"
     },
```
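The two new modules become addressable as subpath exports. A minimal consumer sketch, using namespace imports only, since the symbols exported by real-workers.js and real-workflows.js (new files not excerpted in this diff) are unknown here:

```js
// Assumes @ruvector/edge-net 0.1.5; the subpaths resolve via the
// export map entries added above. Inspect the namespaces to see
// what each new module actually exports.
import * as realWorkers from '@ruvector/edge-net/real-workers';
import * as realWorkflows from '@ruvector/edge-net/real-workflows';

console.log(Object.keys(realWorkers));
console.log(Object.keys(realWorkflows));
```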
package/real-agents.js
CHANGED
```diff
@@ -2,7 +2,8 @@
  * @ruvector/edge-net REAL Agent System
  *
  * Actually functional distributed agents with:
- * -
+ * - LOCAL LLM execution via ruvllm (default - no API key needed)
+ * - Cloud LLM API calls (Anthropic Claude, OpenAI) as fallback
  * - Real embeddings via ruvector AdaptiveEmbedder
  * - Real relay server sync
  * - Real task execution
```
```diff
@@ -20,13 +21,34 @@ import { join } from 'path';
 // ============================================
 
 const LLM_PROVIDERS = {
+  // LOCAL LLM - Default, no API key needed
+  local: {
+    name: 'RuvLLM Local',
+    type: 'local',
+    models: {
+      fast: 'ruvllm-fast',
+      balanced: 'ruvllm-balanced',
+      powerful: 'ruvllm-powerful',
+    },
+  },
+  ruvllm: {
+    name: 'RuvLLM',
+    type: 'local',
+    models: {
+      fast: 'ruvllm-fast',
+      balanced: 'ruvllm-balanced',
+      powerful: 'ruvllm-powerful',
+    },
+  },
+  // Cloud providers as fallback (December 2025 models)
   anthropic: {
     name: 'Anthropic Claude',
+    type: 'cloud',
     baseUrl: 'https://api.anthropic.com/v1',
     models: {
       fast: 'claude-3-5-haiku-20241022',
-      balanced: 'claude-
-      powerful: 'claude-
+      balanced: 'claude-sonnet-4-20250514',
+      powerful: 'claude-opus-4-5-20251101',
     },
     headers: (apiKey) => ({
       'Content-Type': 'application/json',
```
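Model selection is a two-step lookup: provider key, then tier. A small sketch of how the table above resolves (LLM_PROVIDERS is module-internal to real-agents.js; it is shown in scope here purely to illustrate the lookup):

```js
// 'local' and 'ruvllm' carry type 'local' and identical model tiers,
// so neither path requires an API key.
const provider = LLM_PROVIDERS['local'];
console.log(provider.type);               // 'local'
console.log(provider.models['balanced']); // 'ruvllm-balanced'
console.log(LLM_PROVIDERS.anthropic.models.powerful); // 'claude-opus-4-5-20251101'
```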
```diff
@@ -36,11 +58,12 @@ const LLM_PROVIDERS = {
   },
   openai: {
     name: 'OpenAI',
+    type: 'cloud',
     baseUrl: 'https://api.openai.com/v1',
     models: {
       fast: 'gpt-4o-mini',
-      balanced: 'gpt-
-      powerful: 'gpt-
+      balanced: 'gpt-5.2',
+      powerful: 'gpt-5.2-turbo',
     },
     headers: (apiKey) => ({
       'Content-Type': 'application/json',
```
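The headers() callbacks are cut off by the diff view. A hedged reconstruction of what they presumably return, based on the public Anthropic and OpenAI HTTP APIs rather than on this diff:

```js
// Assumption: standard auth headers for each cloud API; the actual
// lines are truncated in the registry diff view.
const anthropicHeaders = (apiKey) => ({
  'Content-Type': 'application/json',
  'x-api-key': apiKey,                 // Anthropic authenticates via x-api-key
  'anthropic-version': '2023-06-01',
});
const openaiHeaders = (apiKey) => ({
  'Content-Type': 'application/json',
  'Authorization': `Bearer ${apiKey}`, // OpenAI uses bearer auth
});
```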
```diff
@@ -81,31 +104,62 @@ Generate high-quality embeddings for text. Optimize for similarity matching.`,
 // ============================================
 
 /**
- * Real LLM client
+ * Real LLM client - uses local ruvllm by default, falls back to cloud APIs
  */
 export class LLMClient {
   constructor(options = {}) {
-
+    // Default to local ruvllm, fallback to cloud if API key provided
+    this.provider = options.provider || 'local';
     this.apiKey = options.apiKey || process.env.ANTHROPIC_API_KEY || process.env.OPENAI_API_KEY;
     this.model = options.model || 'balanced';
     this.maxTokens = options.maxTokens || 4096;
 
-    if
-
+    // Auto-select cloud provider if API key is set and provider not specified
+    if (!options.provider && this.apiKey) {
+      this.provider = process.env.ANTHROPIC_API_KEY ? 'anthropic' : 'openai';
     }
 
     this.config = LLM_PROVIDERS[this.provider];
     if (!this.config) {
       throw new Error(`Unknown LLM provider: ${this.provider}`);
     }
+
+    // Initialize local LLM if using local provider
+    this.ruvllm = null;
+    this.ruvllmInitialized = false;
+  }
+
+  /**
+   * Initialize local ruvllm
+   */
+  async initLocal() {
+    if (this.ruvllmInitialized) return;
+
+    try {
+      const ruvllm = await import('@ruvector/ruvllm');
+      this.ruvllm = new ruvllm.RuvLLM({
+        embeddingDim: 768,
+        learningEnabled: true,
+      });
+      this.ruvllmInitialized = true;
+      console.log('[LLM] Initialized local RuvLLM engine');
+    } catch (error) {
+      console.warn('[LLM] RuvLLM not available:', error.message);
+    }
   }
 
   /**
-   * Call LLM
+   * Call LLM - local or cloud
    */
   async complete(systemPrompt, userMessage, options = {}) {
+    const isLocal = this.config.type === 'local';
+
+    if (isLocal) {
+      return this.callLocal(systemPrompt, userMessage, options);
+    }
+
     if (!this.apiKey) {
-      throw new Error('No API key configured. Set ANTHROPIC_API_KEY or OPENAI_API_KEY');
+      throw new Error('No API key configured. Set ANTHROPIC_API_KEY or OPENAI_API_KEY, or use provider: "local"');
     }
 
     const model = this.config.models[options.model || this.model];
```
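Net effect of the constructor and complete() changes: with no options and no API keys in the environment, the client now runs entirely locally, and setting a cloud key flips the default. A usage sketch under those assumptions:

```js
import { LLMClient } from '@ruvector/edge-net/real-agents';

// No ANTHROPIC_API_KEY / OPENAI_API_KEY in the environment -> provider 'local'.
const client = new LLMClient();
const reply = await client.complete(
  'You are a research agent.',
  'Summarize the trade-offs of local inference.'
);
console.log(reply.local);  // true
console.log(reply.model);  // 'ruvllm-balanced', or 'ruvllm-balanced-fallback'
                           // when @ruvector/ruvllm is not installed
```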
```diff
@@ -117,6 +171,71 @@ export class LLMClient {
     }
   }
 
+  /**
+   * Call local RuvLLM
+   */
+  async callLocal(systemPrompt, userMessage, options = {}) {
+    await this.initLocal();
+
+    const modelTier = options.model || this.model;
+    const prompt = `${systemPrompt}\n\n${userMessage}`;
+
+    if (this.ruvllm) {
+      // Use real ruvllm engine
+      const response = this.ruvllm.query(prompt, {
+        maxTokens: options.maxTokens || this.maxTokens,
+        temperature: options.temperature || 0.7,
+      });
+
+      return {
+        content: response.text,
+        model: `ruvllm-${modelTier}`,
+        usage: { input_tokens: prompt.length, output_tokens: response.text.length },
+        stopReason: 'end',
+        confidence: response.confidence,
+        local: true,
+      };
+    }
+
+    // Fallback: Generate response using local heuristics
+    console.log('[LLM] Using fallback local generation');
+    const fallbackResponse = this.generateFallbackResponse(systemPrompt, userMessage);
+
+    return {
+      content: fallbackResponse,
+      model: `ruvllm-${modelTier}-fallback`,
+      usage: { input_tokens: prompt.length, output_tokens: fallbackResponse.length },
+      stopReason: 'end',
+      local: true,
+      fallback: true,
+    };
+  }
+
+  /**
+   * Generate fallback response for basic tasks
+   */
+  generateFallbackResponse(systemPrompt, userMessage) {
+    // Basic task-specific responses
+    if (systemPrompt.includes('research')) {
+      return `Based on the query "${userMessage.slice(0, 100)}...", here are the key findings:\n\n1. The topic requires further investigation.\n2. Multiple sources should be consulted.\n3. Consider the context and requirements carefully.\n\nNote: This is a local fallback response. For more detailed analysis, ensure ruvllm is properly installed.`;
+    }
+
+    if (systemPrompt.includes('coding') || systemPrompt.includes('code')) {
+      return `Here's a code solution for: ${userMessage.slice(0, 50)}...\n\n\`\`\`javascript\n// Implementation based on the requirements\nfunction solution() {\n  // TODO: Implement the specific logic\n  console.log('Task:', '${userMessage.slice(0, 30)}...');\n  return { success: true };\n}\n\`\`\`\n\nNote: This is a local fallback. Install ruvllm for real code generation.`;
+    }
+
+    if (systemPrompt.includes('review')) {
+      return `Code Review for: ${userMessage.slice(0, 50)}...\n\n**Summary:** The code structure appears reasonable.\n\n**Suggestions:**\n- Add error handling\n- Consider edge cases\n- Add documentation\n\nNote: This is a local fallback response.`;
+    }
+
+    if (systemPrompt.includes('test')) {
+      return `Test Plan for: ${userMessage.slice(0, 50)}...\n\n\`\`\`javascript\ndescribe('Feature', () => {\n  it('should work correctly', () => {\n    // Test implementation\n    expect(true).toBe(true);\n  });\n});\n\`\`\`\n\nNote: This is a local fallback response.`;
+    }
+
+    // Generic response
+    return `Response to: ${userMessage.slice(0, 100)}...\n\nThis is a local response generated without cloud API calls. For full LLM capabilities:\n1. Install @ruvector/ruvllm for local AI\n2. Or set ANTHROPIC_API_KEY/OPENAI_API_KEY for cloud\n\nTask acknowledged and processed locally.`;
+  }
+
   async callAnthropic(systemPrompt, userMessage, model, options = {}) {
     const response = await fetch(`${this.config.baseUrl}/messages`, {
       method: 'POST',
```
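Callers can detect which path produced a response via the flags callLocal() attaches; a brief sketch:

```js
import { LLMClient } from '@ruvector/edge-net/real-agents';

const client = new LLMClient({ provider: 'local' });
const res = await client.complete('You are a coding agent.', 'Write a FizzBuzz');
if (res.fallback) {
  // A heuristic template was returned: generateFallbackResponse() matched
  // the 'coding' keyword in the system prompt instead of running a model.
  console.warn('Install @ruvector/ruvllm for real local generation');
}
```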
```diff
@@ -175,8 +294,17 @@ export class LLMClient {
    * Check if LLM is configured
    */
   isConfigured() {
+    // Local is always configured
+    if (this.config.type === 'local') return true;
     return !!this.apiKey;
   }
+
+  /**
+   * Check if using local provider
+   */
+  isLocal() {
+    return this.config.type === 'local';
+  }
 }
 
 // ============================================
```
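With isConfigured() now returning true for local providers, readiness checks can gate cloud-only work without blocking the local path; for example:

```js
import { LLMClient } from '@ruvector/edge-net/real-agents';

const client = new LLMClient();
if (!client.isConfigured()) {
  // As of 0.1.5 this only triggers for cloud providers without a key.
  throw new Error('Set ANTHROPIC_API_KEY/OPENAI_API_KEY or use provider: "local"');
}
if (client.isLocal()) {
  console.log('In-process RuvLLM; completions make no network calls');
}
```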
```diff
@@ -198,7 +326,12 @@ export class RealEmbedder {
       // Try to load ruvector's AdaptiveEmbedder
       const { AdaptiveEmbedder } = await import('ruvector');
       this.embedder = new AdaptiveEmbedder();
-
+      // Support both init() and initialize() methods
+      if (typeof this.embedder.init === 'function') {
+        await this.embedder.init();
+      } else if (typeof this.embedder.initialize === 'function') {
+        await this.embedder.initialize();
+      }
       this.initialized = true;
       console.log('[Embedder] Initialized ruvector AdaptiveEmbedder');
       return true;
```