nebula-worker 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/bin/cli.js +20 -0
  2. package/package.json +20 -0
  3. package/src/worker.js +146 -0
package/bin/cli.js ADDED
@@ -0,0 +1,20 @@
1
+ #!/usr/bin/env node
2
+
3
+ const { program } = require('commander');
4
+ const { startWorker } = require('../src/worker');
5
+
6
+ program
7
+ .name('nebula-worker')
8
+ .description('Contribute compute to the Nebula distributed AI network')
9
+ .version('1.0.0');
10
+
11
+ program
12
+ .command('start')
13
+ .description('Start contributing compute to the Nebula network')
14
+ .option('--master <url>', 'Master node URL', 'http://localhost:3000')
15
+ .option('--model <name>', 'Ollama model to use (auto-detected if not specified)')
16
+ .action((options) => {
17
+ startWorker(options.master, options.model);
18
+ });
19
+
20
+ program.parse();
package/package.json ADDED
@@ -0,0 +1,20 @@
1
+ {
2
+ "name": "nebula-worker",
3
+ "version": "1.0.0",
4
+ "description": "Contribute your idle compute to the Nebula network",
5
+ "main": "src/worker.js",
6
+ "bin": {
7
+ "nebula-worker": "bin/cli.js"
8
+ },
9
+ "dependencies": {
10
+ "socket.io-client": "^4.7.2",
11
+ "commander": "^12.0.0"
12
+ },
13
+ "keywords": [
14
+ "nebula",
15
+ "distributed-compute",
16
+ "ai",
17
+ "ollama"
18
+ ],
19
+ "license": "MIT"
20
+ }
package/src/worker.js ADDED
@@ -0,0 +1,146 @@
1
+ const { io } = require('socket.io-client');
2
+ const crypto = require('crypto');
3
+
4
+ const IV_LENGTH = 16;
5
+
6
+ function encrypt(data, sessionKey) {
7
+ const iv = crypto.randomBytes(IV_LENGTH);
8
+ const cipher = crypto.createCipheriv('aes-256-cbc', sessionKey, iv);
9
+ let encrypted = cipher.update(JSON.stringify(data), 'utf8', 'hex');
10
+ encrypted += cipher.final('hex');
11
+ return iv.toString('hex') + encrypted;
12
+ }
13
+
14
+ function decrypt(encryptedData, sessionKey) {
15
+ const iv = Buffer.from(encryptedData.slice(0, 32), 'hex');
16
+ const encrypted = encryptedData.slice(32);
17
+ const decipher = crypto.createDecipheriv('aes-256-cbc', sessionKey, iv);
18
+ let decrypted = decipher.update(encrypted, 'hex', 'utf8');
19
+ decrypted += decipher.final('utf8');
20
+ return JSON.parse(decrypted);
21
+ }
22
+
23
+ async function detectModel(preferredModel) {
24
+ try {
25
+ const res = await fetch('http://localhost:11434/api/tags');
26
+ const data = await res.json();
27
+ const models = data.models || [];
28
+
29
+ if (models.length === 0) {
30
+ console.error('❌ No Ollama models found. Run: ollama pull llama3.2');
31
+ process.exit(1);
32
+ }
33
+
34
+ if (preferredModel) {
35
+ const match = models.find(m => m.name.includes(preferredModel));
36
+ if (!match) {
37
+ console.warn(`⚠️ Model "${preferredModel}" not found. Using "${models[0].name}" instead.`);
38
+ return models[0].name;
39
+ }
40
+ return match.name;
41
+ }
42
+
43
+ // Auto-pick: prefer llama3.2, otherwise first available
44
+ const preferred = models.find(m => m.name.includes('llama3.2'));
45
+ const chosen = preferred ? preferred.name : models[0].name;
46
+ return chosen;
47
+
48
+ } catch (err) {
49
+ console.error('❌ Ollama is not running. Start it with: ollama serve');
50
+ process.exit(1);
51
+ }
52
+ }
53
+
54
+ async function processTask(task, model) {
55
+ try {
56
+ const response = await fetch('http://localhost:11434/api/generate', {
57
+ method: 'POST',
58
+ headers: { 'Content-Type': 'application/json' },
59
+ body: JSON.stringify({ model, prompt: task, stream: false })
60
+ });
61
+ const data = await response.json();
62
+ return data.response.trim();
63
+ } catch (err) {
64
+ return `Error: ${err.message}`;
65
+ }
66
+ }
67
+
68
+ async function startWorker(masterUrl, preferredModel) {
69
+ console.log('\n⚡ Nebula Worker\n');
70
+ console.log(`Connecting to master: ${masterUrl}`);
71
+
72
+ // Check Ollama first
73
+ const model = await detectModel(preferredModel);
74
+ console.log(`Using model: ${model}`);
75
+
76
+ let sessionKey = null;
77
+ let tasksProcessed = 0;
78
+ let chunksProcessed = 0;
79
+
80
+ const socket = io(masterUrl, {
81
+ query: { type: 'worker' },
82
+ reconnection: true,
83
+ reconnectionDelay: 2000,
84
+ });
85
+
86
+ socket.on('connect', () => {
87
+ console.log(`Connected ✓ (id: ${socket.id})`);
88
+ console.log('Waiting for work...\n');
89
+ });
90
+
91
+ socket.on('session-key', (keyHex) => {
92
+ sessionKey = Buffer.from(keyHex, 'hex');
93
+ console.log('Session key received ✓');
94
+ });
95
+
96
+ socket.on('task-chunk', async (data) => {
97
+ if (!sessionKey) {
98
+ console.log('No session key yet — skipping chunk');
99
+ return;
100
+ }
101
+
102
+ let jobId, chunk;
103
+
104
+ if (typeof data.chunk === 'string' && data.chunk.startsWith('PLAIN:')) {
105
+ console.log('Received wrong format — skipping');
106
+ return;
107
+ }
108
+
109
+ try {
110
+ const decoded = decrypt(data.chunk, sessionKey);
111
+ jobId = decoded.jobId;
112
+ chunk = decoded.chunk;
113
+ } catch (err) {
114
+ console.error('Failed to decrypt chunk:', err.message);
115
+ return;
116
+ }
117
+
118
+ console.log(`\nJob ${jobId.slice(0, 8)}... → ${chunk.length} tasks`);
119
+
120
+ const results = await Promise.all(
121
+ chunk.map(async (task, i) => {
122
+ console.log(` Processing task ${i + 1}/${chunk.length}...`);
123
+ const result = await processTask(task, model);
124
+ tasksProcessed++;
125
+ return result;
126
+ })
127
+ );
128
+
129
+ chunksProcessed++;
130
+ console.log(` Done ✓ (${tasksProcessed} tasks total, ${chunksProcessed} chunks)`);
131
+
132
+ socket.emit('chunk-result', encrypt({ jobId, result: results }, sessionKey));
133
+ });
134
+
135
+ socket.on('disconnect', () => {
136
+ sessionKey = null;
137
+ console.log('\nDisconnected from master. Reconnecting...');
138
+ });
139
+
140
+ socket.on('connect_error', (err) => {
141
+ console.error(`Connection failed: ${err.message}`);
142
+ console.log('Retrying in 2 seconds...');
143
+ });
144
+ }
145
+
146
+ module.exports = { startWorker };