@sparkleideas/ruv-swarm 1.0.18-patch.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1565 -0
- package/bin/ruv-swarm-clean.js +1872 -0
- package/bin/ruv-swarm-memory.js +119 -0
- package/bin/ruv-swarm-secure-heartbeat.js +1549 -0
- package/bin/ruv-swarm-secure.js +1689 -0
- package/package.json +221 -0
- package/src/agent.ts +342 -0
- package/src/benchmark.js +267 -0
- package/src/claude-flow-enhanced.js +839 -0
- package/src/claude-integration/advanced-commands.js +561 -0
- package/src/claude-integration/core.js +112 -0
- package/src/claude-integration/docs.js +1548 -0
- package/src/claude-integration/env-template.js +39 -0
- package/src/claude-integration/index.js +209 -0
- package/src/claude-integration/remote.js +408 -0
- package/src/cli-diagnostics.js +364 -0
- package/src/cognitive-pattern-evolution.js +1317 -0
- package/src/daa-cognition.js +977 -0
- package/src/daa-service.d.ts +298 -0
- package/src/daa-service.js +1116 -0
- package/src/diagnostics.js +533 -0
- package/src/errors.js +528 -0
- package/src/github-coordinator/README.md +193 -0
- package/src/github-coordinator/claude-hooks.js +162 -0
- package/src/github-coordinator/gh-cli-coordinator.js +260 -0
- package/src/hooks/cli.js +82 -0
- package/src/hooks/index.js +1900 -0
- package/src/index-enhanced.d.ts +371 -0
- package/src/index-enhanced.js +734 -0
- package/src/index.d.ts +287 -0
- package/src/index.js +405 -0
- package/src/index.ts +457 -0
- package/src/logger.js +182 -0
- package/src/logging-config.js +179 -0
- package/src/mcp-daa-tools.js +735 -0
- package/src/mcp-tools-benchmarks.js +328 -0
- package/src/mcp-tools-enhanced.js +2863 -0
- package/src/memory-config.js +42 -0
- package/src/meta-learning-framework.js +1359 -0
- package/src/neural-agent.js +830 -0
- package/src/neural-coordination-protocol.js +1363 -0
- package/src/neural-models/README.md +118 -0
- package/src/neural-models/autoencoder.js +543 -0
- package/src/neural-models/base.js +269 -0
- package/src/neural-models/cnn.js +497 -0
- package/src/neural-models/gnn.js +447 -0
- package/src/neural-models/gru.js +536 -0
- package/src/neural-models/index.js +273 -0
- package/src/neural-models/lstm.js +551 -0
- package/src/neural-models/neural-presets-complete.js +1306 -0
- package/src/neural-models/presets/graph.js +392 -0
- package/src/neural-models/presets/index.js +279 -0
- package/src/neural-models/presets/nlp.js +328 -0
- package/src/neural-models/presets/timeseries.js +368 -0
- package/src/neural-models/presets/vision.js +387 -0
- package/src/neural-models/resnet.js +534 -0
- package/src/neural-models/transformer.js +515 -0
- package/src/neural-models/vae.js +489 -0
- package/src/neural-network-manager.js +1938 -0
- package/src/neural-network.ts +296 -0
- package/src/neural.js +574 -0
- package/src/performance-benchmarks.js +898 -0
- package/src/performance.js +458 -0
- package/src/persistence-pooled.js +695 -0
- package/src/persistence.js +480 -0
- package/src/schemas.js +864 -0
- package/src/security.js +218 -0
- package/src/singleton-container.js +183 -0
- package/src/sqlite-pool.js +587 -0
- package/src/sqlite-worker.js +141 -0
- package/src/types.ts +164 -0
- package/src/utils.ts +286 -0
- package/src/wasm-loader.js +601 -0
- package/src/wasm-loader2.js +404 -0
- package/src/wasm-memory-optimizer.js +783 -0
- package/src/wasm-types.d.ts +63 -0
- package/wasm/README.md +347 -0
- package/wasm/neuro-divergent.wasm +0 -0
- package/wasm/package.json +18 -0
- package/wasm/ruv-fann.wasm +0 -0
- package/wasm/ruv_swarm_simd.wasm +0 -0
- package/wasm/ruv_swarm_wasm.d.ts +391 -0
- package/wasm/ruv_swarm_wasm.js +2164 -0
- package/wasm/ruv_swarm_wasm_bg.wasm +0 -0
- package/wasm/ruv_swarm_wasm_bg.wasm.d.ts +123 -0
- package/wasm/wasm-bindings-loader.mjs +435 -0
- package/wasm/wasm-updates.md +684 -0
|
@@ -0,0 +1,695 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* High-Availability SQLite Persistence Layer with Connection Pooling
|
|
3
|
+
*
|
|
4
|
+
* This is the production-ready version of the persistence layer that addresses
|
|
5
|
+
* the connection pooling concerns identified in the production readiness assessment.
|
|
6
|
+
*
|
|
7
|
+
* Key improvements:
|
|
8
|
+
* - Connection pooling for high concurrency
|
|
9
|
+
* - Deadlock prevention through queuing
|
|
10
|
+
* - Connection health monitoring
|
|
11
|
+
* - Graceful degradation under load
|
|
12
|
+
* - Proper resource lifecycle management
|
|
13
|
+
*/
|
|
14
|
+
|
|
15
|
+
import { SQLiteConnectionPool } from './sqlite-pool.js';
|
|
16
|
+
import path from 'path';
|
|
17
|
+
import fs from 'fs';
|
|
18
|
+
|
|
19
|
+
class SwarmPersistencePooled {
  /**
   * High-availability persistence layer backed by a SQLite connection pool.
   *
   * @param {string} [dbPath] - Path to the SQLite database file. Defaults to
   *   `../data/@sparkleideas/ruv-swarm.db` relative to this module.
   * @param {object} [options] - Pool/performance tuning; merged over defaults.
   */
  constructor(dbPath = path.join(new URL('.', import.meta.url).pathname, '..', 'data', '@sparkleideas/ruv-swarm.db'), options = {}) {
    this.dbPath = dbPath;
    this.options = {
      // Pool configuration
      maxReaders: options.maxReaders || 4,
      maxWorkers: options.maxWorkers || 2,

      // Performance settings
      mmapSize: options.mmapSize || 268435456, // 256MB
      cacheSize: options.cacheSize || -64000, // negative = size in KB (64MB)

      // High availability
      enableBackup: options.enableBackup || false,
      backupInterval: options.backupInterval || 3600000, // 1 hour

      ...options
    };

    this.pool = null;
    this.initialized = false;
    this.initializing = false;

    // Lightweight operation statistics, updated by trackOperation().
    this.stats = {
      totalOperations: 0,
      totalErrors: 0,
      averageResponseTime: 0
    };
  }

  /**
   * Initialize the connection pool and database schema. Safe to call
   * concurrently: a second caller polls until the first finishes.
   * @throws {Error} If pool creation or schema initialization fails.
   */
  async initialize() {
    if (this.initialized) return;
    if (this.initializing) {
      // Another caller is initializing; poll until it completes or fails.
      return new Promise((resolve, reject) => {
        const checkInitialized = () => {
          if (this.initialized) {
            resolve();
          } else if (!this.initializing) {
            reject(new Error('Initialization failed'));
          } else {
            setTimeout(checkInitialized, 100);
          }
        };
        checkInitialized();
      });
    }

    this.initializing = true;

    try {
      // Ensure data directory exists before the pool opens the db file.
      const dataDir = path.dirname(this.dbPath);
      if (!fs.existsSync(dataDir)) {
        fs.mkdirSync(dataDir, { recursive: true });
      }

      // Initialize connection pool
      this.pool = new SQLiteConnectionPool(this.dbPath, this.options);

      // Wait for pool to signal readiness (or the first error).
      await new Promise((resolve, reject) => {
        this.pool.once('ready', resolve);
        this.pool.once('error', reject);
      });

      // Create tables and indexes.
      await this.initDatabase();

      this.initialized = true;
      this.initializing = false;

    } catch (error) {
      this.initializing = false;
      throw error;
    }
  }

  /**
   * Create all tables and indexes (idempotent: uses IF NOT EXISTS).
   * Runs on the pool's write connection.
   */
  async initDatabase() {
    await this.pool.write(`
      CREATE TABLE IF NOT EXISTS swarms (
        id TEXT PRIMARY KEY,
        name TEXT NOT NULL,
        topology TEXT NOT NULL,
        max_agents INTEGER NOT NULL,
        strategy TEXT,
        status TEXT DEFAULT 'active',
        created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
        metadata TEXT
      )
    `);

    await this.pool.write(`
      CREATE TABLE IF NOT EXISTS agents (
        id TEXT PRIMARY KEY,
        swarm_id TEXT,
        name TEXT NOT NULL,
        type TEXT NOT NULL,
        status TEXT DEFAULT 'idle',
        capabilities TEXT,
        neural_config TEXT,
        metrics TEXT,
        created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
        FOREIGN KEY (swarm_id) REFERENCES swarms(id)
      )
    `);

    await this.pool.write(`
      CREATE TABLE IF NOT EXISTS tasks (
        id TEXT PRIMARY KEY,
        swarm_id TEXT,
        description TEXT,
        priority TEXT DEFAULT 'medium',
        status TEXT DEFAULT 'pending',
        assigned_agents TEXT,
        result TEXT,
        error TEXT,
        created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
        completed_at DATETIME,
        execution_time_ms INTEGER,
        FOREIGN KEY (swarm_id) REFERENCES swarms(id)
      )
    `);

    await this.pool.write(`
      CREATE TABLE IF NOT EXISTS task_results (
        id TEXT PRIMARY KEY,
        task_id TEXT NOT NULL,
        agent_id TEXT NOT NULL,
        output TEXT,
        metrics TEXT,
        created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
        FOREIGN KEY (task_id) REFERENCES tasks(id),
        FOREIGN KEY (agent_id) REFERENCES agents(id)
      )
    `);

    await this.pool.write(`
      CREATE TABLE IF NOT EXISTS agent_memory (
        id TEXT PRIMARY KEY,
        agent_id TEXT NOT NULL,
        key TEXT NOT NULL,
        value TEXT,
        ttl_secs INTEGER,
        expires_at DATETIME,
        created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
        updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
        FOREIGN KEY (agent_id) REFERENCES agents(id),
        UNIQUE(agent_id, key)
      )
    `);

    await this.pool.write(`
      CREATE TABLE IF NOT EXISTS metrics (
        id TEXT PRIMARY KEY,
        entity_type TEXT NOT NULL,
        entity_id TEXT NOT NULL,
        metric_name TEXT NOT NULL,
        metric_value REAL,
        timestamp DATETIME DEFAULT CURRENT_TIMESTAMP
      )
    `);

    await this.pool.write(`
      CREATE TABLE IF NOT EXISTS neural_networks (
        id TEXT PRIMARY KEY,
        agent_id TEXT NOT NULL,
        architecture TEXT NOT NULL,
        weights TEXT,
        training_data TEXT,
        performance_metrics TEXT,
        created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
        updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
        FOREIGN KEY (agent_id) REFERENCES agents(id)
      )
    `);

    await this.pool.write(`
      CREATE TABLE IF NOT EXISTS events (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        swarm_id TEXT,
        event_type TEXT NOT NULL,
        event_data TEXT,
        timestamp DATETIME DEFAULT CURRENT_TIMESTAMP
      )
    `);

    // Create indexes for better query performance.
    const indexes = [
      'CREATE INDEX IF NOT EXISTS idx_agents_swarm ON agents(swarm_id)',
      'CREATE INDEX IF NOT EXISTS idx_tasks_swarm ON tasks(swarm_id)',
      'CREATE INDEX IF NOT EXISTS idx_task_results_task ON task_results(task_id)',
      'CREATE INDEX IF NOT EXISTS idx_task_results_agent ON task_results(agent_id)',
      'CREATE INDEX IF NOT EXISTS idx_agent_memory_agent ON agent_memory(agent_id)',
      'CREATE INDEX IF NOT EXISTS idx_metrics_entity ON metrics(entity_type, entity_id)',
      'CREATE INDEX IF NOT EXISTS idx_events_swarm ON events(swarm_id)',
      'CREATE INDEX IF NOT EXISTS idx_events_timestamp ON events(timestamp)'
    ];

    for (const index of indexes) {
      await this.pool.write(index);
    }
  }

  /** Lazily initialize on first use. */
  async ensureInitialized() {
    if (!this.initialized) {
      await this.initialize();
    }
  }

  /**
   * Run an async operation with exponential-backoff retries.
   * Constraint violations are not retried — they are deterministic failures.
   *
   * @param {() => Promise<*>} operation - Operation to attempt.
   * @param {number} [maxRetries=3] - Maximum number of attempts.
   * @returns {Promise<*>} The operation's result.
   * @throws The last error when all attempts fail, or immediately on a
   *   UNIQUE / NOT NULL constraint violation.
   */
  async withRetry(operation, maxRetries = 3) {
    let lastError;

    for (let attempt = 0; attempt < maxRetries; attempt++) {
      try {
        return await operation();
      } catch (error) {
        lastError = error;

        // Deterministic errors — retrying cannot succeed.
        if (error.message.includes('UNIQUE constraint failed') ||
            error.message.includes('NOT NULL constraint failed')) {
          throw error;
        }

        // Exponential backoff: 100ms, 200ms, 400ms, ...
        if (attempt < maxRetries - 1) {
          await new Promise(resolve => setTimeout(resolve, Math.pow(2, attempt) * 100));
        }
      }
    }

    throw lastError;
  }

  /**
   * Run an operation while updating this.stats (count, error count,
   * incremental running mean of response time).
   *
   * @param {() => Promise<*>} operation - Operation to measure.
   * @returns {Promise<*>} The operation's result (errors are rethrown).
   */
  async trackOperation(operation) {
    const startTime = Date.now();

    try {
      const result = await operation();

      this.stats.totalOperations++;
      const duration = Date.now() - startTime;
      // Incremental running mean over all successful operations.
      // (The previous formula `(avg + duration) / 2` weighted only the
      // last sample, not a true average.)
      this.stats.averageResponseTime +=
        (duration - this.stats.averageResponseTime) / this.stats.totalOperations;

      return result;
    } catch (error) {
      this.stats.totalErrors++;
      throw error;
    }
  }

  // Swarm operations

  /**
   * Insert a swarm row; idempotent — an existing id is reported with
   * `changes: 0` instead of raising a constraint error.
   */
  async createSwarm(swarm) {
    await this.ensureInitialized();

    return this.trackOperation(() => this.withRetry(async () => {
      // Check if swarm already exists
      const existing = await this.pool.read('SELECT id FROM swarms WHERE id = ?', [swarm.id]);
      if (existing && existing.length > 0) {
        // Return existing swarm info instead of failing
        return { id: swarm.id, changes: 0, lastInsertRowid: null };
      }

      return this.pool.write(`
        INSERT INTO swarms (id, name, topology, max_agents, strategy, metadata)
        VALUES (?, ?, ?, ?, ?, ?)
      `, [
        swarm.id,
        swarm.name,
        swarm.topology,
        swarm.maxAgents,
        swarm.strategy,
        JSON.stringify(swarm.metadata || {})
      ]);
    }));
  }

  /** @returns {Promise<object[]>} All swarms with status 'active', metadata parsed. */
  async getActiveSwarms() {
    await this.ensureInitialized();

    return this.trackOperation(async () => {
      const swarms = await this.pool.read('SELECT * FROM swarms WHERE status = ?', ['active']);
      return swarms.map(s => ({
        ...s,
        metadata: JSON.parse(s.metadata || '{}')
      }));
    });
  }

  // Agent operations

  /** Insert an agent row; object-valued fields are stored as JSON text. */
  async createAgent(agent) {
    await this.ensureInitialized();

    return this.trackOperation(() => this.withRetry(() =>
      this.pool.write(`
        INSERT INTO agents (id, swarm_id, name, type, capabilities, neural_config, metrics)
        VALUES (?, ?, ?, ?, ?, ?, ?)
      `, [
        agent.id,
        agent.swarmId,
        agent.name,
        agent.type,
        JSON.stringify(agent.capabilities || []),
        JSON.stringify(agent.neuralConfig || {}),
        JSON.stringify(agent.metrics || {})
      ])
    ));
  }

  /** Set an agent's status column. */
  async updateAgentStatus(agentId, status) {
    await this.ensureInitialized();

    return this.trackOperation(() => this.withRetry(() =>
      this.pool.write('UPDATE agents SET status = ? WHERE id = ?', [status, agentId])
    ));
  }

  /** @returns {Promise<object|null>} Agent with JSON columns parsed, or null. */
  async getAgent(id) {
    await this.ensureInitialized();

    return this.trackOperation(async () => {
      const agents = await this.pool.read('SELECT * FROM agents WHERE id = ?', [id]);
      if (agents.length === 0) return null;

      const agent = agents[0];
      return {
        ...agent,
        capabilities: JSON.parse(agent.capabilities || '[]'),
        neural_config: JSON.parse(agent.neural_config || '{}'),
        metrics: JSON.parse(agent.metrics || '{}')
      };
    });
  }

  /**
   * List a swarm's agents.
   * @param {string} swarmId
   * @param {string} [filter='all'] - 'all' or a status value to match.
   */
  async getSwarmAgents(swarmId, filter = 'all') {
    await this.ensureInitialized();

    return this.trackOperation(async () => {
      let sql = 'SELECT * FROM agents WHERE swarm_id = ?';
      const params = [swarmId];

      if (filter !== 'all') {
        sql += ' AND status = ?';
        params.push(filter);
      }

      const agents = await this.pool.read(sql, params);
      return agents.map(a => ({
        ...a,
        capabilities: JSON.parse(a.capabilities || '[]'),
        neural_config: JSON.parse(a.neural_config || '{}'),
        metrics: JSON.parse(a.metrics || '{}')
      }));
    });
  }

  // Task operations

  /** Insert a task row; assignedAgents is stored as JSON text. */
  async createTask(task) {
    await this.ensureInitialized();

    return this.trackOperation(() => this.withRetry(() =>
      this.pool.write(`
        INSERT INTO tasks (id, swarm_id, description, priority, status, assigned_agents)
        VALUES (?, ?, ?, ?, ?, ?)
      `, [
        task.id,
        task.swarmId,
        task.description,
        task.priority || 'medium',
        task.status || 'pending',
        JSON.stringify(task.assignedAgents || [])
      ])
    ));
  }

  /**
   * Update arbitrary task columns.
   *
   * @param {string} taskId
   * @param {object} updates - Column/value pairs. `assignedAgents` is accepted
   *   as an alias for the `assigned_agents` column; `assigned_agents` and
   *   `result` values are JSON-encoded before storage.
   * @returns {Promise<object>} Write result, or `{ changes: 0 }` when
   *   `updates` is empty.
   */
  async updateTask(taskId, updates) {
    await this.ensureInitialized();

    return this.trackOperation(() => this.withRetry(() => {
      // Map camelCase API keys to snake_case column names. The previous
      // implementation interpolated the camelCase key directly into the SQL,
      // producing "no such column: assignedAgents" at runtime.
      const columnAliases = { assignedAgents: 'assigned_agents' };
      // Columns whose values are persisted as JSON text.
      const jsonColumns = new Set(['assigned_agents', 'result']);

      const fields = [];
      const values = [];

      for (const [key, value] of Object.entries(updates)) {
        const column = columnAliases[key] || key;
        fields.push(`${column} = ?`);
        values.push(jsonColumns.has(column) ? JSON.stringify(value) : value);
      }

      if (fields.length === 0) {
        // Avoid emitting the invalid statement "UPDATE tasks SET WHERE id = ?".
        return { changes: 0 };
      }

      values.push(taskId);
      return this.pool.write(`UPDATE tasks SET ${fields.join(', ')} WHERE id = ?`, values);
    }));
  }

  /** @returns {Promise<object|null>} Task with JSON columns parsed, or null. */
  async getTask(id) {
    await this.ensureInitialized();

    return this.trackOperation(async () => {
      const tasks = await this.pool.read('SELECT * FROM tasks WHERE id = ?', [id]);
      if (tasks.length === 0) return null;

      const task = tasks[0];
      return {
        ...task,
        assigned_agents: JSON.parse(task.assigned_agents || '[]'),
        result: task.result ? JSON.parse(task.result) : null
      };
    });
  }

  /**
   * List a swarm's tasks, optionally filtered by status.
   * @param {string} swarmId
   * @param {string|null} [status=null] - Status to filter by, or null for all.
   */
  async getSwarmTasks(swarmId, status = null) {
    await this.ensureInitialized();

    return this.trackOperation(async () => {
      let sql = 'SELECT * FROM tasks WHERE swarm_id = ?';
      const params = [swarmId];

      if (status) {
        sql += ' AND status = ?';
        params.push(status);
      }

      const tasks = await this.pool.read(sql, params);
      return tasks.map(t => ({
        ...t,
        assigned_agents: JSON.parse(t.assigned_agents || '[]'),
        result: t.result ? JSON.parse(t.result) : null
      }));
    });
  }

  // Memory operations

  /**
   * Store (or overwrite) a key/value pair for an agent. The UNIQUE
   * (agent_id, key) constraint plus INSERT OR REPLACE makes this an upsert.
   *
   * @param {string} agentId
   * @param {string} key
   * @param {*} value - JSON-serializable value.
   * @param {number|null} [ttlSecs=null] - Optional expiry in seconds.
   */
  async storeMemory(agentId, key, value, ttlSecs = null) {
    await this.ensureInitialized();

    return this.trackOperation(() => this.withRetry(() => {
      const expiresAt = ttlSecs ? new Date(Date.now() + ttlSecs * 1000).toISOString() : null;
      // NOTE: the id is time-based, so re-storing a key replaces the row
      // (via the UNIQUE constraint) under a fresh primary key.
      const id = `mem_${agentId}_${Date.now()}`;

      return this.pool.write(`
        INSERT OR REPLACE INTO agent_memory (id, agent_id, key, value, ttl_secs, expires_at, updated_at)
        VALUES (?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
      `, [id, agentId, key, JSON.stringify(value), ttlSecs, expiresAt]);
    }));
  }

  /** @returns {Promise<object|null>} Unexpired memory entry for (agent, key), value parsed. */
  async getMemory(agentId, key) {
    await this.ensureInitialized();

    return this.trackOperation(async () => {
      // Purge expired entries before reading.
      await this.cleanupExpiredMemory();

      const memories = await this.pool.read(`
        SELECT * FROM agent_memory
        WHERE agent_id = ? AND key = ?
        AND (expires_at IS NULL OR expires_at > CURRENT_TIMESTAMP)
      `, [agentId, key]);

      if (memories.length === 0) return null;

      const memory = memories[0];
      return {
        ...memory,
        value: JSON.parse(memory.value)
      };
    });
  }

  /** @returns {Promise<object[]>} All unexpired memory entries for an agent, newest first. */
  async getAllMemory(agentId) {
    await this.ensureInitialized();

    return this.trackOperation(async () => {
      // Purge expired entries before reading.
      await this.cleanupExpiredMemory();

      const memories = await this.pool.read(`
        SELECT * FROM agent_memory
        WHERE agent_id = ?
        AND (expires_at IS NULL OR expires_at > CURRENT_TIMESTAMP)
        ORDER BY updated_at DESC
      `, [agentId]);

      return memories.map(m => ({
        ...m,
        value: JSON.parse(m.value)
      }));
    });
  }

  /** Delete one memory entry for (agent, key). */
  async deleteMemory(agentId, key) {
    await this.ensureInitialized();

    return this.trackOperation(() => this.withRetry(() =>
      this.pool.write('DELETE FROM agent_memory WHERE agent_id = ? AND key = ?', [agentId, key])
    ));
  }

  /** Delete all memory entries whose expiry timestamp has passed. */
  async cleanupExpiredMemory() {
    await this.ensureInitialized();

    return this.trackOperation(() => this.withRetry(() =>
      this.pool.write('DELETE FROM agent_memory WHERE expires_at IS NOT NULL AND expires_at <= CURRENT_TIMESTAMP')
    ));
  }

  // Neural network operations

  /** Persist a neural network record under a generated id. */
  async storeNeuralNetwork(network) {
    await this.ensureInitialized();

    return this.trackOperation(() => this.withRetry(() => {
      // Time + random suffix; collision chance is negligible for this use.
      const id = `nn_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;

      return this.pool.write(`
        INSERT INTO neural_networks (id, agent_id, architecture, weights, training_data, performance_metrics)
        VALUES (?, ?, ?, ?, ?, ?)
      `, [
        id,
        network.agentId,
        JSON.stringify(network.architecture),
        JSON.stringify(network.weights),
        JSON.stringify(network.trainingData || {}),
        JSON.stringify(network.performanceMetrics || {})
      ]);
    }));
  }

  /**
   * Update neural network columns; every value is JSON-encoded and
   * `updated_at` is refreshed.
   * @returns {Promise<object>} Write result, or `{ changes: 0 }` for empty updates.
   */
  async updateNeuralNetwork(id, updates) {
    await this.ensureInitialized();

    return this.trackOperation(() => this.withRetry(() => {
      const fields = [];
      const values = [];

      for (const [key, value] of Object.entries(updates)) {
        fields.push(`${key} = ?`);
        values.push(JSON.stringify(value));
      }

      if (fields.length === 0) {
        // Nothing to change beyond the timestamp; skip the write entirely.
        return { changes: 0 };
      }

      fields.push('updated_at = CURRENT_TIMESTAMP');
      values.push(id);

      return this.pool.write(`UPDATE neural_networks SET ${fields.join(', ')} WHERE id = ?`, values);
    }));
  }

  /** @returns {Promise<object[]>} Agent's neural networks with JSON columns parsed. */
  async getAgentNeuralNetworks(agentId) {
    await this.ensureInitialized();

    return this.trackOperation(async () => {
      const networks = await this.pool.read('SELECT * FROM neural_networks WHERE agent_id = ?', [agentId]);

      return networks.map(n => ({
        ...n,
        architecture: JSON.parse(n.architecture),
        weights: JSON.parse(n.weights),
        training_data: JSON.parse(n.training_data || '{}'),
        performance_metrics: JSON.parse(n.performance_metrics || '{}')
      }));
    });
  }

  // Metrics operations

  /** Record one metric sample for an entity. */
  async recordMetric(entityType, entityId, metricName, metricValue) {
    await this.ensureInitialized();

    return this.trackOperation(() => this.withRetry(() => {
      const id = `metric_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;

      return this.pool.write(`
        INSERT INTO metrics (id, entity_type, entity_id, metric_name, metric_value)
        VALUES (?, ?, ?, ?, ?)
      `, [id, entityType, entityId, metricName, metricValue]);
    }));
  }

  /**
   * Read up to 100 most recent metric samples for an entity.
   * @param {string|null} [metricName=null] - Optional metric-name filter.
   */
  async getMetrics(entityType, entityId, metricName = null) {
    await this.ensureInitialized();

    return this.trackOperation(async () => {
      let sql = 'SELECT * FROM metrics WHERE entity_type = ? AND entity_id = ?';
      const params = [entityType, entityId];

      if (metricName) {
        sql += ' AND metric_name = ?';
        params.push(metricName);
      }

      sql += ' ORDER BY timestamp DESC LIMIT 100';

      return this.pool.read(sql, params);
    });
  }

  // Event logging

  /** Append an event for a swarm; eventData is stored as JSON text. */
  async logEvent(swarmId, eventType, eventData) {
    await this.ensureInitialized();

    return this.trackOperation(() => this.withRetry(() =>
      this.pool.write(`
        INSERT INTO events (swarm_id, event_type, event_data)
        VALUES (?, ?, ?)
      `, [swarmId, eventType, JSON.stringify(eventData)])
    ));
  }

  /** @returns {Promise<object[]>} Most recent events for a swarm (default limit 100). */
  async getSwarmEvents(swarmId, limit = 100) {
    await this.ensureInitialized();

    return this.trackOperation(async () => {
      const events = await this.pool.read(`
        SELECT * FROM events
        WHERE swarm_id = ?
        ORDER BY timestamp DESC
        LIMIT ?
      `, [swarmId, limit]);

      return events.map(e => ({
        ...e,
        event_data: JSON.parse(e.event_data || '{}')
      }));
    });
  }

  // Cleanup operations

  /**
   * Housekeeping: purge expired memory, events older than 7 days and
   * metrics older than 30 days, then VACUUM to reclaim disk space.
   */
  async cleanup() {
    await this.ensureInitialized();

    return this.trackOperation(async () => {
      await this.cleanupExpiredMemory();

      const sevenDaysAgo = new Date(Date.now() - 7 * 24 * 60 * 60 * 1000).toISOString();
      await this.pool.write('DELETE FROM events WHERE timestamp < ?', [sevenDaysAgo]);

      const thirtyDaysAgo = new Date(Date.now() - 30 * 24 * 60 * 60 * 1000).toISOString();
      await this.pool.write('DELETE FROM metrics WHERE timestamp < ?', [thirtyDaysAgo]);

      // Vacuum to reclaim space
      await this.pool.write('VACUUM');
    });
  }

  /** @returns {object|null} Connection-pool statistics, or null before initialize(). */
  getPoolStats() {
    return this.pool ? this.pool.getStats() : null;
  }

  /** @returns {{totalOperations:number,totalErrors:number,averageResponseTime:number}} */
  getPersistenceStats() {
    return this.stats;
  }

  /** @returns {boolean} True when the pool exists and reports healthy. */
  isHealthy() {
    return this.pool ? this.pool.isHealthy : false;
  }

  /** Close the connection pool; safe to call before initialize(). */
  async close() {
    if (this.pool) {
      await this.pool.close();
    }
  }
}

export { SwarmPersistencePooled };
|