neohive 6.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/agents.js ADDED
@@ -0,0 +1,107 @@
1
+ 'use strict';
2
+
3
+ const fs = require('fs');
4
+ const path = require('path');
5
+ const { DATA_DIR, AGENTS_FILE, PROFILES_FILE, ACKS_FILE } = require('./config');
6
+ const { cachedRead, invalidateCache, lockAgentsFile, unlockAgentsFile, withFileLock, readJsonFile } = require('./file-io');
7
+
8
// Cache for isPidAlive results, keyed by `${pid}_${lastActivity}` (5s TTL).
const _pidAliveCache = {};

// isAutonomousMode is injected late to avoid circular dependency with workflows
let _isAutonomousMode = () => false;
function setAutonomousModeCheck(fn) { _isAutonomousMode = fn; }

/**
 * Decide whether an agent process appears to be running.
 * A sufficiently recent `lastActivity` timestamp counts as alive without
 * touching the OS; otherwise fall back to a signal-0 probe of `pid`.
 * Results are memoized for 5 seconds per (pid, lastActivity) pair.
 */
function isPidAlive(pid, lastActivity) {
  const cacheKey = `${pid}_${lastActivity}`;
  const hit = _pidAliveCache[cacheKey];
  if (hit && Date.now() - hit.ts < 5000) return hit.alive;

  // Autonomous mode treats activity as stale twice as fast.
  const staleThreshold = _isAutonomousMode() ? 30000 : 60000;

  let alive = false;
  if (lastActivity) {
    const age = Date.now() - new Date(lastActivity).getTime();
    if (age < staleThreshold) alive = true;
  }
  if (!alive) {
    // Signal 0 checks process existence without delivering a signal.
    try {
      process.kill(pid, 0);
      alive = true;
    } catch {
      alive = false;
    }
  }

  _pidAliveCache[cacheKey] = { alive, ts: Date.now() };

  // Keep the cache bounded: past 200 entries, drop anything older than 10s.
  const cachedKeys = Object.keys(_pidAliveCache);
  if (cachedKeys.length > 200) {
    const cutoff = Date.now() - 10000;
    for (const key of cachedKeys) {
      if (_pidAliveCache[key].ts < cutoff) delete _pidAliveCache[key];
    }
  }
  return alive;
}
38
+
39
// Load the agents registry from agents.json, overlaying per-agent heartbeat
// data (pid and last_activity) from heartbeat-<name>.json files.
// Results are served from the read cache for 1.5s; a missing or corrupt
// registry degrades to {}.
function getAgents() {
  return cachedRead('agents', () => {
    if (!fs.existsSync(AGENTS_FILE)) return {};
    let agents;
    try {
      agents = JSON.parse(fs.readFileSync(AGENTS_FILE, 'utf8'));
    } catch {
      return {};
    }
    try {
      const heartbeats = fs.readdirSync(DATA_DIR)
        .filter(f => f.startsWith('heartbeat-') && f.endsWith('.json'));
      for (const file of heartbeats) {
        const name = file.slice('heartbeat-'.length, -'.json'.length);
        const entry = agents[name];
        if (!entry) continue;
        try {
          const hb = JSON.parse(fs.readFileSync(path.join(DATA_DIR, file), 'utf8'));
          if (hb.last_activity) entry.last_activity = hb.last_activity;
          if (hb.pid) entry.pid = hb.pid;
        } catch {
          // unreadable heartbeat: keep the registry values as-is
        }
      }
    } catch {
      // DATA_DIR unreadable: serve the registry without heartbeat overlay
    }
    return agents;
  }, 1500);
}
60
+
61
// Persist the agents registry to agents.json and drop the read cache.
// NOTE(review): the write is skipped when the payload serializes to '{}' or
// smaller — this looks like a deliberate guard against wiping the registry
// with an empty object; confirm callers never need to persist "no agents".
function saveAgents(agents) {
  const data = JSON.stringify(agents);
  if (data && data.length > 2) {
    fs.writeFileSync(AGENTS_FILE, data);
  }
  // Invalidate even when the write was skipped so readers re-sync from disk.
  invalidateCache('agents');
}
68
+
69
// Absolute path of an agent's heartbeat file.
function heartbeatFile(name) {
  return path.join(DATA_DIR, `heartbeat-${name}.json`);
}

// Record "this process is alive right now" for `name`. Best effort: write
// failures are swallowed so a heartbeat can never crash the caller.
function touchHeartbeat(name) {
  if (!name) return;
  const payload = JSON.stringify({
    last_activity: new Date().toISOString(),
    pid: process.pid,
  });
  try {
    fs.writeFileSync(heartbeatFile(name), payload);
  } catch {}
}
80
+
81
// Read the acknowledgements map; a missing or corrupt file yields {}.
function getAcks() {
  if (!fs.existsSync(ACKS_FILE)) return {};
  try {
    return JSON.parse(fs.readFileSync(ACKS_FILE, 'utf8'));
  } catch {
    return {};
  }
}
85
+
86
// Read agent profiles through the 2s read cache; a missing or corrupt
// profiles.json degrades to {}.
function getProfiles() {
  return cachedRead('profiles', () => {
    if (!fs.existsSync(PROFILES_FILE)) return {};
    try {
      return JSON.parse(fs.readFileSync(PROFILES_FILE, 'utf8'));
    } catch {
      return {};
    }
  }, 2000);
}
92
+
93
// Persist profiles under the profiles.json file lock. The cache entry is
// invalidated inside the critical section so readers re-read after the write.
function saveProfiles(profiles) {
  withFileLock(PROFILES_FILE, () => {
    invalidateCache('profiles');
    fs.writeFileSync(PROFILES_FILE, JSON.stringify(profiles));
  });
}
99
+
100
// Public API. lockAgentsFile/unlockAgentsFile are re-exported from file-io
// so callers that mutate agents.json need only this module.
module.exports = {
  isPidAlive, setAutonomousModeCheck,
  getAgents, saveAgents,
  heartbeatFile, touchHeartbeat,
  getAcks,
  getProfiles, saveProfiles,
  lockAgentsFile, unlockAgentsFile,
};
package/lib/compact.js ADDED
@@ -0,0 +1,124 @@
1
+ 'use strict';
2
+
3
+ const fs = require('fs');
4
+ const path = require('path');
5
+ const log = require('./logger');
6
+ const state = require('./state');
7
+ const { DATA_DIR, getMessagesFile, sanitizeName } = require('./config');
8
+ const { getAgents, isPidAlive } = require('./agents');
9
+
10
+ // --- Consumed ID tracking ---
11
+
12
// Path of the per-agent consumed-ids file. sanitizeName() is called purely
// for validation — it throws on malformed names, which blocks path traversal
// through `agentName` before it is joined into a filename.
function consumedFile(agentName) {
  sanitizeName(agentName);
  return path.join(DATA_DIR, `consumed-${agentName}.json`);
}
16
+
17
// Set of message ids `agentName` has already consumed. File problems
// (missing or corrupt) degrade to an empty set.
function getConsumedIds(agentName) {
  const file = consumedFile(agentName);
  if (!fs.existsSync(file)) return new Set();
  try {
    const ids = JSON.parse(fs.readFileSync(file, 'utf8'));
    return new Set(ids);
  } catch {
    return new Set();
  }
}
26
+
27
// Persist an agent's consumed-id set, first trimming it against the live
// message log once it grows past 500 entries.
function saveConsumedIds(agentName, ids) {
  if (ids.size > 500) trimConsumedIds(agentName, ids);
  fs.writeFileSync(consumedFile(agentName), JSON.stringify(Array.from(ids)));
}
33
+
34
// Remove ids from `ids` (in place) that no longer appear in the current
// branch's message log; a missing or empty log clears the set entirely.
// Any I/O error leaves the set untouched.
function trimConsumedIds(agentName, ids) {
  try {
    const msgFile = getMessagesFile(state.currentBranch);
    if (!fs.existsSync(msgFile)) {
      ids.clear();
      return;
    }
    const content = fs.readFileSync(msgFile, 'utf8').trim();
    if (!content) {
      ids.clear();
      return;
    }
    // Cheap regex scan for ids — avoids JSON.parse on every log line.
    const liveIds = new Set();
    for (const line of content.split(/\r?\n/)) {
      const match = line.match(/"id"\s*:\s*"([^"]+)"/);
      if (match) liveIds.add(match[1]);
    }
    for (const id of [...ids]) {
      if (!liveIds.has(id)) ids.delete(id);
    }
  } catch {}
}
50
+
51
+ // --- Auto-compact ---
52
+
53
// Compact the current branch's message log once it reaches 500 lines.
// Fully-consumed messages are appended to a dated archive file, the log is
// atomically rewritten (tmp file + rename) with only active messages, and
// each agent's consumed-id list is trimmed to ids still in the log.
// All failures are logged and swallowed — compaction is best effort.
function autoCompact() {
  const msgFile = getMessagesFile(state.currentBranch);
  if (!fs.existsSync(msgFile)) return;
  try {
    const content = fs.readFileSync(msgFile, 'utf8').trim();
    if (!content) return;
    const lines = content.split(/\r?\n/);
    if (lines.length < 500) return; // compact only once the log is large

    const messages = lines.map(l => { try { return JSON.parse(l); } catch { return null; } }).filter(Boolean);

    // Agents still running; a group message stays until all of them (other
    // than the sender) have consumed it.
    const agents = getAgents();
    const aliveAgentNames = Object.keys(agents).filter(n => isPidAlive(agents[n].pid, agents[n].last_activity));

    // Union of every agent's consumed ids, plus the per-agent sets.
    const allConsumed = new Set();
    const perAgentConsumed = {};
    if (fs.existsSync(DATA_DIR)) {
      for (const f of fs.readdirSync(DATA_DIR)) {
        if (f.startsWith('consumed-') && f.endsWith('.json')) {
          const agentName = f.replace('consumed-', '').replace('.json', '');
          try {
            const ids = JSON.parse(fs.readFileSync(path.join(DATA_DIR, f), 'utf8'));
            perAgentConsumed[agentName] = new Set(ids);
            ids.forEach(id => allConsumed.add(id));
          } catch {}
        }
      }
    }

    const active = messages.filter(m => {
      if (m.to === '__group__') {
        return !aliveAgentNames.every(n => n === m.from || (perAgentConsumed[n] && perAgentConsumed[n].has(m.id)));
      }
      return !allConsumed.has(m.id);
    });

    // Identity Set gives O(n) partitioning; active.includes(m) inside filter
    // was O(n^2) on a log that is >= 500 lines by construction.
    const activeSet = new Set(active);
    const archived = messages.filter(m => !activeSet.has(m));
    if (archived.length > 0) {
      const dateStr = new Date().toISOString().slice(0, 10);
      const archiveFile = path.join(DATA_DIR, `archive-${dateStr}.jsonl`);
      const archiveContent = archived.map(m => JSON.stringify(m)).join('\n') + '\n';
      try { fs.appendFileSync(archiveFile, archiveContent); } catch (e) { log.error('autoCompact archive write failed:', e.message); }
    }

    // Atomic replace: write tmp, then rename over the live log.
    const newContent = active.map(m => JSON.stringify(m)).join('\n') + (active.length ? '\n' : '');
    const tmpFile = msgFile + '.tmp';
    fs.writeFileSync(tmpFile, newContent);
    try {
      fs.renameSync(tmpFile, msgFile);
    } catch {
      try { fs.unlinkSync(tmpFile); } catch {}
      return; // rename failed: leave the original log untouched
    }
    state.lastReadOffset = Buffer.byteLength(newContent, 'utf8');

    // Trim every consumed-id list down to ids still present in the log.
    const activeIds = new Set(active.map(m => m.id));
    for (const f of fs.readdirSync(DATA_DIR)) {
      if (f.startsWith('consumed-') && f.endsWith('.json')) {
        try {
          const ids = JSON.parse(fs.readFileSync(path.join(DATA_DIR, f), 'utf8'));
          const trimmed = ids.filter(id => activeIds.has(id));
          fs.writeFileSync(path.join(DATA_DIR, f), JSON.stringify(trimmed));
        } catch (e) { log.debug('consumed trim failed:', e.message); }
      }
    }
  } catch (e) { log.warn('autoCompact failed:', e.message); }
}
120
+
121
// Consumed-id bookkeeping plus the log compactor.
module.exports = {
  consumedFile, getConsumedIds, saveConsumedIds, trimConsumedIds,
  autoCompact,
};
package/lib/config.js ADDED
@@ -0,0 +1,127 @@
1
+ 'use strict';
2
+
3
+ const fs = require('fs');
4
+ const path = require('path');
5
+
6
// Data dir lives in the project where the CLI runs, not where the package is installed
const DATA_DIR = process.env.NEOHIVE_DATA_DIR || path.join(process.cwd(), '.neohive');

// File paths for all shared data
const MESSAGES_FILE = path.join(DATA_DIR, 'messages.jsonl');
const HISTORY_FILE = path.join(DATA_DIR, 'history.jsonl');
const AGENTS_FILE = path.join(DATA_DIR, 'agents.json');
const ACKS_FILE = path.join(DATA_DIR, 'acks.json');
const TASKS_FILE = path.join(DATA_DIR, 'tasks.json');
const PROFILES_FILE = path.join(DATA_DIR, 'profiles.json');
const WORKFLOWS_FILE = path.join(DATA_DIR, 'workflows.json');
const WORKSPACES_DIR = path.join(DATA_DIR, 'workspaces');
const BRANCHES_FILE = path.join(DATA_DIR, 'branches.json');
const DECISIONS_FILE = path.join(DATA_DIR, 'decisions.json');
const KB_FILE = path.join(DATA_DIR, 'kb.json');
const LOCKS_FILE = path.join(DATA_DIR, 'locks.json');
const PROGRESS_FILE = path.join(DATA_DIR, 'progress.json');
const VOTES_FILE = path.join(DATA_DIR, 'votes.json');
const REVIEWS_FILE = path.join(DATA_DIR, 'reviews.json');
const DEPS_FILE = path.join(DATA_DIR, 'dependencies.json');
const REPUTATION_FILE = path.join(DATA_DIR, 'reputation.json');
const COMPRESSED_FILE = path.join(DATA_DIR, 'compressed.json');
const RULES_FILE = path.join(DATA_DIR, 'rules.json');
const CONFIG_FILE = path.join(DATA_DIR, 'config.json');
const PERMISSIONS_FILE = path.join(DATA_DIR, 'permissions.json');
const READ_RECEIPTS_FILE = path.join(DATA_DIR, 'read_receipts.json');
const DATA_VERSION_FILE = path.join(DATA_DIR, '.version');
const CHANNELS_FILE_PATH = path.join(DATA_DIR, 'channels.json');

// Constants
const MAX_CONTENT_BYTES = 1000000; // 1 MB max message size
const CURRENT_DATA_VERSION = 1;
// Names no agent may register. sanitizeName() compares the lowercased
// candidate against this list, so mixed-case entries can never match —
// NOTE(review): 'Dashboard' is dead weight here ('dashboard' already covers it).
const RESERVED_NAMES = ['__system__', '__all__', '__open__', '__close__', 'system', 'dashboard', 'Dashboard'];
39
+
40
// Config helpers

// Read config.json; a missing or unparseable config degrades to {}.
function getConfig() {
  if (!fs.existsSync(CONFIG_FILE)) return {};
  try {
    return JSON.parse(fs.readFileSync(CONFIG_FILE, 'utf8'));
  } catch {
    return {};
  }
}
45
+
46
// Serialize `config` to config.json, creating the data dir on first use.
function saveConfig(config) {
  ensureDataDir();
  const serialized = JSON.stringify(config);
  fs.writeFileSync(CONFIG_FILE, serialized);
}
50
+
51
// True when the configured conversation_mode is 'group'.
function isGroupMode() {
  const { conversation_mode } = getConfig();
  return conversation_mode === 'group';
}

// True when the configured conversation_mode is 'managed'.
function isManagedMode() {
  const { conversation_mode } = getConfig();
  return conversation_mode === 'managed';
}
58
+
59
// Managed-mode settings from config, or a fresh default structure when none
// has been saved yet (the default is a new object on every call).
function getManagedConfig() {
  const { managed } = getConfig();
  if (managed) return managed;
  return {
    manager: null,
    phase: 'discussion',
    floor: 'closed',
    turn_queue: [],
    turn_current: null,
    phase_history: [],
  };
}
70
+
71
// Create DATA_DIR (owner-only permissions) if it does not exist yet.
function ensureDataDir() {
  if (fs.existsSync(DATA_DIR)) return;
  fs.mkdirSync(DATA_DIR, { recursive: true, mode: 0o700 });
}
76
+
77
// Validate an agent/branch name and return it unchanged.
// Throws on anything other than 1-20 chars of [a-zA-Z0-9_-], and on
// reserved names (compared case-insensitively).
function sanitizeName(name) {
  const wellFormed = typeof name === 'string' && /^[a-zA-Z0-9_-]{1,20}$/.test(name);
  if (!wellFormed) {
    throw new Error(`Invalid name "${name}": must be 1-20 alphanumeric/underscore/hyphen chars`);
  }
  if (RESERVED_NAMES.includes(name.toLowerCase())) {
    throw new Error(`Name "${name}" is reserved and cannot be used`);
  }
  return name;
}
86
+
87
// Unique message id: base36 timestamp plus 12 hex chars of randomness.
// Falls back to Math.random() when the crypto module is unavailable.
function generateId() {
  const prefix = Date.now().toString(36);
  try {
    return prefix + require('crypto').randomBytes(6).toString('hex');
  } catch {
    return prefix + Math.random().toString(36).slice(2, 8);
  }
}

// 32-hex-char token, with a weaker Math.random() fallback.
function generateToken() {
  try {
    return require('crypto').randomBytes(16).toString('hex');
  } catch {
    return Math.random().toString(36).slice(2) + Math.random().toString(36).slice(2);
  }
}
96
+
97
// Check that `content` is a string within the 1 MB byte limit.
// Returns null when valid, or { error } describing the violation.
function validateContentSize(content) {
  if (typeof content !== 'string') {
    return { error: 'content must be a string' };
  }
  const bytes = Buffer.byteLength(content, 'utf8');
  if (bytes > MAX_CONTENT_BYTES) {
    return { error: 'Message content exceeds maximum size (1 MB)' };
  }
  return null;
}
104
+
105
// Message-log path for a branch; falsy and 'main' map to the shared log.
// Branch names are validated (sanitizeName throws on bad input).
function getMessagesFile(branch) {
  const isMain = !branch || branch === 'main';
  return isMain
    ? MESSAGES_FILE
    : path.join(DATA_DIR, `branch-${sanitizeName(branch)}-messages.jsonl`);
}

// History-log path for a branch; falsy and 'main' map to the shared log.
function getHistoryFile(branch) {
  const isMain = !branch || branch === 'main';
  return isMain
    ? HISTORY_FILE
    : path.join(DATA_DIR, `branch-${sanitizeName(branch)}-history.jsonl`);
}
114
+
115
// Single export surface: path constants, tunables, and config/name helpers.
module.exports = {
  DATA_DIR,
  MESSAGES_FILE, HISTORY_FILE, AGENTS_FILE, ACKS_FILE, TASKS_FILE,
  PROFILES_FILE, WORKFLOWS_FILE, WORKSPACES_DIR, BRANCHES_FILE,
  DECISIONS_FILE, KB_FILE, LOCKS_FILE, PROGRESS_FILE, VOTES_FILE,
  REVIEWS_FILE, DEPS_FILE, REPUTATION_FILE, COMPRESSED_FILE, RULES_FILE,
  CONFIG_FILE, PERMISSIONS_FILE, READ_RECEIPTS_FILE, DATA_VERSION_FILE,
  CHANNELS_FILE_PATH,
  MAX_CONTENT_BYTES, CURRENT_DATA_VERSION, RESERVED_NAMES,
  getConfig, saveConfig, isGroupMode, isManagedMode, getManagedConfig,
  ensureDataDir, sanitizeName, generateId, generateToken, validateContentSize,
  getMessagesFile, getHistoryFile,
};
package/lib/file-io.js ADDED
@@ -0,0 +1,166 @@
1
+ 'use strict';
2
+
3
+ const fs = require('fs');
4
+ const path = require('path');
5
+ const log = require('./logger');
6
+ const { DATA_DIR, AGENTS_FILE, CONFIG_FILE, ensureDataDir } = require('./config');
7
+
8
// --- Read cache (eliminates 70%+ redundant disk I/O) ---
const _cache = {};

// Return the cached value for `key` if younger than `ttlMs`; otherwise call
// `readFn`, store its result, and return it.
function cachedRead(key, readFn, ttlMs = 2000) {
  const entry = _cache[key];
  const now = Date.now();
  if (entry && now - entry.ts < ttlMs) {
    return entry.val;
  }
  const val = readFn();
  _cache[key] = { val, ts: now };
  return val;
}

// Drop one cache entry so the next cachedRead() re-reads from disk.
function invalidateCache(key) {
  delete _cache[key];
}
19
+
20
// --- JSONL readers ---

// Parse every line of a JSONL file; unparseable lines are silently dropped.
// A missing or empty file yields [].
function readJsonl(file) {
  if (!fs.existsSync(file)) return [];
  const content = fs.readFileSync(file, 'utf8').trim();
  if (!content) return [];
  const records = [];
  for (const line of content.split(/\r?\n/)) {
    try {
      records.push(JSON.parse(line));
    } catch {
      // skip corrupt line
    }
  }
  return records;
}
30
+
31
// Optimized: read only NEW lines from a JSONL file starting at byte offset.
// Returns { messages, newOffset } where newOffset is the byte position the
// next call should resume from.
function readJsonlFromOffset(file, offset) {
  if (!fs.existsSync(file)) return { messages: [], newOffset: 0 };
  const size = fs.statSync(file).size;
  if (size <= offset) return { messages: [], newOffset: offset };

  const buf = Buffer.alloc(size - offset);
  const fd = fs.openSync(file, 'r');
  fs.readSync(fd, buf, 0, buf.length, offset);
  fs.closeSync(fd);

  const chunk = buf.toString('utf8').trim();
  if (!chunk) return { messages: [], newOffset: size };

  const messages = [];
  for (const line of chunk.split(/\r?\n/)) {
    try {
      messages.push(JSON.parse(line));
    } catch {
      // skip corrupt line
    }
  }
  return { messages, newOffset: size };
}
47
+
48
// Read only the last N lines of a JSONL file — O(N) instead of O(all).
// The read window assumes ~300 bytes per line; when the window starts
// mid-file, the first (possibly partial) line is discarded.
function tailReadJsonl(file, lineCount = 100) {
  if (!fs.existsSync(file)) return [];
  const { size } = fs.statSync(file);
  if (size === 0) return [];

  const readSize = Math.min(size, lineCount * 300);
  const offset = Math.max(0, size - readSize);
  const buf = Buffer.alloc(readSize);
  const fd = fs.openSync(file, 'r');
  fs.readSync(fd, buf, 0, readSize, offset);
  fs.closeSync(fd);

  const lines = buf.toString('utf8').split(/\r?\n/).filter(l => l.trim());
  if (offset > 0 && lines.length > 0) lines.shift();

  const parsed = [];
  for (const line of lines) {
    try {
      parsed.push(JSON.parse(line));
    } catch {
      // skip corrupt line
    }
  }
  return parsed.slice(-lineCount);
}
67
+
68
// --- JSON file helpers ---

// Parse a whole JSON file; returns null when missing or unparseable.
function readJsonFile(file) {
  if (!fs.existsSync(file)) return null;
  try {
    return JSON.parse(fs.readFileSync(file, 'utf8'));
  } catch {
    return null;
  }
}
74
+
75
// File-to-cache-key map: writeJsonFile auto-invalidates the right cache entry
const _fileCacheKeys = {};

// Associate a file path with the cachedRead() key to invalidate on write.
function registerFileCacheKey(file, cacheKey) {
  _fileCacheKeys[file] = cacheKey;
}
81
+
82
// Serialize `data` to `file` under a best-effort advisory lock, then drop
// the paired read-cache entry (if one was registered).
// NOTE(review): when the lock file cannot be created (another writer holds
// it), the write still proceeds WITHOUT the lock — confirm this tolerance
// is intended rather than waiting/retrying like withFileLock does.
function writeJsonFile(file, data) {
  ensureDataDir();
  const str = JSON.stringify(data);
  // JSON.stringify(undefined) yields undefined; skip the write in that case.
  if (str && str.length > 0) {
    // Use file lock to prevent concurrent write corruption
    const lockPath = file + '.lock';
    let locked = false;
    try { fs.writeFileSync(lockPath, String(process.pid), { flag: 'wx' }); locked = true; } catch {}
    try {
      fs.writeFileSync(file, str);
    } finally {
      // Only remove the lock we actually created.
      if (locked) try { fs.unlinkSync(lockPath); } catch {}
    }
    const cacheKey = _fileCacheKeys[file];
    if (cacheKey) invalidateCache(cacheKey);
  }
}
99
+
100
+ // --- File locking ---
101
+
102
// Dedicated lock for agents.json (exponential backoff)
const AGENTS_LOCK = AGENTS_FILE + '.lock';

// Acquire agents.json's lock via exclusive create, spinning synchronously
// with exponential backoff (1ms doubling, capped at 500ms) for up to 5s.
// On timeout the lock is presumed stale and stolen. Returns true on success.
function lockAgentsFile() {
  const deadline = Date.now() + 5000;
  let backoff = 1;
  while (Date.now() < deadline) {
    try {
      fs.writeFileSync(AGENTS_LOCK, String(process.pid), { flag: 'wx' });
      return true;
    } catch {}
    const spinStart = Date.now();
    while (Date.now() - spinStart < backoff) { /* busy wait */ }
    backoff = Math.min(backoff * 2, 500);
  }
  // Timed out: steal the (presumed stale) lock.
  try { fs.unlinkSync(AGENTS_LOCK); } catch {}
  try {
    fs.writeFileSync(AGENTS_LOCK, String(process.pid), { flag: 'wx' });
    return true;
  } catch {}
  return false;
}

function unlockAgentsFile() {
  try { fs.unlinkSync(AGENTS_LOCK); } catch {}
}
118
+
119
// Dedicated lock for config.json
const CONFIG_LOCK = CONFIG_FILE + '.lock';

// Acquire config.json's lock via exclusive create, retrying every 50ms for
// up to 5s; on timeout the (presumed stale) lock is stolen. Returns true on
// success, false if the lock could not be taken at all.
function lockConfigFile() {
  const deadline = Date.now() + 5000;
  while (Date.now() < deadline) {
    try {
      fs.writeFileSync(CONFIG_LOCK, String(process.pid), { flag: 'wx' });
      return true;
    } catch {}
    const spinStart = Date.now();
    while (Date.now() - spinStart < 50) { /* busy wait */ }
  }
  try { fs.unlinkSync(CONFIG_LOCK); } catch {}
  try {
    fs.writeFileSync(CONFIG_LOCK, String(process.pid), { flag: 'wx' });
    return true;
  } catch {}
  return false;
}

function unlockConfigFile() {
  try { fs.unlinkSync(CONFIG_LOCK); } catch {}
}
133
+
134
// Generic file lock for any JSON file
//
// Acquires `filePath + '.lock'` via exclusive create (flag 'wx'), spinning
// synchronously with exponential backoff (1ms doubling, capped at 500ms) for
// up to 5s, then runs `fn` and removes the lock in a finally block.
function withFileLock(filePath, fn) {
  const lockPath = filePath + '.lock';
  const maxWait = 5000; const start = Date.now();
  let backoff = 1;
  while (Date.now() - start < maxWait) {
    // Exclusive create succeeds only when no lock file exists.
    try { fs.writeFileSync(lockPath, String(process.pid), { flag: 'wx' }); break; }
    catch {}
    // Synchronous busy-wait: these call sites have no async context.
    const wait = Date.now(); while (Date.now() - wait < backoff) {}
    backoff = Math.min(backoff * 2, 500);
    if (Date.now() - start >= maxWait) {
      // Timed out. If the recorded holder is still alive, give up entirely —
      // NOTE(review): callers then receive null and fn never runs.
      try {
        const lockPid = parseInt(fs.readFileSync(lockPath, 'utf8').trim(), 10);
        if (lockPid && lockPid !== process.pid) {
          try { process.kill(lockPid, 0); return null; } catch {}
        }
      } catch {}
      // Holder appears dead (or lock unreadable): steal the stale lock.
      try { fs.unlinkSync(lockPath); } catch {}
      // NOTE(review): if re-acquiring still fails, fn runs WITHOUT the lock,
      // and no cleanup happens on this early-return path.
      try { fs.writeFileSync(lockPath, String(process.pid), { flag: 'wx' }); } catch { return fn(); }
      break;
    }
  }
  try { return fn(); } finally { try { fs.unlinkSync(lockPath); } catch {} }
}
158
+
159
// Low-level I/O surface: read caching, JSONL readers, JSON helpers, locks.
module.exports = {
  cachedRead, invalidateCache,
  readJsonl, readJsonlFromOffset, tailReadJsonl,
  readJsonFile, writeJsonFile, registerFileCacheKey,
  lockAgentsFile, unlockAgentsFile,
  lockConfigFile, unlockConfigFile,
  withFileLock,
};
package/lib/logger.js ADDED
@@ -0,0 +1,13 @@
1
+ 'use strict';
2
+
3
const LOG_LEVEL = (process.env.NEOHIVE_LOG_LEVEL || 'warn').toLowerCase();
const LOG_LEVELS = { error: 0, warn: 1, info: 2, debug: 3 };
// A typo in NEOHIVE_LOG_LEVEL used to silence ALL output (LOG_LEVELS[bad]
// is undefined and `undefined >= n` is always false); fall back to 'warn'
// so errors are never accidentally suppressed.
const ACTIVE_LEVEL = LOG_LEVELS[LOG_LEVEL] !== undefined ? LOG_LEVELS[LOG_LEVEL] : LOG_LEVELS.warn;

// Write one tagged line to stderr.
function emit(tag, args) {
  process.stderr.write(`[NEOHIVE:${tag}] ` + args.map(String).join(' ') + '\n');
}

// Leveled stderr logger; level comes from NEOHIVE_LOG_LEVEL (default 'warn').
const log = {
  error: (...args) => { if (ACTIVE_LEVEL >= LOG_LEVELS.error) emit('ERROR', args); },
  warn: (...args) => { if (ACTIVE_LEVEL >= LOG_LEVELS.warn) emit('WARN', args); },
  info: (...args) => { if (ACTIVE_LEVEL >= LOG_LEVELS.info) emit('INFO', args); },
  debug: (...args) => { if (ACTIVE_LEVEL >= LOG_LEVELS.debug) emit('DEBUG', args); },
};
12
+
13
// Singleton logger shared by every module.
module.exports = log;
@@ -0,0 +1,137 @@
1
+ 'use strict';
2
+
3
+ const fs = require('fs');
4
+ const state = require('./state');
5
+ const { DATA_DIR, getMessagesFile, getHistoryFile, generateId, ensureDataDir } = require('./config');
6
+ const { readJsonlFromOffset } = require('./file-io');
7
+ const { getAgents, isPidAlive } = require('./agents');
8
+
9
// Rate limiting constants
const rateLimitWindow = 60000;
const rateLimitMax = 30;

// Gate an outgoing send: enforce a sliding-window limit of 30 messages per
// minute and reject exact duplicates (same recipient + identical first 200
// chars within the last 30s). Returns { error } on rejection, or null after
// recording the send against both limits.
function checkRateLimit(content, to) {
  const now = Date.now();
  // Keep only send timestamps still inside the sliding window.
  state.rateLimitMessages = state.rateLimitMessages.filter(t => now - t < rateLimitWindow);
  if (state.rateLimitMessages.length >= rateLimitMax) {
    return { error: `Rate limit exceeded: max ${rateLimitMax} messages per minute. Wait before sending more.` };
  }
  // Expire duplicate-detection entries older than 30s.
  state.recentSentMessages = state.recentSentMessages.filter(m => now - m.timestamp < 30000);
  if (content && typeof content === 'string' && to) {
    // Only the first 200 chars participate in duplicate detection.
    const contentKey = content.substring(0, 200);
    const dup = state.recentSentMessages.find(m => m.to === to && m.content === contentKey);
    if (dup) {
      return { error: `Duplicate message detected — you already sent this to ${to} ${Math.round((now - dup.timestamp) / 1000)}s ago. Send a different message.` };
    }
    state.recentSentMessages.push({ content: contentKey, to, timestamp: now });
    // Hard cap on the dedupe buffer.
    if (state.recentSentMessages.length > 50) state.recentSentMessages = state.recentSentMessages.slice(-30);
  }
  // The send is allowed; count it against the rate window.
  state.rateLimitMessages.push(now);
  return null;
}
32
+
33
// Append a system-authored message addressed to one agent, writing it to
// both the live message log and the history log of the recipient's branch
// (falling back to the current branch when the agent has none on record).
function sendSystemMessage(toAgent, content) {
  state.messageSeq++;
  const agents = getAgents();
  const agentBranch = agents[toAgent] && agents[toAgent].branch;
  const recipientBranch = agentBranch || state.currentBranch;
  const record = JSON.stringify({
    id: generateId(),
    seq: state.messageSeq,
    from: '__system__',
    to: toAgent,
    content,
    timestamp: new Date().toISOString(),
    system: true,
  }) + '\n';
  ensureDataDir();
  fs.appendFileSync(getMessagesFile(recipientBranch), record);
  fs.appendFileSync(getHistoryFile(recipientBranch), record);
}
50
+
51
// Append a system-authored broadcast (to '__group__') to the current
// branch's message and history logs. When `excludeAgent` is given it is
// recorded as `exclude_agent` on the message.
function broadcastSystemMessage(content, excludeAgent = null) {
  state.messageSeq++;
  const msg = {
    id: generateId(),
    seq: state.messageSeq,
    from: '__system__',
    to: '__group__',
    content,
    timestamp: new Date().toISOString(),
    system: true,
  };
  if (excludeAgent) msg.exclude_agent = excludeAgent;
  const record = JSON.stringify(msg) + '\n';
  ensureDataDir();
  fs.appendFileSync(getMessagesFile(state.currentBranch), record);
  fs.appendFileSync(getHistoryFile(state.currentBranch), record);
}
67
+
68
// Read new lines from messages.jsonl starting at a byte offset
function readNewMessages(fromOffset, branch) {
  const targetBranch = branch || state.currentBranch;
  return readNewMessagesFromFile(fromOffset, getMessagesFile(targetBranch));
}
73
+
74
// Read and parse all JSONL lines of `filePath` from byte `fromOffset`
// onward. Returns { messages, newOffset }; a file smaller than the offset
// (truncated or rotated) resets newOffset to 0 so the caller re-reads.
function readNewMessagesFromFile(fromOffset, filePath) {
  if (!fs.existsSync(filePath)) return { messages: [], newOffset: 0 };
  const { size } = fs.statSync(filePath);
  if (size < fromOffset) return { messages: [], newOffset: 0 };
  if (size === fromOffset) return { messages: [], newOffset: fromOffset };

  const buf = Buffer.alloc(size - fromOffset);
  const fd = fs.openSync(filePath, 'r');
  fs.readSync(fd, buf, 0, buf.length, fromOffset);
  fs.closeSync(fd);

  const chunk = buf.toString('utf8').trim();
  if (!chunk) return { messages: [], newOffset: size };

  const messages = [];
  for (const line of chunk.split(/\r?\n/)) {
    try {
      messages.push(JSON.parse(line));
    } catch {
      // skip corrupt line
    }
  }
  return { messages, newOffset: size };
}
90
+
91
// Build standard message delivery response
// Shapes `msg` for delivery and attaches pending_count (other unconsumed
// messages addressed to this agent) and agents_online (agents whose pid or
// activity looks alive).
function buildMessageResponse(msg, consumedIds) {
  const log = require('./logger');

  let pendingCount = 0;
  try {
    const msgFile = getMessagesFile(state.currentBranch);
    if (fs.existsSync(msgFile)) {
      const { messages: unread } = readNewMessages(state.lastReadOffset);
      pendingCount = unread.filter(m =>
        m.to === state.registeredName && m.id !== msg.id && !consumedIds.has(m.id)
      ).length;
    }
  } catch (e) {
    log.debug('pending count failed:', e.message);
  }

  const agents = getAgents();
  let agentsOnline = 0;
  for (const info of Object.values(agents)) {
    if (isPidAlive(info.pid, info.last_activity)) agentsOnline++;
  }

  // Optional threading fields are attached only when present on the source.
  const message = {
    id: msg.id,
    from: msg.from,
    content: msg.content,
    timestamp: msg.timestamp,
  };
  if (msg.reply_to) message.reply_to = msg.reply_to;
  if (msg.thread_id) message.thread_id = msg.thread_id;

  return {
    success: true,
    message,
    pending_count: pendingCount,
    agents_online: agentsOnline,
  };
}
120
+
121
// Promise that resolves after `ms` milliseconds.
function sleep(ms) {
  return new Promise(resolve => setTimeout(resolve, ms));
}

// Poll-loop backoff: 500ms for the first 10 polls, 1s up to 30, then 2s.
function adaptiveSleep(pollCount) {
  let delay;
  if (pollCount < 10) delay = 500;
  else if (pollCount < 30) delay = 1000;
  else delay = 2000;
  return sleep(delay);
}
130
+
131
// Messaging primitives: rate limiting, system sends, incremental log reads.
module.exports = {
  checkRateLimit,
  sendSystemMessage, broadcastSystemMessage,
  readNewMessages, readNewMessagesFromFile,
  buildMessageResponse,
  sleep, adaptiveSleep,
};