@siftd/connect-agent 0.2.16 → 0.2.18

package/dist/agent.js CHANGED
@@ -1,4 +1,4 @@
- import { spawn } from 'child_process';
+ import { spawn, execSync } from 'child_process';
  import { pollMessages, sendResponse } from './api.js';
  import { getUserId, getAnthropicApiKey, isCloudMode, getDeploymentInfo } from './config.js';
  import { MasterOrchestrator } from './orchestrator.js';
@@ -8,6 +8,112 @@ import { startHeartbeat, stopHeartbeat, getHeartbeatState } from './heartbeat.js
  function stripAnsi(str) {
      return str.replace(/\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])/g, '');
  }
+ // Lock to prevent concurrent updates
+ let updateInProgress = false;
+ /**
+  * Get the installed version of connect-agent
+  */
+ function getInstalledVersion() {
+     try {
+         const output = execSync('npm list -g @siftd/connect-agent --depth=0 2>/dev/null', {
+             encoding: 'utf8',
+             shell: '/bin/bash'
+         });
+         const match = output.match(/@siftd\/connect-agent@([\d.]+)/);
+         return match ? match[1] : 'unknown';
+     }
+     catch {
+         return 'unknown';
+     }
+ }
+ /**
+  * Get the latest published version from npm
+  */
+ function getLatestVersion() {
+     try {
+         const output = execSync('npm view @siftd/connect-agent version 2>/dev/null', {
+             encoding: 'utf8',
+             shell: '/bin/bash'
+         });
+         return output.trim();
+     }
+     catch {
+         return 'unknown';
+     }
+ }
+ /**
+  * Actually perform a self-update - runs npm install and restarts
+  */
+ async function performSelfUpdate() {
+     // Prevent concurrent updates
+     if (updateInProgress) {
+         return '⏳ Update already in progress. Please wait...';
+     }
+     updateInProgress = true;
+     console.log('[AGENT] Starting self-update...');
+     try {
+         // Get current version
+         const currentVersion = getInstalledVersion();
+         console.log('[AGENT] Current version:', currentVersion);
+         // Check latest version first
+         const latestVersion = getLatestVersion();
+         console.log('[AGENT] Latest available:', latestVersion);
+         if (currentVersion === latestVersion && currentVersion !== 'unknown') {
+             updateInProgress = false;
+             return `✅ Already on latest version (${currentVersion})`;
+         }
+         console.log('[AGENT] Running: npm install -g @siftd/connect-agent@latest');
+         // Actually run the npm install with more detailed error capture
+         let installOutput;
+         try {
+             installOutput = execSync('npm install -g @siftd/connect-agent@latest 2>&1', {
+                 encoding: 'utf8',
+                 shell: '/bin/bash',
+                 timeout: 180000, // 3 minute timeout
+                 maxBuffer: 10 * 1024 * 1024 // 10MB buffer
+             });
+         }
+         catch (installError) {
+             const err = installError;
+             const output = err.stdout || err.stderr || err.message || 'Unknown install error';
+             console.error('[AGENT] npm install failed:', output);
+             updateInProgress = false;
+             // Parse common npm errors
+             if (output.includes('EACCES') || output.includes('permission denied')) {
+                 return `❌ Permission denied. Try running:\nsudo npm install -g @siftd/connect-agent@latest`;
+             }
+             if (output.includes('ENOTFOUND') || output.includes('network')) {
+                 return `❌ Network error. Check your internet connection and try again.`;
+             }
+             if (output.includes('E404')) {
+                 return `❌ Package not found on npm. The package may have been unpublished.`;
+             }
+             return `❌ Update failed:\n${output.slice(0, 500)}\n\nRun manually:\nnpm install -g @siftd/connect-agent@latest`;
+         }
+         console.log('[AGENT] Install output:', installOutput.slice(0, 500));
+         // Verify the update succeeded
+         const newVersion = getInstalledVersion();
+         console.log('[AGENT] New version:', newVersion);
+         if (newVersion === currentVersion && currentVersion !== 'unknown') {
+             updateInProgress = false;
+             return `⚠️ Version unchanged (${currentVersion}). npm may have used cache.\n\nTry: npm cache clean --force && npm install -g @siftd/connect-agent@latest`;
+         }
+         // Schedule restart
+         console.log('[AGENT] Scheduling restart in 3 seconds...');
+         setTimeout(() => {
+             console.log('[AGENT] Restarting...');
+             updateInProgress = false;
+             process.exit(0); // Exit - systemd/pm2/user will restart
+         }, 3000);
+         return `✅ Update complete!\n\n${currentVersion} → ${newVersion}\n\nRestarting agent in 3 seconds...`;
+     }
+     catch (error) {
+         updateInProgress = false;
+         const errMsg = error instanceof Error ? error.message : String(error);
+         console.error('[AGENT] Update failed:', errMsg);
+         return `❌ Update failed: ${errMsg}\n\nYou may need to run manually:\nnpm install -g @siftd/connect-agent@latest`;
+     }
+ }
  // Conversation history for orchestrator mode
  let conversationHistory = [];
  let orchestrator = null;
@@ -152,6 +258,12 @@ export async function processMessage(message) {
        );
        return response;
    }
+   // Handle self-update requests - ACTUALLY run the update, don't just pretend
+   if (content.includes('update') && content.includes('connect-agent') &&
+       (content.includes('npm install') || content.includes('latest'))) {
+       console.log('[AGENT] Self-update request detected - forcing actual execution');
+       return await performSelfUpdate();
+   }
    try {
        if (orchestrator) {
            return await sendToOrchestrator(message.content, orchestrator, message.id, message.apiKey);
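
The trigger added to processMessage above is a plain substring check on the incoming message, so any chat message that mentions updating connect-agent together with "npm install" or "latest" is routed straight to performSelfUpdate(). A minimal standalone sketch of that heuristic; the helper name isSelfUpdateRequest and the lowercasing are assumptions for illustration, not part of the package:

// Sketch of the keyword heuristic used above. isSelfUpdateRequest is a
// hypothetical helper name and the lowercasing is an assumption.
function isSelfUpdateRequest(message) {
    const content = message.toLowerCase();
    return content.includes('update') &&
        content.includes('connect-agent') &&
        (content.includes('npm install') || content.includes('latest'));
}

console.log(isSelfUpdateRequest('Please update connect-agent to the latest version')); // true
console.log(isSelfUpdateRequest('npm install -g @siftd/connect-agent@latest, then update it')); // true
console.log(isSelfUpdateRequest('What can connect-agent do?')); // false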
package/dist/heartbeat.js CHANGED
@@ -10,7 +10,7 @@ import { hostname } from 'os';
  import { createHash } from 'crypto';
  import { getServerUrl, getAgentToken, getUserId, isCloudMode } from './config.js';
  const HEARTBEAT_INTERVAL = 10000; // 10 seconds
- const VERSION = '0.2.16'; // Should match package.json
+ const VERSION = '0.2.18'; // Should match package.json
  const state = {
      intervalId: null,
      runnerId: null,
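
The VERSION constant still has to be bumped by hand alongside package.json, as its own comment notes. As a hedged alternative, purely a sketch and not what the published code does, the version could be read from the package's own manifest at startup:

// Sketch only - the shipped heartbeat.js hard-codes VERSION instead.
// Resolves package.json relative to dist/heartbeat.js (ESM), assuming dist/
// sits directly under the package root as "main": "dist/index.js" suggests.
import { readFileSync } from 'fs';
import { fileURLToPath } from 'url';
import { dirname, join } from 'path';

const here = dirname(fileURLToPath(import.meta.url));
const pkg = JSON.parse(readFileSync(join(here, '..', 'package.json'), 'utf8'));
const VERSION = pkg.version; // stays in sync with the published version automatically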
@@ -106,6 +106,15 @@ export declare class MasterOrchestrator
     * @param timeoutMs - Timeout in milliseconds (default: 30 minutes, max: 60 minutes)
     */
    private delegateToWorker;
+   /**
+    * Try to recover worker output from log file
+    * Workers are instructed to write logs to /tmp/worker-{id}-log.txt
+    */
+   private recoverWorkerLog;
+   /**
+    * Log worker failure to memory for learning
+    */
+   private logWorkerFailure;
    /**
     * Extract memory and coordination contributions from worker output
     * Workers can contribute using:
@@ -6,7 +6,7 @@
   */
  import Anthropic from '@anthropic-ai/sdk';
  import { spawn, execSync } from 'child_process';
- import { existsSync } from 'fs';
+ import { existsSync, readFileSync } from 'fs';
  import { AdvancedMemoryStore } from './core/memory-advanced.js';
  import { PostgresMemoryStore, isPostgresConfigured } from './core/memory-postgres.js';
  import { TaskScheduler } from './core/scheduler.js';
@@ -917,10 +917,11 @@ This enables parallel workers to coordinate.`;
              this.jobs.set(id, job);
              // Configurable timeout (default 30 min, max 60 min)
              const timeoutMinutes = Math.round(workerTimeout / 60000);
-             const timeout = setTimeout(() => {
+             const timeout = setTimeout(async () => {
                  if (job.status === 'running') {
                      job.status = 'timeout';
                      job.endTime = Date.now();
+                     const duration = Math.round((job.endTime - job.startTime) / 1000);
                      // Send SIGINT first to allow graceful shutdown and output flush
                      child.kill('SIGINT');
                      // Give 5 seconds for graceful shutdown before SIGTERM
@@ -929,14 +930,26 @@ This enables parallel workers to coordinate.`;
                              child.kill('SIGTERM');
                          }
                      }, 5000);
+                     // Wait a moment for graceful shutdown to flush logs
+                     await new Promise(r => setTimeout(r, 2000));
                      const partialOutput = job.output.trim();
-                     // Check for log file that worker should have created
-                     const logFile = `/tmp/worker-${id}-log.txt`;
+                     // Try to recover from log file
+                     const recoveredLog = this.recoverWorkerLog(id);
+                     // Log failure to memory for learning
+                     await this.logWorkerFailure(id, task, `Timeout after ${timeoutMinutes} minutes`, duration, recoveredLog || undefined);
+                     // Build combined output
+                     let combinedOutput = '';
+                     if (recoveredLog) {
+                         combinedOutput = `[Recovered from log file]\n${recoveredLog}\n\n`;
+                     }
+                     if (partialOutput) {
+                         combinedOutput += `[Partial stdout]\n${partialOutput.slice(-3000)}`;
+                     }
                      resolve({
                          success: false,
-                         output: partialOutput
-                             ? `Worker timed out after ${timeoutMinutes} minutes. Check ${logFile} for full logs. Partial findings:\n${partialOutput.slice(-3000)}`
-                             : `Worker timed out after ${timeoutMinutes} minutes with no output. Check ${logFile} for any saved progress.`
+                         output: combinedOutput
+                             ? `Worker timed out after ${timeoutMinutes} minutes. Recovered output:\n${combinedOutput}`
+                             : `Worker timed out after ${timeoutMinutes} minutes with no recoverable output.`
                      });
                  }
              }, workerTimeout);
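
The SIGINT-first sequence and the new two-second pause only pay off if the worker flushes something when interrupted. A hypothetical illustration of the worker-side half of that contract; none of this is in the published package, and the WORKER_ID wiring is assumed:

// Hypothetical worker-side handler: on SIGINT, flush buffered progress to the
// /tmp/worker-<id>-log.txt path that recoverWorkerLog() reads after a timeout.
import { appendFileSync } from 'fs';

const workerId = process.env.WORKER_ID ?? 'example'; // assumed wiring, not from the package
const logFile = `/tmp/worker-${workerId}-log.txt`;
const progress = [];

process.on('SIGINT', () => {
    try {
        appendFileSync(logFile, progress.join('\n') + '\n');
    } finally {
        process.exit(130); // conventional exit status after SIGINT
    }
});

// elsewhere in the worker:
progress.push('step 1: repository cloned');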
@@ -954,38 +967,98 @@ This enables parallel workers to coordinate.`;
                  clearTimeout(timeout);
                  job.status = code === 0 ? 'completed' : 'failed';
                  job.endTime = Date.now();
-                 const duration = ((job.endTime - job.startTime) / 1000).toFixed(1);
+                 const duration = Math.round((job.endTime - job.startTime) / 1000);
                  console.log(`[ORCHESTRATOR] Worker ${id} finished in ${duration}s (code: ${code})`);
-                 if (code !== 0 || job.output.length === 0) {
-                     console.log(`[ORCHESTRATOR] Worker ${id} output: ${job.output.slice(0, 200) || '(empty)'}`);
+                 let finalOutput = job.output.trim();
+                 if (code !== 0 || finalOutput.length === 0) {
+                     console.log(`[ORCHESTRATOR] Worker ${id} output: ${finalOutput.slice(0, 200) || '(empty)'}`);
+                     // Try to recover from log file on failure or empty output
+                     const recoveredLog = this.recoverWorkerLog(id);
+                     if (recoveredLog) {
+                         finalOutput = recoveredLog + (finalOutput ? `\n\n[Additional stdout]\n${finalOutput}` : '');
+                     }
+                     // Log failure to memory
+                     if (code !== 0) {
+                         await this.logWorkerFailure(id, task, `Exit code ${code}`, duration, recoveredLog || undefined);
+                     }
                  }
                  // Extract and store memory contributions from worker output
-                 await this.extractWorkerMemories(job.output, id);
+                 await this.extractWorkerMemories(finalOutput, id);
                  resolve({
                      success: code === 0,
-                     output: job.output.trim() || '(No output)'
+                     output: finalOutput || '(No output)'
                  });
              });
              child.on('error', async (err) => {
                  clearTimeout(timeout);
                  job.status = 'failed';
                  job.endTime = Date.now();
+                 const duration = Math.round((job.endTime - job.startTime) / 1000);
                  console.error(`[ORCHESTRATOR] Worker ${id} spawn error:`, err.message);
-                 // Retry on ENOENT (intermittent spawn failures)
-                 if (err.message.includes('ENOENT') && retryCount < maxRetries) {
-                     console.log(`[ORCHESTRATOR] Retrying worker (attempt ${retryCount + 2}/${maxRetries + 1})...`);
-                     await new Promise(r => setTimeout(r, 500)); // Small delay before retry
-                     const retryResult = await this.delegateToWorker(task, context, workingDir, retryCount + 1);
+                 // Retryable errors: ENOENT (command not found), EAGAIN (resource busy), ENOMEM (out of memory)
+                 const retryableErrors = ['ENOENT', 'EAGAIN', 'ENOMEM', 'ETIMEDOUT', 'ECONNRESET'];
+                 const isRetryable = retryableErrors.some(code => err.message.includes(code));
+                 if (isRetryable && retryCount < maxRetries) {
+                     console.log(`[ORCHESTRATOR] Retrying worker (attempt ${retryCount + 2}/${maxRetries + 1}) after ${err.message}...`);
+                     // Exponential backoff: 500ms, 1000ms, 2000ms
+                     const delay = 500 * Math.pow(2, retryCount);
+                     await new Promise(r => setTimeout(r, delay));
+                     const retryResult = await this.delegateToWorker(task, context, workingDir, retryCount + 1, timeoutMs);
                      resolve(retryResult);
                      return;
                  }
+                 // Log failure to memory
+                 await this.logWorkerFailure(id, task, err.message, duration);
+                 // Try to recover any partial output from log file
+                 const recoveredLog = this.recoverWorkerLog(id);
                  resolve({
                      success: false,
-                     output: `Worker error: ${err.message}`
+                     output: recoveredLog
+                         ? `Worker error: ${err.message}\n\n[Recovered from log]\n${recoveredLog}`
+                         : `Worker error: ${err.message}`
                  });
              });
          });
      }
+     /**
+      * Try to recover worker output from log file
+      * Workers are instructed to write logs to /tmp/worker-{id}-log.txt
+      */
+     recoverWorkerLog(workerId) {
+         const logFile = `/tmp/worker-${workerId}-log.txt`;
+         try {
+             if (existsSync(logFile)) {
+                 const content = readFileSync(logFile, 'utf8');
+                 if (content.trim()) {
+                     console.log(`[ORCHESTRATOR] Recovered ${content.length} bytes from worker log: ${logFile}`);
+                     return content;
+                 }
+             }
+         }
+         catch (error) {
+             console.log(`[ORCHESTRATOR] Could not read worker log: ${error}`);
+         }
+         return null;
+     }
+     /**
+      * Log worker failure to memory for learning
+      */
+     async logWorkerFailure(workerId, task, error, duration, recoveredOutput) {
+         try {
+             const content = `Worker ${workerId} failed after ${duration}s. Task: "${task.slice(0, 100)}..." Error: ${error}${recoveredOutput ? ` (recovered ${recoveredOutput.length} chars from log)` : ''}`;
+             await this.memory.remember(content, {
+                 type: 'episodic',
+                 source: `worker-failure:${workerId}`,
+                 importance: 0.6,
+                 tags: ['worker-failure', 'debugging', 'auto-logged']
+             });
+             console.log(`[ORCHESTRATOR] Logged worker failure to memory for future learning`);
+         }
+         catch (err) {
+             // Don't fail on memory errors
+             console.log(`[ORCHESTRATOR] Could not log failure to memory: ${err}`);
+         }
+     }
     /**
      * Extract memory and coordination contributions from worker output
      * Workers can contribute using:
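
The retry path above replaces the old fixed 500 ms delay with exponential backoff. A small sketch of the resulting schedule, mirroring the 500 * Math.pow(2, retryCount) expression from the diff:

// Backoff schedule used by the retry branch above.
const backoffMs = (retryCount) => 500 * Math.pow(2, retryCount);

for (let attempt = 0; attempt < 3; attempt++) {
    console.log(`retry ${attempt + 1}: wait ${backoffMs(attempt)} ms`);
}
// retry 1: wait 500 ms
// retry 2: wait 1000 ms
// retry 3: wait 2000 ms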
@@ -139,7 +139,19 @@ This ensures nothing is lost even if your output gets truncated.`;
          currentJob.status = code === 0 ? 'completed' : 'failed';
          currentJob.completed = new Date().toISOString();
          currentJob.exitCode = code ?? undefined;
-         currentJob.result = stdout.trim();
+         // Try to recover from log file if output is empty or failed
+         let finalResult = stdout.trim();
+         if ((code !== 0 || !finalResult) && fs.existsSync(logFile)) {
+             try {
+                 const logContent = fs.readFileSync(logFile, 'utf8');
+                 if (logContent.trim()) {
+                     console.log(`[WORKER] Recovered ${logContent.length} bytes from ${logFile}`);
+                     finalResult = logContent + (finalResult ? `\n\n[stdout]\n${finalResult}` : '');
+                 }
+             }
+             catch { /* ignore read errors */ }
+         }
+         currentJob.result = finalResult;
          if (stderr && code !== 0) {
              currentJob.error = stderr.trim();
          }
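
Both the orchestrator and this worker runner now fall back to the same /tmp/worker-{id}-log.txt file; per the docstring above, workers are instructed to write there so output survives truncation. A rough illustration of that convention from the writing side, hypothetical code rather than anything shipped in the package:

// Hypothetical helper showing the log-file convention the recovery paths rely on:
// append findings as you go, so a later readFileSync() can recover them even if
// the worker's stdout is lost or cut off.
import { appendFileSync } from 'fs';

function logProgress(workerId, line) {
    appendFileSync(`/tmp/worker-${workerId}-log.txt`, `${new Date().toISOString()} ${line}\n`);
}

logProgress('demo-1234', 'analysis complete: 4 findings written'); // 'demo-1234' is a made-up id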
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@siftd/connect-agent",
-   "version": "0.2.16",
+   "version": "0.2.18",
    "description": "Master orchestrator agent - control Claude Code remotely via web",
    "type": "module",
    "main": "dist/index.js",