@siftd/connect-agent 0.2.17 → 0.2.19

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/agent.js CHANGED
@@ -9,48 +9,32 @@ function stripAnsi(str) {
  return str.replace(/\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])/g, '');
  }
  /**
- * Actually perform a self-update - runs npm install and restarts
+ * Self-update: npm install latest and restart
+ * Called from webapp banner or update command
  */
  async function performSelfUpdate() {
- console.log('[AGENT] Starting self-update...');
+ console.log('[AGENT] === SELF-UPDATE STARTING ===');
  try {
- // Get current version
- let currentVersion = 'unknown';
- try {
- currentVersion = execSync('npm list -g @siftd/connect-agent --json 2>/dev/null | grep version || echo "unknown"', { encoding: 'utf8', shell: '/bin/bash' }).trim();
- }
- catch { /* ignore */ }
- console.log('[AGENT] Current version:', currentVersion);
+ // Just do it - npm install latest
  console.log('[AGENT] Running: npm install -g @siftd/connect-agent@latest');
- // Actually run the npm install
- const installOutput = execSync('npm install -g @siftd/connect-agent@latest 2>&1', {
+ execSync('npm install -g @siftd/connect-agent@latest', {
  encoding: 'utf8',
  shell: '/bin/bash',
- timeout: 120000 // 2 minute timeout
+ stdio: 'inherit', // Show output in real-time
+ timeout: 180000
  });
- console.log('[AGENT] Install output:', installOutput);
- // Get new version
- let newVersion = 'unknown';
- try {
- const versionOutput = execSync('npm list -g @siftd/connect-agent --depth=0 2>/dev/null', { encoding: 'utf8', shell: '/bin/bash' });
- const match = versionOutput.match(/@siftd\/connect-agent@([\d.]+)/);
- if (match)
- newVersion = match[1];
- }
- catch { /* ignore */ }
- console.log('[AGENT] New version:', newVersion);
- // Schedule restart
- console.log('[AGENT] Scheduling restart in 2 seconds...');
+ console.log('[AGENT] Update installed. Restarting in 2 seconds...');
+ // Restart the agent
  setTimeout(() => {
- console.log('[AGENT] Restarting...');
- process.exit(0); // Exit - systemd/pm2/user will restart
+ console.log('[AGENT] === RESTARTING ===');
+ process.exit(0);
  }, 2000);
- return `✅ Update complete!\n\nInstalled: @siftd/connect-agent@${newVersion}\n\nRestarting agent in 2 seconds...`;
+ return '✅ Update installed. Restarting...';
  }
  catch (error) {
- const errMsg = error instanceof Error ? error.message : String(error);
- console.error('[AGENT] Update failed:', errMsg);
- return `❌ Update failed: ${errMsg}\n\nYou may need to run manually:\nnpm install -g @siftd/connect-agent@latest`;
+ const msg = error instanceof Error ? error.message : String(error);
+ console.error('[AGENT] Update failed:', msg);
+ return `❌ Update failed: ${msg}`;
  }
  }
  // Conversation history for orchestrator mode
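Note that the simplified performSelfUpdate() still relies on something external restarting the process after process.exit(0), as the removed comment put it: systemd, pm2, or the user. For illustration only, a minimal pm2 ecosystem entry that would satisfy that assumption (the script name is a guess, not something shipped by this package):

// ecosystem.config.cjs (hypothetical supervisor config, not part of this package)
module.exports = {
  apps: [{
    name: 'connect-agent',
    script: 'connect-agent',   // assumed global bin name; adjust to your install
    autorestart: true,         // brings the agent back up after the self-update exit
  }],
};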
@@ -197,10 +181,9 @@ export async function processMessage(message) {
  );
  return response;
  }
- // Handle self-update requests - ACTUALLY run the update, don't just pretend
- if (content.includes('update') && content.includes('connect-agent') &&
- (content.includes('npm install') || content.includes('latest'))) {
- console.log('[AGENT] Self-update request detected - forcing actual execution');
+ // Handle self-update requests - trigger on "update" keyword
+ if (content.includes('update') && (content.includes('agent') || content.includes('yourself') || content.includes('latest'))) {
+ console.log('[AGENT] Update request detected');
  return await performSelfUpdate();
  }
  try {
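The trigger is noticeably broader in 0.2.19: it no longer requires the package name or an npm command in the message, only "update" plus one of "agent", "yourself", or "latest". A few illustrative inputs, assuming content holds the incoming message text (how it is normalized is not shown in this hunk):

// Sketch of the new condition, for illustration only
const triggers = (content) =>
  content.includes('update') &&
  (content.includes('agent') || content.includes('yourself') || content.includes('latest'));

triggers('please update yourself');             // true
triggers('update the agent when you can');      // true
triggers('install the latest connect-agent');   // false: no "update" keyword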
package/dist/heartbeat.js CHANGED
@@ -10,7 +10,7 @@ import { hostname } from 'os';
  import { createHash } from 'crypto';
  import { getServerUrl, getAgentToken, getUserId, isCloudMode } from './config.js';
  const HEARTBEAT_INTERVAL = 10000; // 10 seconds
- const VERSION = '0.2.17'; // Should match package.json
+ const VERSION = '0.2.19'; // Should match package.json
  const state = {
  intervalId: null,
  runnerId: null,
@@ -106,6 +106,15 @@ export declare class MasterOrchestrator {
  * @param timeoutMs - Timeout in milliseconds (default: 30 minutes, max: 60 minutes)
  */
  private delegateToWorker;
+ /**
+ * Try to recover worker output from log file
+ * Workers are instructed to write logs to /tmp/worker-{id}-log.txt
+ */
+ private recoverWorkerLog;
+ /**
+ * Log worker failure to memory for learning
+ */
+ private logWorkerFailure;
  /**
  * Extract memory and coordination contributions from worker output
  * Workers can contribute using:
@@ -6,7 +6,7 @@
  */
  import Anthropic from '@anthropic-ai/sdk';
  import { spawn, execSync } from 'child_process';
- import { existsSync } from 'fs';
+ import { existsSync, readFileSync } from 'fs';
  import { AdvancedMemoryStore } from './core/memory-advanced.js';
  import { PostgresMemoryStore, isPostgresConfigured } from './core/memory-postgres.js';
  import { TaskScheduler } from './core/scheduler.js';
@@ -917,10 +917,11 @@ This enables parallel workers to coordinate.`;
  this.jobs.set(id, job);
  // Configurable timeout (default 30 min, max 60 min)
  const timeoutMinutes = Math.round(workerTimeout / 60000);
- const timeout = setTimeout(() => {
+ const timeout = setTimeout(async () => {
  if (job.status === 'running') {
  job.status = 'timeout';
  job.endTime = Date.now();
+ const duration = Math.round((job.endTime - job.startTime) / 1000);
  // Send SIGINT first to allow graceful shutdown and output flush
  child.kill('SIGINT');
  // Give 5 seconds for graceful shutdown before SIGTERM
@@ -929,14 +930,26 @@ This enables parallel workers to coordinate.`;
  child.kill('SIGTERM');
  }
  }, 5000);
+ // Wait a moment for graceful shutdown to flush logs
+ await new Promise(r => setTimeout(r, 2000));
  const partialOutput = job.output.trim();
- // Check for log file that worker should have created
- const logFile = `/tmp/worker-${id}-log.txt`;
+ // Try to recover from log file
+ const recoveredLog = this.recoverWorkerLog(id);
+ // Log failure to memory for learning
+ await this.logWorkerFailure(id, task, `Timeout after ${timeoutMinutes} minutes`, duration, recoveredLog || undefined);
+ // Build combined output
+ let combinedOutput = '';
+ if (recoveredLog) {
+ combinedOutput = `[Recovered from log file]\n${recoveredLog}\n\n`;
+ }
+ if (partialOutput) {
+ combinedOutput += `[Partial stdout]\n${partialOutput.slice(-3000)}`;
+ }
  resolve({
  success: false,
- output: partialOutput
- ? `Worker timed out after ${timeoutMinutes} minutes. Check ${logFile} for full logs. Partial findings:\n${partialOutput.slice(-3000)}`
- : `Worker timed out after ${timeoutMinutes} minutes with no output. Check ${logFile} for any saved progress.`
+ output: combinedOutput
+ ? `Worker timed out after ${timeoutMinutes} minutes. Recovered output:\n${combinedOutput}`
+ : `Worker timed out after ${timeoutMinutes} minutes with no recoverable output.`
  });
  }
  }, workerTimeout);
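Taken with the previous hunk, the timeout path now asks the worker to shut down before force-killing it: SIGINT at the deadline, SIGTERM five seconds later if the process is still alive, with the log-file recovery read about two seconds after the SIGINT. A standalone sketch of that escalation pattern (the helper name and the exitCode check are illustrative; the real logic is inline in delegateToWorker and checks job.status):

// Minimal sketch of the graceful-kill escalation used by the timeout handler
function killGracefully(child, graceMs = 5000) {
  child.kill('SIGINT');                                   // ask the worker to flush logs and exit
  const escalate = setTimeout(() => {
    if (child.exitCode === null) child.kill('SIGTERM');   // still running after the grace period
  }, graceMs);
  child.once('exit', () => clearTimeout(escalate));
}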
@@ -954,38 +967,98 @@ This enables parallel workers to coordinate.`;
  clearTimeout(timeout);
  job.status = code === 0 ? 'completed' : 'failed';
  job.endTime = Date.now();
- const duration = ((job.endTime - job.startTime) / 1000).toFixed(1);
+ const duration = Math.round((job.endTime - job.startTime) / 1000);
  console.log(`[ORCHESTRATOR] Worker ${id} finished in ${duration}s (code: ${code})`);
- if (code !== 0 || job.output.length === 0) {
- console.log(`[ORCHESTRATOR] Worker ${id} output: ${job.output.slice(0, 200) || '(empty)'}`);
+ let finalOutput = job.output.trim();
+ if (code !== 0 || finalOutput.length === 0) {
+ console.log(`[ORCHESTRATOR] Worker ${id} output: ${finalOutput.slice(0, 200) || '(empty)'}`);
+ // Try to recover from log file on failure or empty output
+ const recoveredLog = this.recoverWorkerLog(id);
+ if (recoveredLog) {
+ finalOutput = recoveredLog + (finalOutput ? `\n\n[Additional stdout]\n${finalOutput}` : '');
+ }
+ // Log failure to memory
+ if (code !== 0) {
+ await this.logWorkerFailure(id, task, `Exit code ${code}`, duration, recoveredLog || undefined);
+ }
  }
  // Extract and store memory contributions from worker output
- await this.extractWorkerMemories(job.output, id);
+ await this.extractWorkerMemories(finalOutput, id);
  resolve({
  success: code === 0,
- output: job.output.trim() || '(No output)'
+ output: finalOutput || '(No output)'
  });
  });
  child.on('error', async (err) => {
  clearTimeout(timeout);
  job.status = 'failed';
  job.endTime = Date.now();
+ const duration = Math.round((job.endTime - job.startTime) / 1000);
  console.error(`[ORCHESTRATOR] Worker ${id} spawn error:`, err.message);
- // Retry on ENOENT (intermittent spawn failures)
- if (err.message.includes('ENOENT') && retryCount < maxRetries) {
- console.log(`[ORCHESTRATOR] Retrying worker (attempt ${retryCount + 2}/${maxRetries + 1})...`);
- await new Promise(r => setTimeout(r, 500)); // Small delay before retry
- const retryResult = await this.delegateToWorker(task, context, workingDir, retryCount + 1);
+ // Retryable errors: ENOENT (command not found), EAGAIN (resource busy), ENOMEM (out of memory)
+ const retryableErrors = ['ENOENT', 'EAGAIN', 'ENOMEM', 'ETIMEDOUT', 'ECONNRESET'];
+ const isRetryable = retryableErrors.some(code => err.message.includes(code));
+ if (isRetryable && retryCount < maxRetries) {
+ console.log(`[ORCHESTRATOR] Retrying worker (attempt ${retryCount + 2}/${maxRetries + 1}) after ${err.message}...`);
+ // Exponential backoff: 500ms, 1000ms, 2000ms
+ const delay = 500 * Math.pow(2, retryCount);
+ await new Promise(r => setTimeout(r, delay));
+ const retryResult = await this.delegateToWorker(task, context, workingDir, retryCount + 1, timeoutMs);
  resolve(retryResult);
  return;
  }
+ // Log failure to memory
+ await this.logWorkerFailure(id, task, err.message, duration);
+ // Try to recover any partial output from log file
+ const recoveredLog = this.recoverWorkerLog(id);
  resolve({
  success: false,
- output: `Worker error: ${err.message}`
+ output: recoveredLog
+ ? `Worker error: ${err.message}\n\n[Recovered from log]\n${recoveredLog}`
+ : `Worker error: ${err.message}`
  });
  });
  });
  }
+ /**
+ * Try to recover worker output from log file
+ * Workers are instructed to write logs to /tmp/worker-{id}-log.txt
+ */
+ recoverWorkerLog(workerId) {
+ const logFile = `/tmp/worker-${workerId}-log.txt`;
+ try {
+ if (existsSync(logFile)) {
+ const content = readFileSync(logFile, 'utf8');
+ if (content.trim()) {
+ console.log(`[ORCHESTRATOR] Recovered ${content.length} bytes from worker log: ${logFile}`);
+ return content;
+ }
+ }
+ }
+ catch (error) {
+ console.log(`[ORCHESTRATOR] Could not read worker log: ${error}`);
+ }
+ return null;
+ }
+ /**
+ * Log worker failure to memory for learning
+ */
+ async logWorkerFailure(workerId, task, error, duration, recoveredOutput) {
+ try {
+ const content = `Worker ${workerId} failed after ${duration}s. Task: "${task.slice(0, 100)}..." Error: ${error}${recoveredOutput ? ` (recovered ${recoveredOutput.length} chars from log)` : ''}`;
+ await this.memory.remember(content, {
+ type: 'episodic',
+ source: `worker-failure:${workerId}`,
+ importance: 0.6,
+ tags: ['worker-failure', 'debugging', 'auto-logged']
+ });
+ console.log(`[ORCHESTRATOR] Logged worker failure to memory for future learning`);
+ }
+ catch (err) {
+ // Don't fail on memory errors
+ console.log(`[ORCHESTRATOR] Could not log failure to memory: ${err}`);
+ }
+ }
  /**
  * Extract memory and coordination contributions from worker output
@@ -139,7 +139,19 @@ This ensures nothing is lost even if your output gets truncated.`;
  currentJob.status = code === 0 ? 'completed' : 'failed';
  currentJob.completed = new Date().toISOString();
  currentJob.exitCode = code ?? undefined;
- currentJob.result = stdout.trim();
+ // Try to recover from log file if output is empty or failed
+ let finalResult = stdout.trim();
+ if ((code !== 0 || !finalResult) && fs.existsSync(logFile)) {
+ try {
+ const logContent = fs.readFileSync(logFile, 'utf8');
+ if (logContent.trim()) {
+ console.log(`[WORKER] Recovered ${logContent.length} bytes from ${logFile}`);
+ finalResult = logContent + (finalResult ? `\n\n[stdout]\n${finalResult}` : '');
+ }
+ }
+ catch { /* ignore read errors */ }
+ }
+ currentJob.result = finalResult;
  if (stderr && code !== 0) {
  currentJob.error = stderr.trim();
  }
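Both recovery paths assume the worker process actually appends to /tmp/worker-{id}-log.txt while it runs. A hypothetical sketch of what that could look like from the worker's side (WORKER_ID and the log() helper are inventions for illustration, not APIs of this package):

import { appendFileSync } from 'fs';

const logFile = `/tmp/worker-${process.env.WORKER_ID ?? 'unknown'}-log.txt`;

function log(line) {
  console.log(line);                      // normal stdout, may be lost if the process is killed
  appendFileSync(logFile, line + '\n');   // durable copy that recoverWorkerLog() can read back
}

log('worker started');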
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@siftd/connect-agent",
- "version": "0.2.17",
+ "version": "0.2.19",
  "description": "Master orchestrator agent - control Claude Code remotely via web",
  "type": "module",
  "main": "dist/index.js",