ralphblaster-agent 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +294 -0
- package/bin/agent-dashboard.sh +168 -0
- package/bin/monitor-agent.sh +264 -0
- package/bin/ralphblaster.js +247 -0
- package/package.json +64 -0
- package/postinstall-colored.js +66 -0
- package/src/api-client.js +764 -0
- package/src/claude-plugin/.claude-plugin/plugin.json +9 -0
- package/src/claude-plugin/README.md +42 -0
- package/src/claude-plugin/skills/ralph/SKILL.md +259 -0
- package/src/commands/add-project.js +257 -0
- package/src/commands/init.js +79 -0
- package/src/config-file-manager.js +84 -0
- package/src/config.js +66 -0
- package/src/error-window.js +86 -0
- package/src/executor/claude-runner.js +716 -0
- package/src/executor/error-handler.js +65 -0
- package/src/executor/git-helper.js +196 -0
- package/src/executor/index.js +296 -0
- package/src/executor/job-handlers/clarifying-questions.js +213 -0
- package/src/executor/job-handlers/code-execution.js +145 -0
- package/src/executor/job-handlers/prd-generation.js +259 -0
- package/src/executor/path-helper.js +74 -0
- package/src/executor/prompt-validator.js +51 -0
- package/src/executor.js +4 -0
- package/src/index.js +342 -0
- package/src/logger.js +193 -0
- package/src/logging/README.md +93 -0
- package/src/logging/config.js +179 -0
- package/src/logging/destinations/README.md +290 -0
- package/src/logging/destinations/api-destination-unbatched.js +118 -0
- package/src/logging/destinations/api-destination.js +40 -0
- package/src/logging/destinations/base-destination.js +85 -0
- package/src/logging/destinations/batched-destination.js +198 -0
- package/src/logging/destinations/console-destination.js +172 -0
- package/src/logging/destinations/file-destination.js +208 -0
- package/src/logging/destinations/index.js +29 -0
- package/src/logging/destinations/progress-batch-destination-unbatched.js +92 -0
- package/src/logging/destinations/progress-batch-destination.js +41 -0
- package/src/logging/formatter.js +288 -0
- package/src/logging/log-manager.js +426 -0
- package/src/progress-throttle.js +101 -0
- package/src/system-monitor.js +64 -0
- package/src/utils/format.js +16 -0
- package/src/utils/log-file-helper.js +265 -0
- package/src/utils/progress-parser.js +250 -0
- package/src/worktree-manager.js +255 -0
|
@@ -0,0 +1,764 @@
|
|
|
1
|
+
const axios = require('axios');
|
|
2
|
+
const config = require('./config');
|
|
3
|
+
const logger = require('./logger');
|
|
4
|
+
const SystemMonitor = require('./system-monitor');
|
|
5
|
+
const packageJson = require('../package.json');
|
|
6
|
+
|
|
7
|
+
// Agent version from package.json
const AGENT_VERSION = packageJson.version;

// Timeout constants
const SERVER_LONG_POLL_TIMEOUT_S = 10; // Server waits up to 10s for job (matches server MAX_LONG_POLL_TIMEOUT)
const NETWORK_BUFFER_MS = 5000; // 5s buffer for network latency
const LONG_POLLING_TIMEOUT_MS = (SERVER_LONG_POLL_TIMEOUT_S * 1000) + NETWORK_BUFFER_MS; // 15s
const REGULAR_API_TIMEOUT_MS = 15000; // 15s for regular API calls
const BATCH_API_TIMEOUT_MS = 30000; // 30s for batch operations

// API endpoint versions (the client tries the new prefix first and falls
// back to the legacy prefix on 404 — see ApiClient.requestWithFallback)
const NEW_API_PREFIX = '/api/v1/rb';
const OLD_API_PREFIX = '/api/v1/ralph';
|
|
20
|
+
|
|
21
|
+
class ApiClient {
  /**
   * HTTP client for the job API.
   *
   * Wraps axios with:
   *  - auth/identity headers injected via a request interceptor (so the
   *    token never appears in serialized request configs),
   *  - token redaction on errors via a response interceptor,
   *  - automatic fallback from new (/api/v1/rb) to legacy (/api/v1/ralph)
   *    endpoints on 404,
   *  - retry with per-endpoint-category rate-limit backoff,
   *  - client-side batching of progress updates.
   *
   * @param {string} agentId - Identifier sent as the X-Agent-ID header
   */
  constructor(agentId = 'agent-default') {
    this.agentId = agentId;
    this.useNewEndpoints = true; // Start with new endpoints, fall back if needed

    // System monitoring for dynamic capacity reporting
    this.systemMonitor = new SystemMonitor();

    // Rate limiting backoff tracking per endpoint category.
    // Values are epoch-ms timestamps; requests in a category wait until
    // Date.now() passes the stored value.
    this.rateLimitBackoff = {
      jobs: 0,     // /jobs/* endpoints
      progress: 0, // /jobs/*/progress endpoints
      events: 0,   // /jobs/*/events endpoints
      metadata: 0  // /jobs/*/metadata endpoints
    };

    // Progress batching
    this.progressBuffer = new Map(); // jobId -> [{chunk, timestamp, metadata?}, ...]
    this.progressTimers = new Map(); // jobId -> timer
    this.BATCH_INTERVAL_MS = 200; // Send batches every 200ms
    this.MAX_BATCH_SIZE = 50;     // Max chunks per batch

    this.client = axios.create({
      baseURL: config.apiUrl,
      headers: {
        'Content-Type': 'application/json'
      },
      timeout: REGULAR_API_TIMEOUT_MS
    });

    // Add Authorization header via interceptor to prevent token exposure in logs
    this.client.interceptors.request.use((requestConfig) => {
      requestConfig.headers.Authorization = `Bearer ${config.apiToken}`;
      requestConfig.headers['X-Agent-Version'] = AGENT_VERSION;
      requestConfig.headers['X-Agent-ID'] = this.agentId;
      // Add dynamic capacity based on system resources
      requestConfig.headers['X-Agent-Capacity'] = this.systemMonitor.getCapacity();
      return requestConfig;
    });

    // Sanitize errors to prevent token leakage in stack traces
    this.client.interceptors.response.use(
      response => response,
      error => {
        // Remove auth header from error config before it gets logged
        if (error.config && error.config.headers) {
          error.config.headers.Authorization = 'Bearer [REDACTED]';
        }
        // Also redact from response config if present
        if (error.response && error.response.config && error.response.config.headers) {
          error.response.config.headers.Authorization = 'Bearer [REDACTED]';
        }
        return Promise.reject(error);
      }
    );
  }

  /**
   * Get endpoint category for rate limit tracking.
   * @param {string} path - API path
   * @returns {string} Category name ('progress', 'events', 'metadata', or 'jobs')
   */
  getEndpointCategory(path) {
    if (path.includes('/progress')) return 'progress';
    if (path.includes('/events')) return 'events';
    if (path.includes('/metadata')) return 'metadata';
    return 'jobs';
  }

  /**
   * Check if error is retryable.
   * Network errors (no response) and transient HTTP statuses qualify.
   * @param {Error} error - Error object (typically an axios error)
   * @returns {boolean} True if should retry
   */
  isRetryableError(error) {
    if (!error.response) return true; // Network error - retry
    const status = error.response.status;
    // Retry on rate limits, timeouts, and server errors
    return status === 408 || status === 429 || status === 502 || status === 503 || status === 504;
  }

  /**
   * Make an API request with automatic retry and rate limit handling.
   * Honors the Retry-After header on 429s and applies exponential backoff
   * otherwise; backoff windows are remembered per endpoint category so
   * subsequent calls in the same category also wait.
   * @param {string} method - HTTP method (get, post, patch, etc.)
   * @param {string} path - Endpoint path (e.g., '/jobs/next' or '/jobs/{id}')
   * @param {Object} data - Request data (for POST/PATCH)
   * @param {Object} config - Axios config options
   * @param {number} maxRetries - Maximum retry attempts (default: 3)
   * @returns {Promise<Object>} Response object
   * @throws The last error when retries are exhausted or the error is not retryable
   */
  async requestWithRetry(method, path, data = null, config = null, maxRetries = 3) {
    const category = this.getEndpointCategory(path);

    for (let attempt = 0; attempt <= maxRetries; attempt++) {
      try {
        // Check if we're in backoff period for this endpoint category
        const backoffUntil = this.rateLimitBackoff[category] || 0;
        const now = Date.now();

        if (now < backoffUntil) {
          const waitMs = backoffUntil - now;
          logger.warn(`Rate limit backoff active for ${category}, waiting ${waitMs}ms`);
          await new Promise(resolve => setTimeout(resolve, waitMs));
        }

        // Make the request using existing fallback logic
        return await this.requestWithFallback(method, path, data, config);

      } catch (error) {
        const isLastAttempt = attempt === maxRetries;

        // Handle rate limiting
        if (error.response?.status === 429) {
          // Prefer the Retry-After header (seconds). Parse with an explicit
          // radix and guard against non-numeric values — the header may
          // legally be an HTTP-date, which parseInt cannot interpret — and
          // fall back to exponential backoff in that case.
          const retryAfterSecs = Number.parseInt(error.response.headers['retry-after'], 10);
          const backoffMs = Number.isFinite(retryAfterSecs) && retryAfterSecs >= 0
            ? retryAfterSecs * 1000
            : Math.min(1000 * Math.pow(2, attempt), 30000); // 1s, 2s, 4s, max 30s

          logger.warn(
            `Rate limited on ${path} (attempt ${attempt + 1}/${maxRetries + 1}), ` +
            `backing off ${backoffMs}ms`
          );

          // Set backoff for this endpoint category
          this.rateLimitBackoff[category] = Date.now() + backoffMs;

          if (!isLastAttempt) {
            await new Promise(resolve => setTimeout(resolve, backoffMs));
            continue;
          }
        }

        // Handle other retryable errors
        if (this.isRetryableError(error) && !isLastAttempt) {
          const backoffMs = 1000 * Math.pow(2, attempt); // Exponential: 1s, 2s, 4s
          logger.warn(
            `Retryable error on ${path} (${error.message}), ` +
            `retry ${attempt + 1}/${maxRetries + 1} after ${backoffMs}ms`
          );
          await new Promise(resolve => setTimeout(resolve, backoffMs));
          continue;
        }

        // Not retryable or last attempt - throw
        throw error;
      }
    }
  }

  /**
   * Make an API request with automatic fallback from new to old endpoints.
   * On the first 404 against a new-style endpoint, permanently switches the
   * client to the legacy prefix and retries the same request once.
   * @param {string} method - HTTP method (get, post, patch, etc.)
   * @param {string} path - Endpoint path (e.g., '/jobs/next' or '/jobs/{id}')
   * @param {Object} data - Request data (for POST/PATCH)
   * @param {Object} config - Axios config options
   * @returns {Promise<Object>} Response object
   */
  async requestWithFallback(method, path, data = null, config = null) {
    const newEndpoint = `${NEW_API_PREFIX}${path}`;
    const oldEndpoint = `${OLD_API_PREFIX}${path}`;

    // Build axios positional args for the given endpoint. Only truthy
    // data/config are appended, matching axios' (url[, data][, config])
    // signatures for both body-less and body-carrying methods.
    const buildArgs = (endpoint) => {
      const args = [endpoint];
      if (data) args.push(data);
      if (config) args.push(config);
      return args;
    };

    try {
      // Try new endpoint first (unless a previous 404 already demoted us)
      const endpoint = this.useNewEndpoints ? newEndpoint : oldEndpoint;
      logger.debug(`API request: ${method.toUpperCase()} ${endpoint}`);

      const response = await this.client[method](...buildArgs(endpoint));

      // If we successfully used new endpoints, log once
      if (this.useNewEndpoints && endpoint === newEndpoint) {
        logger.debug('Using new /api/v1/rb/* endpoints');
      }

      return response;
    } catch (error) {
      // If we got a 404 and we were trying new endpoints, fall back to old
      if (error.response?.status === 404 && this.useNewEndpoints) {
        logger.info('New endpoint not found, falling back to legacy /api/v1/ralph/* endpoints');
        this.useNewEndpoints = false;

        // Retry the identical request against the legacy endpoint
        return await this.client[method](...buildArgs(oldEndpoint));
      }

      // Re-throw all other errors
      throw error;
    }
  }

  /**
   * Poll for next available job (with long polling).
   * Never throws for "no job" conditions; returns null instead so the
   * caller's poll loop can simply continue.
   * @returns {Promise<Object|null>} Job object or null if no jobs available
   * @throws {Error} Only when the API token lacks the required permission (403)
   */
  async getNextJob() {
    try {
      logger.info(`Polling for next job (long poll timeout: ${SERVER_LONG_POLL_TIMEOUT_S}s)...`);
      const response = await this.requestWithFallback('get', '/jobs/next', null, {
        params: { timeout: SERVER_LONG_POLL_TIMEOUT_S }, // Server holds the request up to 10s waiting for a job
        timeout: LONG_POLLING_TIMEOUT_MS // Client waits slightly longer: server timeout + 5s network buffer (15s)
      });

      if (response.status === 204) {
        // No jobs available
        logger.info('No jobs available (HTTP 204)');
        return null;
      }

      if (response.data && response.data.success) {
        const job = response.data.job;

        // Validate job object before handing it to the executor
        const validationError = this.validateJob(job);
        if (validationError) {
          logger.error(`Invalid job received from API: ${validationError}`);
          return null;
        }

        logger.info(`✓ Claimed job #${job.id} - ${job.task_title}`);

        // Log full job details (upgraded from debug to info for visibility)
        logger.info('Job details:', {
          id: job.id,
          job_type: job.job_type,
          task_title: job.task_title,
          project_name: job.project?.name,
          has_prompt: !!job.prompt,
          prompt_length: job.prompt?.length || 0
        });

        return job;
      }

      logger.warn('Unexpected response from API', response.data);
      return null;
    } catch (error) {
      if (error.response?.status === 204) {
        // No jobs available
        logger.info('No jobs available (HTTP 204)');
        return null;
      }

      if (error.response?.status === 403) {
        logger.error('API token lacks rb_agent permission');
        throw new Error('Invalid API token permissions');
      }

      if (error.code === 'ECONNREFUSED') {
        logger.error(`Cannot connect to API at ${config.apiUrl}`);
        return null;
      }

      logger.error('Error fetching next job: ' + error.message);
      return null;
    }
  }

  /**
   * Update job status to running.
   * @param {number} jobId - Job ID
   * @throws Propagates API errors so the caller can abort the job
   */
  async markJobRunning(jobId) {
    try {
      await this.requestWithFallback('patch', `/jobs/${jobId}`, {
        status: 'running'
      });
      logger.info(`Job #${jobId} marked as running`);
    } catch (error) {
      logger.error(`Error marking job #${jobId} as running: ${error.message}`);
      throw error;
    }
  }

  /**
   * Validate and truncate output to prevent excessive data transmission.
   * NOTE: maxSize is measured in UTF-16 code units (String.length), not
   * raw bytes.
   * @param {string} output - Output string to validate
   * @param {number} maxSize - Maximum length in UTF-16 code units (default 10M)
   * @returns {string} Validated/truncated output ('' for non-strings)
   * @throws {Error} If the output contains null bytes
   */
  validateOutput(output, maxSize = 10 * 1024 * 1024) {
    if (typeof output !== 'string') {
      return '';
    }

    // Security: Reject output containing null bytes (potential injection attack)
    if (output.includes('\0')) {
      logger.error('Output contains null bytes - rejecting for security');
      throw new Error('Output validation failed: null bytes detected');
    }

    if (output.length > maxSize) {
      logger.warn(`Output truncated from ${output.length} to ${maxSize} characters`);
      return output.substring(0, maxSize) + '\n\n[OUTPUT TRUNCATED - EXCEEDED MAX SIZE]';
    }

    return output;
  }

  /**
   * Update job status to completed.
   * Flushes any buffered progress first, then PATCHes the job with
   * completion fields (summary, PRD content, branch name, git activity).
   * @param {number} jobId - Job ID
   * @param {Object} result - Job result containing output, summary, etc.
   * @throws Propagates API errors after logging them
   */
  async markJobCompleted(jobId, result) {
    // Flush any remaining progress updates before marking complete
    await this.flushProgressBuffer(jobId);

    try {
      logger.debug(`Building completion payload for job #${jobId}...`, {
        hasOutput: !!result.output,
        outputLength: result.output?.length || 0,
        hasPrdContent: !!result.prdContent,
        prdContentLength: result.prdContent?.length || 0,
        hasSummary: !!result.summary,
        hasBranchName: !!result.branchName,
        hasGitActivity: !!result.gitActivity,
        executionTimeMs: result.executionTimeMs
      });

      const payload = {
        status: 'completed',
        // Phase 2.2: REMOVED output (already streamed via progress_batch)
        execution_time_ms: result.executionTimeMs
      };

      // Add job-type specific fields with validation
      if (result.prdContent) {
        logger.debug('Adding PRD content to payload', { length: result.prdContent.length });
        payload.prd_content = this.validateOutput(result.prdContent);
      }
      if (result.summary) {
        logger.debug('Adding summary to payload', { length: result.summary.length });
        payload.summary = this.validateOutput(result.summary, 10000); // 10KB max
      }
      if (result.branchName) {
        // Validate branch name format following git branch naming rules:
        // - Must start with alphanumeric
        // - Can contain alphanumeric, dash, underscore
        // - Can contain forward slash for hierarchical names (e.g., feature/foo)
        // - Each segment must start with alphanumeric (not dash/slash)
        // - Max length 200 characters
        if (!/^[a-zA-Z0-9][a-zA-Z0-9_-]*(?:\/[a-zA-Z0-9][a-zA-Z0-9_-]*)*$/.test(result.branchName) ||
            result.branchName.length > 200) {
          logger.warn('Invalid branch name format, omitting from payload', { branchName: result.branchName });
        } else {
          logger.debug('Adding branch name to payload', { branchName: result.branchName });
          payload.branch_name = result.branchName;
        }
      }

      // Add git activity metadata
      if (result.gitActivity) {
        logger.debug('Adding git activity to payload', result.gitActivity);
        payload.git_activity = {
          commit_count: result.gitActivity.commitCount || 0,
          last_commit: result.gitActivity.lastCommit || null,
          changes: result.gitActivity.changes || null,
          pushed_to_remote: result.gitActivity.pushedToRemote || false,
          has_uncommitted_changes: result.gitActivity.hasUncommittedChanges || false
        };
      }

      logger.debug('Sending PATCH request to mark job as completed...', {
        endpoint: `/jobs/${jobId}`,
        payloadSize: JSON.stringify(payload).length
      });

      await this.requestWithRetry('patch', `/jobs/${jobId}`, payload, null, 3);
      logger.info(`✓ Job #${jobId} successfully marked as completed in API`);
    } catch (error) {
      logger.error(`✗ Failed to mark job #${jobId} as completed in API`, {
        error: error.message,
        statusCode: error.response?.status,
        responseData: error.response?.data,
        stack: error.stack?.split('\n').slice(0, 3).join('\n')
      });
      throw error;
    }
  }

  /**
   * Update job status to failed.
   * Flushes buffered progress first. Never throws — if the failure report
   * itself fails, the error is logged and swallowed so the agent can
   * continue with other work.
   * @param {number} jobId - Job ID
   * @param {Error|string} error - Error object or error message
   * @param {string} partialOutput - Partial output if any
   */
  async markJobFailed(jobId, error, partialOutput = null) {
    // Flush any remaining progress updates before marking failed
    await this.flushProgressBuffer(jobId);

    try {
      // Support both Error objects and string messages for backward compatibility
      const errorMessage = typeof error === 'string' ? error : error.message || String(error);

      logger.debug('Building failure payload...', {
        errorMessage: errorMessage,
        errorType: typeof error,
        hasPartialOutput: !!(partialOutput || error.partialOutput),
        partialOutputLength: (partialOutput || error.partialOutput)?.length || 0
      });

      const payload = {
        status: 'failed',
        error: errorMessage,
        output: partialOutput || error.partialOutput || null
      };

      // Add error categorization if available (from enriched Error objects)
      if (typeof error === 'object' && error !== null) {
        if (error.category) {
          payload.error_category = error.category;
          logger.debug('Error category identified', { category: error.category });
        }
        if (error.technicalDetails) {
          payload.error_details = error.technicalDetails;
          logger.debug('Technical details available', {
            detailsLength: error.technicalDetails.length
          });
        }
        if (error.stack) {
          logger.debug('Error stack trace (first 5 lines):', {
            stack: error.stack.split('\n').slice(0, 5).join('\n')
          });
        }
      }

      logger.debug('Sending PATCH request to mark job as failed...', {
        endpoint: `/jobs/${jobId}`,
        errorCategory: payload.error_category || 'unknown',
        hasErrorDetails: !!payload.error_details
      });

      await this.requestWithRetry('patch', `/jobs/${jobId}`, payload, null, 3);
      logger.info(`✓ Job #${jobId} successfully marked as failed in API with category: ${payload.error_category || 'unknown'}`);
    } catch (apiError) {
      logger.error(`✗ Failed to mark job #${jobId} as failed in API (meta-failure!)`, {
        originalError: typeof error === 'string' ? error : error.message,
        apiError: apiError.message,
        statusCode: apiError.response?.status,
        responseData: apiError.response?.data
      });
      // Don't throw - we want to continue even if this fails
    }
  }

  /**
   * Send heartbeat to keep job alive (updates claimed_at).
   * Optionally includes status event data to reduce API calls.
   * Best-effort: failures are logged at warn level and swallowed.
   * @param {number} jobId - Job ID
   * @param {Object} statusEvent - Optional {event_type, message, metadata}
   */
  async sendHeartbeat(jobId, statusEvent = null) {
    try {
      const payload = {
        status: 'running',
        heartbeat: true // Distinguish from initial markJobRunning call
      };

      // Phase 1.2: Include status event if provided (reduces API calls by 50%)
      if (statusEvent) {
        payload.status_event = statusEvent;
      }

      await this.requestWithFallback('patch', `/jobs/${jobId}`, payload);
      logger.debug(`Heartbeat sent for job #${jobId}${statusEvent ? ' (with event)' : ''}`);
    } catch (error) {
      logger.warn(`Error sending heartbeat for job #${jobId}: ${error.message}`);
    }
  }

  /**
   * Send progress update for job (streaming Claude output).
   * Batches chunks for efficiency: flushes immediately when the per-job
   * buffer reaches MAX_BATCH_SIZE, otherwise schedules a flush after
   * BATCH_INTERVAL_MS.
   * @param {number} jobId - Job ID
   * @param {string} chunk - Output chunk
   * @param {Object} metadata - Optional metadata for milestones/events
   */
  async sendProgress(jobId, chunk, metadata = null) {
    // Initialize buffer for this job if needed
    if (!this.progressBuffer.has(jobId)) {
      this.progressBuffer.set(jobId, []);
    }

    // Add chunk to buffer with timestamp and optional metadata
    const buffer = this.progressBuffer.get(jobId);
    const entry = {
      chunk,
      timestamp: Date.now()
    };

    // Phase 2.3: Include metadata for milestones/events (reduces API calls)
    if (metadata && Object.keys(metadata).length > 0) {
      entry.metadata = metadata;
    }

    buffer.push(entry);

    // Flush immediately if buffer is full
    if (buffer.length >= this.MAX_BATCH_SIZE) {
      await this.flushProgressBuffer(jobId);
      return;
    }

    // Otherwise, schedule batch send if not already scheduled
    if (!this.progressTimers.has(jobId)) {
      const timer = setTimeout(() => {
        this.flushProgressBuffer(jobId).catch(err => {
          logger.debug(`Error flushing progress buffer: ${err.message}`);
        });
      }, this.BATCH_INTERVAL_MS);
      this.progressTimers.set(jobId, timer);
    }
  }

  /**
   * Flush buffered progress updates for a job.
   * Clears the pending timer and empties the buffer before the network
   * call so concurrent flushes never double-send the same chunks.
   * Best-effort: send failures are logged and swallowed.
   * @param {number} jobId - Job ID
   */
  async flushProgressBuffer(jobId) {
    // Clear timer if it exists
    const timer = this.progressTimers.get(jobId);
    if (timer) {
      clearTimeout(timer);
      this.progressTimers.delete(jobId);
    }

    // Get buffer
    const buffer = this.progressBuffer.get(jobId);
    if (!buffer || buffer.length === 0) return;

    // Clear buffer immediately to prevent duplicates
    this.progressBuffer.set(jobId, []);

    try {
      // Send batched updates
      await this.requestWithRetry('post', `/jobs/${jobId}/progress_batch`, {
        updates: buffer
      }, null, 2);

      logger.debug(`Batched ${buffer.length} progress updates for job #${jobId}`);
    } catch (error) {
      logger.warn(`Error sending batched progress for job #${jobId}: ${error.message}`);
      // Don't throw - progress updates are best-effort
    }
  }

  /**
   * Send status event for job (structured progress updates for UI visibility).
   * Best-effort: failures are logged at warn level and swallowed.
   * @param {number} jobId - Job ID
   * @param {string} eventType - Event type (e.g., 'setup_started', 'file_modified', 'progress_update')
   * @param {string} message - Human-readable status message
   * @param {Object} metadata - Optional metadata (e.g., {filename: 'app.js', percentage: 50})
   */
  async sendStatusEvent(jobId, eventType, message, metadata = {}) {
    try {
      // Use retry with 2 attempts for status events (best-effort)
      await this.requestWithRetry('post', `/jobs/${jobId}/events`, {
        event_type: eventType,
        message: message,
        metadata: metadata
      }, null, 2);
      logger.debug(`Status event sent for job #${jobId}: ${eventType} - ${message}`);
    } catch (error) {
      logger.warn(`Error sending status event for job #${jobId}: ${error.message}`);
      // Don't throw - status events are best-effort for UI visibility
    }
  }

  /**
   * Update job metadata (best-effort, doesn't fail job if unsuccessful).
   * Oversized (>10KB serialized) or non-serializable metadata is dropped
   * entirely — nothing is sent in that case.
   * @param {number} jobId - Job ID
   * @param {Object} metadata - Metadata object to merge
   */
  async updateJobMetadata(jobId, metadata) {
    // Validate metadata
    if (!metadata || typeof metadata !== 'object') {
      logger.warn('Invalid metadata: must be an object');
      return;
    }

    // Check metadata size to prevent sending excessively large payloads
    try {
      const metadataStr = JSON.stringify(metadata);
      if (metadataStr.length > 10000) {
        // The update is skipped entirely (no partial/truncated send).
        logger.warn(`Metadata too large (${metadataStr.length} bytes), skipping update`);
        return;
      }
    } catch (error) {
      logger.warn(`Error serializing metadata: ${error.message}`);
      return;
    }

    try {
      await this.requestWithFallback('patch', `/jobs/${jobId}/metadata`, {
        metadata: metadata
      });
      logger.debug(`Metadata updated for job #${jobId}`, metadata);
    } catch (error) {
      logger.warn(`Error updating metadata for job #${jobId}: ${error.message}`);
      // Don't throw - metadata updates are best-effort
    }
  }

  /**
   * Add a setup log entry (appears in "Instance Setup Logs" section of UI).
   * Best-effort - doesn't fail job if unsuccessful.
   * @param {number} jobId - Job ID
   * @param {string} level - Log level ('info' or 'error')
   * @param {string} message - Log message
   * @param {Object} metadata - Optional structured metadata (Phase 3)
   */
  async addSetupLog(jobId, level, message, metadata = null) {
    try {
      const payload = {
        level: level,
        message: message,
        timestamp: new Date().toISOString()
      };

      // Add metadata if present (Phase 3)
      if (metadata && Object.keys(metadata).length > 0) {
        payload.metadata = metadata;
      }

      await this.requestWithFallback('patch', `/jobs/${jobId}/setup_log`, payload);
      logger.debug(`Setup log sent for job #${jobId}: [${level}] ${message}`);
    } catch (error) {
      logger.debug(`Error sending setup log for job #${jobId}: ${error.message}`);
      // Don't throw - setup logs are best-effort for UI visibility
      // Silently fail to avoid disrupting job execution
    }
  }

  /**
   * Add multiple setup log entries in a single batch (more efficient).
   * Unlike addSetupLog, this DOES rethrow on failure so the caller's
   * batcher can fall back to sending logs individually.
   * @param {number} jobId - Job ID
   * @param {Array} logs - Array of log objects with {timestamp, level, message}
   * @throws Propagates the API error after logging it at debug level
   */
  async addSetupLogBatch(jobId, logs) {
    if (!logs || logs.length === 0) return;

    try {
      await this.requestWithFallback('post', `/jobs/${jobId}/setup_logs`, {
        logs: logs
      }, {
        timeout: BATCH_API_TIMEOUT_MS // 30s for batch operations
      });
      logger.debug(`Batch setup logs sent for job #${jobId}: ${logs.length} logs`);
    } catch (error) {
      logger.debug(`Error sending batch setup logs for job #${jobId}: ${error.message}`);
      throw error; // Rethrow so batcher can fall back to individual sends
    }
  }

  /**
   * Validate job object from API.
   * @param {Object} job - Job object to validate
   * @returns {string|null} Error message if invalid, null if valid
   */
  validateJob(job) {
    // Basic structure validation
    if (!job || typeof job !== 'object') {
      return 'Job is null or not an object';
    }

    // Required fields
    if (typeof job.id !== 'number' || job.id <= 0) {
      return 'Job ID is missing or invalid';
    }

    if (typeof job.job_type !== 'string' || !job.job_type.trim()) {
      return 'Job type is missing or invalid';
    }

    // Validate job_type is one of the known types
    const validJobTypes = ['plan_generation', 'code_execution'];
    if (!validJobTypes.includes(job.job_type)) {
      return `Unknown job type: ${job.job_type}`;
    }

    if (typeof job.task_title !== 'string' || !job.task_title.trim()) {
      return 'Task title is missing or invalid';
    }

    // Validate prompt if present (can be null/empty for legacy clients)
    if (job.prompt !== null && job.prompt !== undefined && typeof job.prompt !== 'string') {
      return 'Prompt must be a string or null';
    }

    // For code_execution jobs, validate project
    if (job.job_type === 'code_execution') {
      if (!job.project || typeof job.project !== 'object') {
        return 'Project object is required for code_execution jobs';
      }

      if (typeof job.project.system_path !== 'string' || !job.project.system_path.trim()) {
        return 'Project system_path is missing or invalid';
      }
    }

    // For plan_generation jobs, validate project if present
    if (job.job_type === 'plan_generation' && job.project) {
      if (typeof job.project !== 'object') {
        return 'Project must be an object if provided';
      }

      if (job.project.system_path !== null &&
          job.project.system_path !== undefined &&
          typeof job.project.system_path !== 'string') {
        return 'Project system_path must be a string if provided';
      }
    }

    return null; // Valid
  }
}
|
|
763
|
+
|
|
764
|
+
// CommonJS export: callers construct one ApiClient per agent identity.
module.exports = ApiClient;
|