@supaku/agentfactory 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/dist/src/deployment/deployment-checker.d.ts +110 -0
- package/dist/src/deployment/deployment-checker.d.ts.map +1 -0
- package/dist/src/deployment/deployment-checker.js +242 -0
- package/dist/src/deployment/index.d.ts +3 -0
- package/dist/src/deployment/index.d.ts.map +1 -0
- package/dist/src/deployment/index.js +2 -0
- package/dist/src/index.d.ts +5 -0
- package/dist/src/index.d.ts.map +1 -0
- package/dist/src/index.js +4 -0
- package/dist/src/logger.d.ts +117 -0
- package/dist/src/logger.d.ts.map +1 -0
- package/dist/src/logger.js +430 -0
- package/dist/src/orchestrator/activity-emitter.d.ts +128 -0
- package/dist/src/orchestrator/activity-emitter.d.ts.map +1 -0
- package/dist/src/orchestrator/activity-emitter.js +406 -0
- package/dist/src/orchestrator/api-activity-emitter.d.ts +167 -0
- package/dist/src/orchestrator/api-activity-emitter.d.ts.map +1 -0
- package/dist/src/orchestrator/api-activity-emitter.js +469 -0
- package/dist/src/orchestrator/heartbeat-writer.d.ts +57 -0
- package/dist/src/orchestrator/heartbeat-writer.d.ts.map +1 -0
- package/dist/src/orchestrator/heartbeat-writer.js +137 -0
- package/dist/src/orchestrator/index.d.ts +20 -0
- package/dist/src/orchestrator/index.d.ts.map +1 -0
- package/dist/src/orchestrator/index.js +22 -0
- package/dist/src/orchestrator/log-analyzer.d.ts +160 -0
- package/dist/src/orchestrator/log-analyzer.d.ts.map +1 -0
- package/dist/src/orchestrator/log-analyzer.js +572 -0
- package/dist/src/orchestrator/log-config.d.ts +39 -0
- package/dist/src/orchestrator/log-config.d.ts.map +1 -0
- package/dist/src/orchestrator/log-config.js +45 -0
- package/dist/src/orchestrator/orchestrator.d.ts +246 -0
- package/dist/src/orchestrator/orchestrator.d.ts.map +1 -0
- package/dist/src/orchestrator/orchestrator.js +2525 -0
- package/dist/src/orchestrator/parse-work-result.d.ts +16 -0
- package/dist/src/orchestrator/parse-work-result.d.ts.map +1 -0
- package/dist/src/orchestrator/parse-work-result.js +73 -0
- package/dist/src/orchestrator/progress-logger.d.ts +72 -0
- package/dist/src/orchestrator/progress-logger.d.ts.map +1 -0
- package/dist/src/orchestrator/progress-logger.js +135 -0
- package/dist/src/orchestrator/session-logger.d.ts +159 -0
- package/dist/src/orchestrator/session-logger.d.ts.map +1 -0
- package/dist/src/orchestrator/session-logger.js +275 -0
- package/dist/src/orchestrator/state-recovery.d.ts +96 -0
- package/dist/src/orchestrator/state-recovery.d.ts.map +1 -0
- package/dist/src/orchestrator/state-recovery.js +301 -0
- package/dist/src/orchestrator/state-types.d.ts +165 -0
- package/dist/src/orchestrator/state-types.d.ts.map +1 -0
- package/dist/src/orchestrator/state-types.js +7 -0
- package/dist/src/orchestrator/stream-parser.d.ts +145 -0
- package/dist/src/orchestrator/stream-parser.d.ts.map +1 -0
- package/dist/src/orchestrator/stream-parser.js +131 -0
- package/dist/src/orchestrator/types.d.ts +205 -0
- package/dist/src/orchestrator/types.d.ts.map +1 -0
- package/dist/src/orchestrator/types.js +4 -0
- package/dist/src/providers/amp-provider.d.ts +20 -0
- package/dist/src/providers/amp-provider.d.ts.map +1 -0
- package/dist/src/providers/amp-provider.js +24 -0
- package/dist/src/providers/claude-provider.d.ts +18 -0
- package/dist/src/providers/claude-provider.d.ts.map +1 -0
- package/dist/src/providers/claude-provider.js +267 -0
- package/dist/src/providers/codex-provider.d.ts +21 -0
- package/dist/src/providers/codex-provider.d.ts.map +1 -0
- package/dist/src/providers/codex-provider.js +25 -0
- package/dist/src/providers/index.d.ts +42 -0
- package/dist/src/providers/index.d.ts.map +1 -0
- package/dist/src/providers/index.js +77 -0
- package/dist/src/providers/types.d.ts +147 -0
- package/dist/src/providers/types.d.ts.map +1 -0
- package/dist/src/providers/types.js +13 -0
- package/package.json +63 -0
|
@@ -0,0 +1,572 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Log Analyzer
|
|
3
|
+
*
|
|
4
|
+
* Analyzes session logs for errors and improvement opportunities.
|
|
5
|
+
* Can automatically create deduplicated Linear issues.
|
|
6
|
+
*/
|
|
7
|
+
import { existsSync, readdirSync, readFileSync, writeFileSync, mkdirSync, rmSync, statSync, } from 'fs';
|
|
8
|
+
import { resolve, join } from 'path';
|
|
9
|
+
import { createHash } from 'crypto';
|
|
10
|
+
import { createLinearAgentClient, DEFAULT_TEAM_ID, LINEAR_PROJECTS, LINEAR_LABELS, } from '@supaku/agentfactory-linear';
|
|
11
|
+
import { readSessionMetadata, readSessionEvents, } from './session-logger';
|
|
12
|
+
import { getLogAnalysisConfig } from './log-config';
|
|
13
|
+
/**
 * Pattern detection rules.
 *
 * Each rule has:
 *   - pattern:  case-insensitive RegExp tested against an error event's content
 *   - type:     issue category used for grouping and Linear labeling
 *   - severity: 'critical' | 'high' | 'medium' (drives issue-creation thresholds)
 *   - title:    factory returning a short human-readable title (invoked with an
 *               unused argument by the caller — see `rule.title('')` in
 *               detectPatterns)
 *
 * Order matters: the caller stops at the first matching rule per event, so
 * specific rules must precede broader catch-alls.
 */
const PATTERN_RULES = [
    // Command requires approval (critical in autonomous mode)
    {
        pattern: /This command requires approval|requires approval/i,
        type: 'approval_required',
        severity: 'critical',
        title: () => 'Command requires approval in autonomous mode',
    },
    // Specific tool misuse patterns (before generic tool_use_error)
    // File not read before write
    {
        pattern: /File has not been read yet/i,
        type: 'tool_misuse',
        severity: 'high',
        title: () => 'Write attempted before read',
    },
    // File does not exist (tool use)
    {
        pattern: /File does not exist/i,
        type: 'tool_misuse',
        severity: 'medium',
        title: () => 'File does not exist',
    },
    // Path does not exist (tool use)
    {
        pattern: /Path does not exist/i,
        type: 'tool_misuse',
        severity: 'medium',
        title: () => 'Path does not exist',
    },
    // Unknown JSON field (malformed tool input)
    {
        pattern: /Unknown JSON field/i,
        type: 'tool_misuse',
        severity: 'high',
        title: () => 'Invalid tool parameter',
    },
    // Glob pattern in write operation
    {
        pattern: /Glob patterns are not allowed in write/i,
        type: 'tool_misuse',
        severity: 'medium',
        title: () => 'Glob pattern used in write operation',
    },
    // Generic tool use errors from Claude Code API (catch-all for unmatched errors)
    {
        pattern: /<tool_use_error>.*<\/tool_use_error>/i,
        type: 'tool_misuse',
        severity: 'high',
        title: () => 'Tool API error',
    },
    // File too large
    {
        pattern: /exceeds maximum allowed tokens/i,
        type: 'tool_issue',
        severity: 'medium',
        title: () => 'File too large to read',
    },
    // Directory blocked (sandbox cd restriction)
    {
        pattern: /cd in .* was blocked|only change directories to the allowed/i,
        type: 'permission',
        severity: 'high',
        title: () => 'Directory change blocked by sandbox',
    },
    // Sandbox violations
    {
        pattern: /sandbox.*not allowed|operation not permitted/i,
        type: 'permission',
        severity: 'high',
        title: () => 'Sandbox permission error',
    },
    // Permission denied
    {
        pattern: /permission denied|EACCES|access denied/i,
        type: 'permission',
        severity: 'high',
        title: () => 'File permission denied',
    },
    // File not found (filesystem)
    {
        pattern: /ENOENT|no such file or directory/i,
        type: 'tool_issue',
        severity: 'medium',
        title: () => 'File not found error',
    },
    // Network timeouts
    {
        pattern: /timeout|ETIMEDOUT|connection timed out/i,
        type: 'performance',
        severity: 'medium',
        title: () => 'Network timeout',
    },
    // Rate limiting
    // NOTE(review): the bare `429` alternative matches that digit sequence
    // anywhere in the content, not just HTTP status codes — potential false
    // positives; confirm intent.
    {
        pattern: /rate limit|429|too many requests/i,
        type: 'performance',
        severity: 'high',
        title: () => 'Rate limit exceeded',
    },
    // Network errors
    {
        pattern: /ECONNREFUSED|ENOTFOUND|connection refused/i,
        type: 'tool_issue',
        severity: 'medium',
        title: () => 'Network connection error',
    },
    // Worktree conflicts
    {
        pattern: /is already used by worktree|already checked out/i,
        type: 'tool_issue',
        severity: 'high',
        title: () => 'Git worktree conflict',
    },
    // Tool failures (broad catch-all; kept last so specific rules win first)
    {
        pattern: /tool.*error|tool.*failed|command failed/i,
        type: 'tool_issue',
        severity: 'medium',
        title: () => 'Tool execution failed',
    },
];
|
|
138
|
+
/**
 * Generate a deterministic signature for deduplication.
 *
 * The signature keys on the issue type plus the first 100 characters of the
 * lowercased title, so near-identical titles collapse to the same signature
 * across sessions.
 *
 * @param {string} issueType - Pattern category (e.g. 'tool_misuse').
 * @param {string} title - Human-readable issue title.
 * @returns {string} Stable identifier of the form `agent-env-<16 hex chars>`.
 */
function generateSignature(issueType, title) {
    const key = `${issueType}:${title.toLowerCase().slice(0, 100)}`;
    const digest = createHash('sha256').update(key).digest('hex');
    return `agent-env-${digest.slice(0, 16)}`;
}
|
|
146
|
+
/**
 * Get the project ID for agent bugs from the AGENT_BUG_BACKLOG env var.
 *
 * Maps project names (e.g. "Agent", "Social") — uppercased — to their Linear
 * project IDs via LINEAR_PROJECTS. Falls back to the Agent project when the
 * variable is unset or names an unknown project.
 *
 * @returns {string} A Linear project ID.
 */
function getBugBacklogProjectId() {
    const requested = process.env.AGENT_BUG_BACKLOG?.toUpperCase();
    return requested && requested in LINEAR_PROJECTS
        ? LINEAR_PROJECTS[requested]
        : LINEAR_PROJECTS.AGENT;
}
|
|
159
|
+
/**
 * LogAnalyzer class for analyzing session logs.
 *
 * Scans completed session logs for known error patterns and repeated
 * failures, suggests Linear issues for them, and (optionally) creates those
 * issues with signature-based deduplication.
 *
 * Directory layout under `config.logsDir`:
 *   - sessions/<sessionId>/        raw session logs (metadata + events)
 *   - processed/<sessionId>.json   analysis result; also acts as "done" marker
 *   - analysis/issues-created.json dedup store mapping signatures -> Linear issues
 */
export class LogAnalyzer {
    // Merged LogAnalysisConfig: env-derived defaults overridden by constructor arg.
    config;
    // Resolved paths derived from config.logsDir (see class doc for layout).
    sessionsDir;
    processedDir;
    analysisDir;
    deduplicationPath;
    // Set by initialize() only when an API key is available; createIssues()
    // throws if it was never set.
    linearClient;
    /**
     * @param {object} [config] - Partial LogAnalysisConfig; unspecified keys
     *   fall back to getLogAnalysisConfig() (environment-derived defaults).
     */
    constructor(config) {
        this.config = { ...getLogAnalysisConfig(), ...config };
        this.sessionsDir = resolve(this.config.logsDir, 'sessions');
        this.processedDir = resolve(this.config.logsDir, 'processed');
        this.analysisDir = resolve(this.config.logsDir, 'analysis');
        this.deduplicationPath = resolve(this.analysisDir, 'issues-created.json');
    }
    /**
     * Initialize directories and Linear client.
     *
     * @param {string} [linearApiKey] - Overrides the LINEAR_API_KEY env var.
     *   If neither is set, no client is created and createIssues() will throw.
     */
    initialize(linearApiKey) {
        // Create directories if needed
        for (const dir of [this.sessionsDir, this.processedDir, this.analysisDir]) {
            if (!existsSync(dir)) {
                mkdirSync(dir, { recursive: true });
            }
        }
        // Initialize Linear client if API key provided
        const apiKey = linearApiKey ?? process.env.LINEAR_API_KEY;
        if (apiKey) {
            this.linearClient = createLinearAgentClient({ apiKey });
        }
    }
    /**
     * Get list of session IDs that haven't been analyzed yet.
     *
     * A session qualifies when it has a directory under sessions/, no marker
     * file under processed/, and metadata with an `endedAt` timestamp
     * (i.e. the session is finished).
     *
     * @returns {string[]} Unprocessed, completed session IDs.
     */
    getUnprocessedSessions() {
        if (!existsSync(this.sessionsDir)) {
            return [];
        }
        const sessions = [];
        const entries = readdirSync(this.sessionsDir, { withFileTypes: true });
        for (const entry of entries) {
            if (entry.isDirectory()) {
                const sessionId = entry.name;
                const processedPath = resolve(this.processedDir, `${sessionId}.json`);
                // Check if already processed
                if (!existsSync(processedPath)) {
                    // Check if session is completed (has endedAt in metadata)
                    const metadata = readSessionMetadata(resolve(this.sessionsDir, sessionId));
                    if (metadata?.endedAt) {
                        sessions.push(sessionId);
                    }
                }
            }
        }
        return sessions;
    }
    /**
     * Analyze a single session.
     *
     * Reads all events, runs rule-based pattern detection plus repeated-failure
     * detection, and derives suggested Linear issues.
     *
     * @param {string} sessionId - Directory name under sessions/.
     * @returns {object|null} Analysis result, or null when session metadata
     *   cannot be read.
     */
    analyzeSession(sessionId) {
        const sessionDir = resolve(this.sessionsDir, sessionId);
        const metadata = readSessionMetadata(sessionDir);
        if (!metadata) {
            return null;
        }
        // Collect events (readSessionEvents is iterable; materialize for
        // multiple passes below).
        const events = [];
        for (const event of readSessionEvents(sessionDir)) {
            events.push(event);
        }
        // Detect patterns
        const patterns = this.detectPatterns(events);
        // Check for repeated failures
        const repeatedFailures = this.detectRepeatedFailures(events);
        patterns.push(...repeatedFailures);
        // Generate suggested issues
        const suggestedIssues = this.generateSuggestedIssues(patterns, metadata);
        const result = {
            sessionId,
            metadata,
            patterns,
            eventsAnalyzed: events.length,
            errorsFound: events.filter((e) => e.isError).length,
            suggestedIssues,
            analyzedAt: Date.now(),
        };
        return result;
    }
    /**
     * Detect patterns in events.
     *
     * Matches each error event against PATTERN_RULES (first matching rule
     * wins per event) and aggregates occurrences per rule, keeping up to
     * three truncated example snippets.
     *
     * @param {Array<object>} events - Session events; only `isError` events
     *   are inspected.
     * @returns {Array<object>} One aggregated pattern per matched rule.
     */
    detectPatterns(events) {
        const patternMap = new Map();
        for (const event of events) {
            if (!event.isError)
                continue;
            const content = typeof event.content === 'string' ? event.content : JSON.stringify(event.content);
            for (const rule of PATTERN_RULES) {
                if (rule.pattern.test(content)) {
                    // Title factories ignore their argument; '' is passed by convention.
                    const key = `${rule.type}:${rule.title('')}`;
                    if (patternMap.has(key)) {
                        const existing = patternMap.get(key);
                        existing.occurrences++;
                        if (existing.examples.length < 3) {
                            existing.examples.push(content.substring(0, 200));
                        }
                    }
                    else {
                        patternMap.set(key, {
                            type: rule.type,
                            severity: rule.severity,
                            title: rule.title(''),
                            description: `Detected ${rule.type} issue: ${rule.title('')}`,
                            examples: [content.substring(0, 200)],
                            occurrences: 1,
                            tool: event.tool,
                        });
                    }
                    break; // Only match first rule per event
                }
            }
        }
        return Array.from(patternMap.values());
    }
    /**
     * Detect repeated failures (same error 3+ times).
     *
     * Errors are grouped by the lowercased first 100 characters of their
     * content; any group with 3 or more occurrences becomes a high-severity
     * 'repeated_failure' pattern.
     *
     * @param {Array<object>} events - Session events; only `isError` events
     *   are inspected.
     * @returns {Array<object>} Patterns for error groups seen 3+ times.
     */
    detectRepeatedFailures(events) {
        const errorCounts = new Map();
        for (const event of events) {
            if (!event.isError)
                continue;
            const content = typeof event.content === 'string' ? event.content : JSON.stringify(event.content);
            // Normalize error message for grouping
            const normalized = content.toLowerCase().substring(0, 100);
            if (errorCounts.has(normalized)) {
                const data = errorCounts.get(normalized);
                data.count++;
                if (data.examples.length < 3) {
                    data.examples.push(content.substring(0, 200));
                }
            }
            else {
                errorCounts.set(normalized, {
                    count: 1,
                    examples: [content.substring(0, 200)],
                    tool: event.tool,
                });
            }
        }
        const patterns = [];
        for (const [normalized, data] of errorCounts) {
            if (data.count >= 3) {
                patterns.push({
                    type: 'repeated_failure',
                    severity: 'high',
                    title: `Repeated error: ${normalized.substring(0, 50)}...`,
                    description: `The same error occurred ${data.count} times in this session`,
                    examples: data.examples,
                    occurrences: data.count,
                    tool: data.tool,
                });
            }
        }
        return patterns;
    }
    /**
     * Generate suggested issues from patterns.
     *
     * Patterns are grouped by type; each qualifying group yields one
     * suggestion with a deduplication signature, a category-prefixed title,
     * labels, and a markdown description built from session metadata.
     *
     * @param {Array<object>} patterns - Output of detectPatterns /
     *   detectRepeatedFailures.
     * @param {object} metadata - Session metadata (issueIdentifier, workType).
     * @returns {Array<object>} Suggested issues ready for createIssues().
     */
    generateSuggestedIssues(patterns, metadata) {
        const suggestions = [];
        // Group patterns by type
        const byType = new Map();
        for (const pattern of patterns) {
            const existing = byType.get(pattern.type) ?? [];
            existing.push(pattern);
            byType.set(pattern.type, existing);
        }
        // Generate issue for each pattern type
        for (const [type, typePatterns] of byType) {
            const totalOccurrences = typePatterns.reduce((sum, p) => sum + p.occurrences, 0);
            const highSeverity = typePatterns.filter((p) => p.severity === 'high' || p.severity === 'critical');
            // Create issues for:
            // 1. Any critical/high severity patterns
            // 2. Medium severity with 2+ total occurrences
            // 3. Multiple distinct patterns of the same type (2+)
            // NOTE(review): check 2 is implemented as "any 2+ total occurrences"
            // without filtering on medium severity — confirm intent.
            const hasHighSeverity = highSeverity.length > 0;
            const hasMediumWithOccurrences = totalOccurrences >= 2;
            const hasMultiplePatterns = typePatterns.length >= 2;
            if (!hasHighSeverity && !hasMediumWithOccurrences && !hasMultiplePatterns)
                continue;
            // Prefer the first high/critical pattern as the representative one.
            const primaryPattern = highSeverity[0] ?? typePatterns[0];
            // Determine category prefix and labels based on pattern type
            let categoryPrefix;
            let labels;
            switch (type) {
                case 'tool_misuse':
                    categoryPrefix = '[Agent Behavior]';
                    labels = ['Agent', 'Tool Usage'];
                    break;
                case 'approval_required':
                    categoryPrefix = '[Agent Permissions]';
                    labels = ['Agent', 'Permissions'];
                    break;
                case 'permission':
                    categoryPrefix = '[Agent Environment]';
                    labels = ['Agent', 'Sandbox'];
                    break;
                default:
                    categoryPrefix = '[Agent Environment]';
                    labels = ['Agent', 'Infrastructure'];
            }
            const title = `${categoryPrefix} ${primaryPattern.title}`;
            // Signature uses the unprefixed title so dedup is stable across
            // prefix/category cosmetics.
            const signature = generateSignature(type, primaryPattern.title);
            // Build description
            const description = [
                '## Summary',
                primaryPattern.description,
                '',
                `**Session:** ${metadata.issueIdentifier}`,
                `**Work Type:** ${metadata.workType}`,
                `**Occurrences:** ${totalOccurrences}`,
                `**Severity:** ${primaryPattern.severity}`,
                '',
                '## Examples',
                ...primaryPattern.examples.map((e) => `\`\`\`\n${e}\n\`\`\``),
                '',
                '## Analysis',
                `This issue was detected by the automated log analyzer.`,
                `Pattern type: ${type}`,
            ].join('\n');
            suggestions.push({
                signature,
                title,
                description,
                workType: type === 'tool_misuse' ? 'Bug' : 'Chore',
                labels,
                sourcePatterns: [type],
            });
        }
        return suggestions;
    }
    /**
     * Mark a session as processed.
     *
     * Writes the analysis result to processed/<sessionId>.json; the file's
     * existence is what getUnprocessedSessions() checks.
     *
     * @param {string} sessionId
     * @param {object} result - Analysis result to persist.
     */
    markProcessed(sessionId, result) {
        const processedPath = resolve(this.processedDir, `${sessionId}.json`);
        writeFileSync(processedPath, JSON.stringify(result, null, 2));
    }
    /**
     * Load the deduplication store.
     *
     * @returns {object} Parsed store, or a fresh empty store when the file is
     *   missing or unreadable/corrupt (parse errors are swallowed by design).
     */
    loadDeduplicationStore() {
        if (!existsSync(this.deduplicationPath)) {
            return { issues: {} };
        }
        try {
            const content = readFileSync(this.deduplicationPath, 'utf-8');
            return JSON.parse(content);
        }
        catch {
            return { issues: {} };
        }
    }
    /**
     * Save the deduplication store.
     *
     * @param {object} store - Store object to serialize to
     *   analysis/issues-created.json.
     */
    saveDeduplicationStore(store) {
        writeFileSync(this.deduplicationPath, JSON.stringify(store, null, 2));
    }
    /**
     * Create or update issues in Linear.
     * Returns created/updated issue identifiers.
     *
     * Known signatures get a "+1" comment on their existing issue; new
     * signatures get a fresh issue in the configured backlog project. Per-item
     * Linear failures are logged and skipped, not rethrown. The dedup store is
     * saved once at the end (skipped in dry-run mode, although in-memory
     * counters are still bumped for existing entries).
     *
     * @param {Array<object>} suggestions - Output of generateSuggestedIssues().
     * @param {string} sessionId - Session that produced the suggestions.
     * @param {boolean} [dryRun=false] - When true, no Linear calls are made and
     *   nothing is persisted.
     * @returns {Promise<Array<{signature: string, identifier: string, created: boolean}>>}
     * @throws {Error} When initialize() never created a Linear client.
     */
    async createIssues(suggestions, sessionId, dryRun = false) {
        if (!this.linearClient) {
            throw new Error('Linear client not initialized. Provide LINEAR_API_KEY.');
        }
        const store = this.loadDeduplicationStore();
        const results = [];
        for (const suggestion of suggestions) {
            const existing = store.issues[suggestion.signature];
            if (existing) {
                // Update existing issue - add comment
                try {
                    if (!dryRun) {
                        await this.linearClient.createComment(existing.linearIssueId, `+1 - Detected again in session. Total occurrences: ${existing.sessionCount + 1}`);
                    }
                    // Update store
                    existing.lastSeenAt = Date.now();
                    existing.sessionCount++;
                    if (!existing.sessionIds.includes(sessionId)) {
                        existing.sessionIds.push(sessionId);
                    }
                    results.push({
                        signature: suggestion.signature,
                        identifier: existing.linearIdentifier,
                        created: false,
                    });
                }
                catch (error) {
                    console.warn(`Failed to update issue ${existing.linearIdentifier}:`, error);
                }
            }
            else {
                // Create new issue
                try {
                    if (!dryRun) {
                        // Create issue in the configured backlog project with Bug label
                        const payload = await this.linearClient.linearClient.createIssue({
                            teamId: DEFAULT_TEAM_ID,
                            projectId: getBugBacklogProjectId(),
                            labelIds: [LINEAR_LABELS.BUG],
                            title: suggestion.title,
                            description: suggestion.description,
                        });
                        // NOTE(review): when payload.success is false or the issue
                        // resolves to null, the suggestion is silently omitted from
                        // results — confirm this is intended.
                        if (payload.success) {
                            const issue = await payload.issue;
                            if (issue) {
                                // Track in store
                                store.issues[suggestion.signature] = {
                                    linearIssueId: issue.id,
                                    linearIdentifier: issue.identifier,
                                    createdAt: Date.now(),
                                    lastSeenAt: Date.now(),
                                    sessionCount: 1,
                                    sessionIds: [sessionId],
                                };
                                results.push({
                                    signature: suggestion.signature,
                                    identifier: issue.identifier,
                                    created: true,
                                });
                            }
                        }
                    }
                    else {
                        // Dry run - just report what would be created
                        results.push({
                            signature: suggestion.signature,
                            identifier: '[DRY RUN]',
                            created: true,
                        });
                    }
                }
                catch (error) {
                    console.warn(`Failed to create issue "${suggestion.title}":`, error);
                }
            }
        }
        // Save updated store
        if (!dryRun) {
            this.saveDeduplicationStore(store);
        }
        return results;
    }
    /**
     * Cleanup old logs based on retention policy.
     *
     * Deletes session directories whose metadata.json mtime is older than
     * `retentionDays`, and processed marker files older than the same cutoff.
     * Filesystem errors on individual entries are ignored.
     *
     * @returns {number} Count of deleted entries (directories + files).
     */
    cleanupOldLogs() {
        const cutoff = Date.now() - this.config.retentionDays * 24 * 60 * 60 * 1000;
        let deleted = 0;
        // Clean up old session directories
        if (existsSync(this.sessionsDir)) {
            const entries = readdirSync(this.sessionsDir, { withFileTypes: true });
            for (const entry of entries) {
                if (entry.isDirectory()) {
                    const sessionDir = join(this.sessionsDir, entry.name);
                    const metadataPath = join(sessionDir, 'metadata.json');
                    try {
                        // Sessions without metadata.json are never deleted here.
                        if (existsSync(metadataPath)) {
                            const stats = statSync(metadataPath);
                            if (stats.mtimeMs < cutoff) {
                                rmSync(sessionDir, { recursive: true, force: true });
                                deleted++;
                            }
                        }
                    }
                    catch {
                        // Skip on error
                    }
                }
            }
        }
        // Clean up old processed files
        if (existsSync(this.processedDir)) {
            const entries = readdirSync(this.processedDir);
            for (const entry of entries) {
                const filePath = join(this.processedDir, entry);
                try {
                    const stats = statSync(filePath);
                    if (stats.mtimeMs < cutoff) {
                        rmSync(filePath, { force: true });
                        deleted++;
                    }
                }
                catch {
                    // Skip on error
                }
            }
        }
        return deleted;
    }
}
|
|
565
|
+
/**
 * Create and initialize a log analyzer.
 *
 * Convenience factory: constructs a LogAnalyzer and immediately runs
 * initialize(), so the returned instance has its directories created and
 * (when a key is available) a Linear client attached.
 *
 * @param {object} [config] - Partial LogAnalysisConfig overrides.
 * @param {string} [linearApiKey] - Optional Linear API key override.
 * @returns {LogAnalyzer} Ready-to-use analyzer instance.
 */
export function createLogAnalyzer(config, linearApiKey) {
    const instance = new LogAnalyzer(config);
    instance.initialize(linearApiKey);
    return instance;
}
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
/**
 * Log Analysis Configuration
 *
 * Environment variable handling for session logging and analysis.
 */
/**
 * Configuration for log analysis.
 * Produced by getLogAnalysisConfig() from environment variables.
 */
export interface LogAnalysisConfig {
    /** Enable verbose session logging */
    loggingEnabled: boolean;
    /** Auto-analyze sessions after completion */
    autoAnalyzeEnabled: boolean;
    /** Days to retain logs before cleanup */
    retentionDays: number;
    /** Base directory for agent logs (relative to repo root) */
    logsDir: string;
}
/**
 * Get log analysis configuration from environment variables
 *
 * Environment variables:
 * - AGENT_SESSION_LOGGING_ENABLED: Enable verbose session logging (default: false)
 * - AGENT_AUTO_ANALYZE_ENABLED: Auto-analyze after completion (default: false)
 * - AGENT_LOG_RETENTION_DAYS: Days before cleanup (default: 7)
 * - AGENT_LOGS_DIR: Base directory for logs (default: .agent-logs)
 * - AGENT_BUG_BACKLOG: Linear project name for agent bugs (default: "Agent")
 *   Maps to LINEAR_PROJECTS.{NAME} - supports "Agent", "Social", "Test"
 */
export declare function getLogAnalysisConfig(): LogAnalysisConfig;
/**
 * Check if session logging is enabled
 */
export declare function isSessionLoggingEnabled(): boolean;
/**
 * Check if auto-analysis is enabled
 */
export declare function isAutoAnalyzeEnabled(): boolean;
//# sourceMappingURL=log-config.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"log-config.d.ts","sourceRoot":"","sources":["../../../src/orchestrator/log-config.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH;;GAEG;AACH,MAAM,WAAW,iBAAiB;IAChC,qCAAqC;IACrC,cAAc,EAAE,OAAO,CAAA;IACvB,6CAA6C;IAC7C,kBAAkB,EAAE,OAAO,CAAA;IAC3B,yCAAyC;IACzC,aAAa,EAAE,MAAM,CAAA;IACrB,4DAA4D;IAC5D,OAAO,EAAE,MAAM,CAAA;CAChB;AAYD;;;;;;;;;;GAUG;AACH,wBAAgB,oBAAoB,IAAI,iBAAiB,CAOxD;AAED;;GAEG;AACH,wBAAgB,uBAAuB,IAAI,OAAO,CAEjD;AAED;;GAEG;AACH,wBAAgB,oBAAoB,IAAI,OAAO,CAE9C"}
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Log Analysis Configuration
|
|
3
|
+
*
|
|
4
|
+
* Environment variable handling for session logging and analysis.
|
|
5
|
+
*/
|
|
6
|
+
/**
 * Default configuration values, used when the corresponding environment
 * variable is unset or invalid.
 */
const DEFAULTS = {
    loggingEnabled: false,
    autoAnalyzeEnabled: false,
    retentionDays: 7,
    logsDir: '.agent-logs',
};
/**
 * Parse an environment value as a positive integer.
 *
 * Falls back when the value is missing, non-numeric, zero, or negative.
 * This guards AGENT_LOG_RETENTION_DAYS in particular: the previous
 * `parseInt(...) || default` form let negative values through, which would
 * place the cleanup cutoff in the future and delete every log.
 *
 * @param {string|undefined} value - Raw environment variable value.
 * @param {number} fallback - Default returned for invalid input.
 * @returns {number} A positive integer.
 */
function parsePositiveInt(value, fallback) {
    const parsed = Number.parseInt(value ?? '', 10);
    return Number.isInteger(parsed) && parsed > 0 ? parsed : fallback;
}
/**
 * Get log analysis configuration from environment variables
 *
 * Environment variables:
 * - AGENT_SESSION_LOGGING_ENABLED: Enable verbose session logging (default: false)
 * - AGENT_AUTO_ANALYZE_ENABLED: Auto-analyze after completion (default: false)
 * - AGENT_LOG_RETENTION_DAYS: Days before cleanup (default: 7; must be a
 *   positive integer, invalid values fall back to the default)
 * - AGENT_LOGS_DIR: Base directory for logs (default: .agent-logs)
 * - AGENT_BUG_BACKLOG: Linear project name for agent bugs (default: "Agent")
 *   Maps to LINEAR_PROJECTS.{NAME} - supports "Agent", "Social", "Test"
 *
 * @returns {{loggingEnabled: boolean, autoAnalyzeEnabled: boolean, retentionDays: number, logsDir: string}}
 */
export function getLogAnalysisConfig() {
    return {
        // Boolean flags require the exact lowercase string 'true'.
        loggingEnabled: process.env.AGENT_SESSION_LOGGING_ENABLED === 'true',
        autoAnalyzeEnabled: process.env.AGENT_AUTO_ANALYZE_ENABLED === 'true',
        retentionDays: parsePositiveInt(process.env.AGENT_LOG_RETENTION_DAYS, DEFAULTS.retentionDays),
        logsDir: process.env.AGENT_LOGS_DIR ?? DEFAULTS.logsDir,
    };
}
|
|
34
|
+
/**
 * Check if session logging is enabled.
 *
 * Strict string comparison: only the exact lowercase literal 'true' enables
 * logging; anything else (unset, 'TRUE', '1') leaves it off.
 *
 * @returns {boolean} True when AGENT_SESSION_LOGGING_ENABLED is 'true'.
 */
export function isSessionLoggingEnabled() {
    const { AGENT_SESSION_LOGGING_ENABLED: flag } = process.env;
    return flag === 'true';
}
|
|
40
|
+
/**
 * Check if auto-analysis is enabled.
 *
 * Strict string comparison: only the exact lowercase literal 'true' enables
 * auto-analysis; anything else (unset, 'TRUE', '1') leaves it off.
 *
 * @returns {boolean} True when AGENT_AUTO_ANALYZE_ENABLED is 'true'.
 */
export function isAutoAnalyzeEnabled() {
    const { AGENT_AUTO_ANALYZE_ENABLED: flag } = process.env;
    return flag === 'true';
}
|