elsabro 2.3.0 → 3.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +668 -20
- package/bin/install.js +0 -0
- package/flows/development-flow.json +452 -0
- package/flows/quick-flow.json +118 -0
- package/package.json +3 -2
- package/references/SYSTEM_INDEX.md +379 -5
- package/references/agent-marketplace.md +2274 -0
- package/references/agent-protocol.md +1126 -0
- package/references/ai-code-suggestions.md +2413 -0
- package/references/checkpointing.md +595 -0
- package/references/collaboration-patterns.md +851 -0
- package/references/collaborative-sessions.md +1081 -0
- package/references/configuration-management.md +1810 -0
- package/references/cost-tracking.md +1095 -0
- package/references/enterprise-sso.md +2001 -0
- package/references/error-contracts-v2.md +968 -0
- package/references/event-driven.md +1031 -0
- package/references/flow-orchestration.md +940 -0
- package/references/flow-visualization.md +1557 -0
- package/references/ide-integrations.md +3513 -0
- package/references/interrupt-system.md +681 -0
- package/references/kubernetes-deployment.md +3099 -0
- package/references/memory-system.md +683 -0
- package/references/mobile-companion.md +3236 -0
- package/references/multi-llm-providers.md +2494 -0
- package/references/multi-project-memory.md +1182 -0
- package/references/observability.md +793 -0
- package/references/output-schemas.md +858 -0
- package/references/performance-profiler.md +955 -0
- package/references/plugin-system.md +1526 -0
- package/references/prompt-management.md +292 -0
- package/references/sandbox-execution.md +303 -0
- package/references/security-system.md +1253 -0
- package/references/streaming.md +696 -0
- package/references/testing-framework.md +1151 -0
- package/references/time-travel.md +802 -0
- package/references/tool-registry.md +886 -0
- package/references/voice-commands.md +3296 -0
- package/templates/agent-marketplace-config.json +220 -0
- package/templates/agent-protocol-config.json +136 -0
- package/templates/ai-suggestions-config.json +100 -0
- package/templates/checkpoint-state.json +61 -0
- package/templates/collaboration-config.json +157 -0
- package/templates/collaborative-sessions-config.json +153 -0
- package/templates/configuration-config.json +245 -0
- package/templates/cost-tracking-config.json +148 -0
- package/templates/enterprise-sso-config.json +438 -0
- package/templates/events-config.json +148 -0
- package/templates/flow-visualization-config.json +196 -0
- package/templates/ide-integrations-config.json +442 -0
- package/templates/kubernetes-config.json +764 -0
- package/templates/memory-state.json +84 -0
- package/templates/mobile-companion-config.json +600 -0
- package/templates/multi-llm-config.json +544 -0
- package/templates/multi-project-memory-config.json +145 -0
- package/templates/observability-config.json +109 -0
- package/templates/performance-profiler-config.json +125 -0
- package/templates/plugin-config.json +170 -0
- package/templates/prompt-management-config.json +86 -0
- package/templates/sandbox-config.json +185 -0
- package/templates/schemas-config.json +65 -0
- package/templates/security-config.json +120 -0
- package/templates/streaming-config.json +72 -0
- package/templates/testing-config.json +81 -0
- package/templates/timetravel-config.json +62 -0
- package/templates/tool-registry-config.json +109 -0
- package/templates/voice-commands-config.json +658 -0
|
@@ -0,0 +1,955 @@
|
|
|
1
|
+
# ELSABRO Performance Profiler
|
|
2
|
+
|
|
3
|
+
> Sistema de profiling para identificar y optimizar cuellos de botella en agentes y flows.
|
|
4
|
+
|
|
5
|
+
## Arquitectura General
|
|
6
|
+
|
|
7
|
+
```
|
|
8
|
+
┌─────────────────────────────────────────────────────────────────────────┐
|
|
9
|
+
│ Performance Profiler │
|
|
10
|
+
├─────────────────────────────────────────────────────────────────────────┤
|
|
11
|
+
│ ┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐ │
|
|
12
|
+
│ │ Profiler │ │BottleneckDetect │ │ MemoryAnalyzer │ │
|
|
13
|
+
│ │ ───────────── │ │ ───────────── │ │ ───────────── │ │
|
|
14
|
+
│ │ • Sampling │ │ • Identify slow │ │ • Heap analysis │ │
|
|
15
|
+
│ │ • Tracing │ │ • Root cause │ │ • Leaks detect │ │
|
|
16
|
+
│ │ • Benchmarking │ │ • Suggestions │ │ • GC pressure │ │
|
|
17
|
+
│ └─────────────────┘ └─────────────────┘ └─────────────────┘ │
|
|
18
|
+
│ │ │
|
|
19
|
+
│ ┌───────────────────────────┴───────────────────────────────┐ │
|
|
20
|
+
│ │ LatencyTracker │ │
|
|
21
|
+
│ │ • P50/P95/P99 • Histograms • SLO monitoring │ │
|
|
22
|
+
│ └────────────────────────────────────────────────────────────┘ │
|
|
23
|
+
│ │ │
|
|
24
|
+
│ ┌───────────────────────────┴───────────────────────────────┐ │
|
|
25
|
+
│ │ PerformanceDashboard │ │
|
|
26
|
+
│ │ • Real-time metrics • Flame graphs • Recommendations │ │
|
|
27
|
+
│ └────────────────────────────────────────────────────────────┘ │
|
|
28
|
+
└─────────────────────────────────────────────────────────────────────────┘
|
|
29
|
+
```
|
|
30
|
+
|
|
31
|
+
---
|
|
32
|
+
|
|
33
|
+
## 1. Profiler
|
|
34
|
+
|
|
35
|
+
### Propósito
|
|
36
|
+
Recopila métricas de rendimiento mediante sampling y tracing.
|
|
37
|
+
|
|
38
|
+
### Interfaz
|
|
39
|
+
|
|
40
|
+
```typescript
|
|
41
|
+
/** Configuration accepted by {@link Profiler.start}. */
interface ProfilerOptions {
  /** Collection strategy: periodic sampling, span tracing, or both. */
  mode: 'sampling' | 'tracing' | 'both';
  samplingInterval?: number; // ms
  traceAllCalls?: boolean;
  /** When true, each sample also records a trimmed call stack. */
  captureStacks?: boolean;
  /** Ring-buffer cap on retained samples; oldest are evicted first. */
  maxSamples?: number;
  targets?: ProfileTarget[];
}

/** Selects what the profiler should observe. */
interface ProfileTarget {
  type: 'agent' | 'flow' | 'tool' | 'function';
  name?: string;
  pattern?: string; // e.g., "elsabro-*"
}

/** One recorded measurement (either a timer sample or a finished span). */
interface ProfileSample {
  /** Epoch milliseconds at which the sample/span started. */
  timestamp: number;
  /** Name of the measured unit, or 'system' for timer-driven samples. */
  target: string;
  type: ProfileTarget['type'];
  metrics: {
    duration: number;
    cpuTime?: number;
    memoryDelta?: number;
    tokenCount?: number;
  };
  /** Call stack captured at sampling time, when enabled. */
  stack?: string[];
  metadata?: Record<string, unknown>;
}

/** Aggregated output of a completed profiling session. */
interface ProfileResult {
  id: string;
  startTime: number;
  endTime: number;
  duration: number;
  samples: ProfileSample[];
  summary: ProfileSummary;
  hotspots: Hotspot[];
  recommendations: Recommendation[];
}

/** Aggregate statistics across all samples of a session. */
interface ProfileSummary {
  totalSamples: number;
  byTarget: Map<string, TargetStats>;
  byType: Map<string, TypeStats>;
  /** Duration percentiles across all samples, in ms. */
  percentiles: {
    p50: number;
    p75: number;
    p90: number;
    p95: number;
    p99: number;
  };
}

/** A target ranked by its share of total profiled time. */
interface Hotspot {
  target: string;
  type: string;
  totalTime: number;
  /** Share of total profiled time, in percent. */
  percentage: number;
  callCount: number;
  avgDuration: number;
  maxDuration: number;
}

/** Public surface of the performance profiler. */
interface Profiler {
  // Lifecycle
  start(options?: ProfilerOptions): Promise<void>;
  stop(): Promise<ProfileResult>;
  pause(): void;
  resume(): void;
  isRunning(): boolean;

  // Manual instrumentation
  startSpan(name: string, metadata?: Record<string, unknown>): ProfileSpan;
  measure<T>(name: string, fn: () => T | Promise<T>): Promise<T>;

  // Queries
  // NOTE(review): SampleFilter is not declared in this document — confirm its shape.
  getSamples(filter?: SampleFilter): ProfileSample[];
  getStats(target?: string): TargetStats;
  getCurrentProfile(): ProfileResult | null;

  // Export
  export(format: 'json' | 'flamegraph' | 'chrome-trace'): string;

  // Events
  /** Subscribes to new samples; returns an unsubscribe function. */
  onSample(callback: (sample: ProfileSample) => void): () => void;
}

/** Handle returned by {@link Profiler.startSpan}; call end() to record it. */
interface ProfileSpan {
  end(metadata?: Record<string, unknown>): void;
  addEvent(name: string, data?: unknown): void;
}
|
|
132
|
+
```
|
|
133
|
+
|
|
134
|
+
### Implementación
|
|
135
|
+
|
|
136
|
+
```typescript
|
|
137
|
+
class ProfilerImpl implements Profiler {
|
|
138
|
+
private options: ProfilerOptions;
|
|
139
|
+
private samples: ProfileSample[] = [];
|
|
140
|
+
private running: boolean = false;
|
|
141
|
+
private paused: boolean = false;
|
|
142
|
+
private startTime: number = 0;
|
|
143
|
+
private samplingTimer: NodeJS.Timer | null = null;
|
|
144
|
+
private sampleCallbacks: Set<(sample: ProfileSample) => void> = new Set();
|
|
145
|
+
private activeSpans: Map<string, { start: number; metadata: Record<string, unknown> }> = new Map();
|
|
146
|
+
|
|
147
|
+
constructor(private config: ProfilerConfig) {
|
|
148
|
+
this.options = {
|
|
149
|
+
mode: 'sampling',
|
|
150
|
+
samplingInterval: 100,
|
|
151
|
+
captureStacks: true,
|
|
152
|
+
maxSamples: 10000,
|
|
153
|
+
...config.defaultOptions
|
|
154
|
+
};
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
async start(options?: ProfilerOptions): Promise<void> {
|
|
158
|
+
if (this.running) {
|
|
159
|
+
throw new Error('Profiler already running');
|
|
160
|
+
}
|
|
161
|
+
|
|
162
|
+
this.options = { ...this.options, ...options };
|
|
163
|
+
this.samples = [];
|
|
164
|
+
this.startTime = Date.now();
|
|
165
|
+
this.running = true;
|
|
166
|
+
this.paused = false;
|
|
167
|
+
|
|
168
|
+
if (this.options.mode === 'sampling' || this.options.mode === 'both') {
|
|
169
|
+
this.startSampling();
|
|
170
|
+
}
|
|
171
|
+
|
|
172
|
+
if (this.options.mode === 'tracing' || this.options.mode === 'both') {
|
|
173
|
+
this.setupTracing();
|
|
174
|
+
}
|
|
175
|
+
}
|
|
176
|
+
|
|
177
|
+
async stop(): Promise<ProfileResult> {
|
|
178
|
+
if (!this.running) {
|
|
179
|
+
throw new Error('Profiler not running');
|
|
180
|
+
}
|
|
181
|
+
|
|
182
|
+
this.running = false;
|
|
183
|
+
|
|
184
|
+
if (this.samplingTimer) {
|
|
185
|
+
clearInterval(this.samplingTimer);
|
|
186
|
+
this.samplingTimer = null;
|
|
187
|
+
}
|
|
188
|
+
|
|
189
|
+
const endTime = Date.now();
|
|
190
|
+
const duration = endTime - this.startTime;
|
|
191
|
+
|
|
192
|
+
const result: ProfileResult = {
|
|
193
|
+
id: `profile_${this.startTime}`,
|
|
194
|
+
startTime: this.startTime,
|
|
195
|
+
endTime,
|
|
196
|
+
duration,
|
|
197
|
+
samples: this.samples,
|
|
198
|
+
summary: this.calculateSummary(),
|
|
199
|
+
hotspots: this.identifyHotspots(),
|
|
200
|
+
recommendations: this.generateRecommendations()
|
|
201
|
+
};
|
|
202
|
+
|
|
203
|
+
return result;
|
|
204
|
+
}
|
|
205
|
+
|
|
206
|
+
pause(): void {
|
|
207
|
+
this.paused = true;
|
|
208
|
+
}
|
|
209
|
+
|
|
210
|
+
resume(): void {
|
|
211
|
+
this.paused = false;
|
|
212
|
+
}
|
|
213
|
+
|
|
214
|
+
isRunning(): boolean {
|
|
215
|
+
return this.running && !this.paused;
|
|
216
|
+
}
|
|
217
|
+
|
|
218
|
+
private startSampling(): void {
|
|
219
|
+
this.samplingTimer = setInterval(() => {
|
|
220
|
+
if (this.paused) return;
|
|
221
|
+
this.collectSample();
|
|
222
|
+
}, this.options.samplingInterval);
|
|
223
|
+
}
|
|
224
|
+
|
|
225
|
+
private collectSample(): void {
|
|
226
|
+
if (this.samples.length >= (this.options.maxSamples || 10000)) {
|
|
227
|
+
// Remove oldest samples
|
|
228
|
+
this.samples.shift();
|
|
229
|
+
}
|
|
230
|
+
|
|
231
|
+
// Collect system metrics
|
|
232
|
+
const memoryUsage = process.memoryUsage();
|
|
233
|
+
|
|
234
|
+
const sample: ProfileSample = {
|
|
235
|
+
timestamp: Date.now(),
|
|
236
|
+
target: 'system',
|
|
237
|
+
type: 'function',
|
|
238
|
+
metrics: {
|
|
239
|
+
duration: this.options.samplingInterval || 100,
|
|
240
|
+
memoryDelta: memoryUsage.heapUsed
|
|
241
|
+
}
|
|
242
|
+
};
|
|
243
|
+
|
|
244
|
+
if (this.options.captureStacks) {
|
|
245
|
+
sample.stack = this.captureStack();
|
|
246
|
+
}
|
|
247
|
+
|
|
248
|
+
this.samples.push(sample);
|
|
249
|
+
this.notifySample(sample);
|
|
250
|
+
}
|
|
251
|
+
|
|
252
|
+
private captureStack(): string[] {
|
|
253
|
+
const stack = new Error().stack || '';
|
|
254
|
+
return stack
|
|
255
|
+
.split('\n')
|
|
256
|
+
.slice(3) // Remove profiler frames
|
|
257
|
+
.map(line => line.trim())
|
|
258
|
+
.filter(line => !line.includes('node_modules'));
|
|
259
|
+
}
|
|
260
|
+
|
|
261
|
+
startSpan(name: string, metadata: Record<string, unknown> = {}): ProfileSpan {
|
|
262
|
+
const spanId = `${name}_${Date.now()}`;
|
|
263
|
+
|
|
264
|
+
this.activeSpans.set(spanId, {
|
|
265
|
+
start: Date.now(),
|
|
266
|
+
metadata
|
|
267
|
+
});
|
|
268
|
+
|
|
269
|
+
return {
|
|
270
|
+
end: (endMetadata?: Record<string, unknown>) => {
|
|
271
|
+
const spanData = this.activeSpans.get(spanId);
|
|
272
|
+
if (!spanData) return;
|
|
273
|
+
|
|
274
|
+
const duration = Date.now() - spanData.start;
|
|
275
|
+
|
|
276
|
+
const sample: ProfileSample = {
|
|
277
|
+
timestamp: spanData.start,
|
|
278
|
+
target: name,
|
|
279
|
+
type: this.inferType(name),
|
|
280
|
+
metrics: { duration },
|
|
281
|
+
metadata: { ...spanData.metadata, ...endMetadata }
|
|
282
|
+
};
|
|
283
|
+
|
|
284
|
+
this.samples.push(sample);
|
|
285
|
+
this.notifySample(sample);
|
|
286
|
+
this.activeSpans.delete(spanId);
|
|
287
|
+
},
|
|
288
|
+
addEvent: (eventName: string, data?: unknown) => {
|
|
289
|
+
// Add event to span metadata
|
|
290
|
+
const spanData = this.activeSpans.get(spanId);
|
|
291
|
+
if (spanData) {
|
|
292
|
+
spanData.metadata.events = spanData.metadata.events || [];
|
|
293
|
+
(spanData.metadata.events as unknown[]).push({
|
|
294
|
+
name: eventName,
|
|
295
|
+
timestamp: Date.now(),
|
|
296
|
+
data
|
|
297
|
+
});
|
|
298
|
+
}
|
|
299
|
+
}
|
|
300
|
+
};
|
|
301
|
+
}
|
|
302
|
+
|
|
303
|
+
async measure<T>(name: string, fn: () => T | Promise<T>): Promise<T> {
|
|
304
|
+
const span = this.startSpan(name);
|
|
305
|
+
try {
|
|
306
|
+
const result = await fn();
|
|
307
|
+
span.end({ success: true });
|
|
308
|
+
return result;
|
|
309
|
+
} catch (error) {
|
|
310
|
+
span.end({ success: false, error: String(error) });
|
|
311
|
+
throw error;
|
|
312
|
+
}
|
|
313
|
+
}
|
|
314
|
+
|
|
315
|
+
private inferType(name: string): ProfileTarget['type'] {
|
|
316
|
+
if (name.startsWith('agent:') || name.includes('Agent')) return 'agent';
|
|
317
|
+
if (name.startsWith('flow:') || name.includes('Flow')) return 'flow';
|
|
318
|
+
if (name.startsWith('tool:') || name.includes('Tool')) return 'tool';
|
|
319
|
+
return 'function';
|
|
320
|
+
}
|
|
321
|
+
|
|
322
|
+
private calculateSummary(): ProfileSummary {
|
|
323
|
+
const byTarget = new Map<string, TargetStats>();
|
|
324
|
+
const byType = new Map<string, TypeStats>();
|
|
325
|
+
const durations: number[] = [];
|
|
326
|
+
|
|
327
|
+
for (const sample of this.samples) {
|
|
328
|
+
durations.push(sample.metrics.duration);
|
|
329
|
+
|
|
330
|
+
// By target
|
|
331
|
+
if (!byTarget.has(sample.target)) {
|
|
332
|
+
byTarget.set(sample.target, {
|
|
333
|
+
count: 0,
|
|
334
|
+
totalDuration: 0,
|
|
335
|
+
minDuration: Infinity,
|
|
336
|
+
maxDuration: 0,
|
|
337
|
+
avgDuration: 0
|
|
338
|
+
});
|
|
339
|
+
}
|
|
340
|
+
const targetStats = byTarget.get(sample.target)!;
|
|
341
|
+
targetStats.count++;
|
|
342
|
+
targetStats.totalDuration += sample.metrics.duration;
|
|
343
|
+
targetStats.minDuration = Math.min(targetStats.minDuration, sample.metrics.duration);
|
|
344
|
+
targetStats.maxDuration = Math.max(targetStats.maxDuration, sample.metrics.duration);
|
|
345
|
+
|
|
346
|
+
// By type
|
|
347
|
+
if (!byType.has(sample.type)) {
|
|
348
|
+
byType.set(sample.type, {
|
|
349
|
+
count: 0,
|
|
350
|
+
totalDuration: 0,
|
|
351
|
+
percentage: 0
|
|
352
|
+
});
|
|
353
|
+
}
|
|
354
|
+
const typeStats = byType.get(sample.type)!;
|
|
355
|
+
typeStats.count++;
|
|
356
|
+
typeStats.totalDuration += sample.metrics.duration;
|
|
357
|
+
}
|
|
358
|
+
|
|
359
|
+
// Calculate averages
|
|
360
|
+
for (const stats of byTarget.values()) {
|
|
361
|
+
stats.avgDuration = stats.totalDuration / stats.count;
|
|
362
|
+
}
|
|
363
|
+
|
|
364
|
+
// Calculate percentiles
|
|
365
|
+
durations.sort((a, b) => a - b);
|
|
366
|
+
const percentiles = {
|
|
367
|
+
p50: this.percentile(durations, 50),
|
|
368
|
+
p75: this.percentile(durations, 75),
|
|
369
|
+
p90: this.percentile(durations, 90),
|
|
370
|
+
p95: this.percentile(durations, 95),
|
|
371
|
+
p99: this.percentile(durations, 99)
|
|
372
|
+
};
|
|
373
|
+
|
|
374
|
+
return {
|
|
375
|
+
totalSamples: this.samples.length,
|
|
376
|
+
byTarget,
|
|
377
|
+
byType,
|
|
378
|
+
percentiles
|
|
379
|
+
};
|
|
380
|
+
}
|
|
381
|
+
|
|
382
|
+
private percentile(sorted: number[], p: number): number {
|
|
383
|
+
if (sorted.length === 0) return 0;
|
|
384
|
+
const index = Math.ceil((p / 100) * sorted.length) - 1;
|
|
385
|
+
return sorted[Math.max(0, index)];
|
|
386
|
+
}
|
|
387
|
+
|
|
388
|
+
private identifyHotspots(): Hotspot[] {
|
|
389
|
+
const summary = this.calculateSummary();
|
|
390
|
+
const totalDuration = this.samples.reduce((sum, s) => sum + s.metrics.duration, 0);
|
|
391
|
+
|
|
392
|
+
const hotspots: Hotspot[] = [];
|
|
393
|
+
|
|
394
|
+
for (const [target, stats] of summary.byTarget) {
|
|
395
|
+
hotspots.push({
|
|
396
|
+
target,
|
|
397
|
+
type: this.samples.find(s => s.target === target)?.type || 'function',
|
|
398
|
+
totalTime: stats.totalDuration,
|
|
399
|
+
percentage: (stats.totalDuration / totalDuration) * 100,
|
|
400
|
+
callCount: stats.count,
|
|
401
|
+
avgDuration: stats.avgDuration,
|
|
402
|
+
maxDuration: stats.maxDuration
|
|
403
|
+
});
|
|
404
|
+
}
|
|
405
|
+
|
|
406
|
+
// Sort by total time descending
|
|
407
|
+
hotspots.sort((a, b) => b.totalTime - a.totalTime);
|
|
408
|
+
return hotspots.slice(0, 10);
|
|
409
|
+
}
|
|
410
|
+
|
|
411
|
+
private generateRecommendations(): Recommendation[] {
|
|
412
|
+
const recommendations: Recommendation[] = [];
|
|
413
|
+
const hotspots = this.identifyHotspots();
|
|
414
|
+
const summary = this.calculateSummary();
|
|
415
|
+
|
|
416
|
+
// Check for slow agents
|
|
417
|
+
for (const hotspot of hotspots) {
|
|
418
|
+
if (hotspot.type === 'agent' && hotspot.avgDuration > 30000) {
|
|
419
|
+
recommendations.push({
|
|
420
|
+
type: 'optimization',
|
|
421
|
+
severity: 'high',
|
|
422
|
+
target: hotspot.target,
|
|
423
|
+
message: `Agent "${hotspot.target}" has high average latency (${(hotspot.avgDuration / 1000).toFixed(1)}s)`,
|
|
424
|
+
suggestion: 'Consider using a faster model (haiku) or breaking into smaller tasks'
|
|
425
|
+
});
|
|
426
|
+
}
|
|
427
|
+
}
|
|
428
|
+
|
|
429
|
+
// Check for high variance
|
|
430
|
+
if (summary.percentiles.p99 > summary.percentiles.p50 * 10) {
|
|
431
|
+
recommendations.push({
|
|
432
|
+
type: 'investigation',
|
|
433
|
+
severity: 'medium',
|
|
434
|
+
target: 'system',
|
|
435
|
+
message: 'High latency variance detected (P99 >> P50)',
|
|
436
|
+
suggestion: 'Investigate outliers - may indicate resource contention or external delays'
|
|
437
|
+
});
|
|
438
|
+
}
|
|
439
|
+
|
|
440
|
+
// Check for memory issues
|
|
441
|
+
const memorySamples = this.samples.filter(s => s.metrics.memoryDelta);
|
|
442
|
+
if (memorySamples.length > 10) {
|
|
443
|
+
const memoryTrend = this.calculateTrend(memorySamples.map(s => s.metrics.memoryDelta!));
|
|
444
|
+
if (memoryTrend > 0.1) {
|
|
445
|
+
recommendations.push({
|
|
446
|
+
type: 'warning',
|
|
447
|
+
severity: 'high',
|
|
448
|
+
target: 'memory',
|
|
449
|
+
message: 'Potential memory leak detected - heap usage trending upward',
|
|
450
|
+
suggestion: 'Review object retention, consider manual cleanup between operations'
|
|
451
|
+
});
|
|
452
|
+
}
|
|
453
|
+
}
|
|
454
|
+
|
|
455
|
+
return recommendations;
|
|
456
|
+
}
|
|
457
|
+
|
|
458
|
+
private calculateTrend(values: number[]): number {
|
|
459
|
+
if (values.length < 2) return 0;
|
|
460
|
+
const first = values.slice(0, Math.floor(values.length / 2));
|
|
461
|
+
const second = values.slice(Math.floor(values.length / 2));
|
|
462
|
+
const avgFirst = first.reduce((a, b) => a + b, 0) / first.length;
|
|
463
|
+
const avgSecond = second.reduce((a, b) => a + b, 0) / second.length;
|
|
464
|
+
return (avgSecond - avgFirst) / avgFirst;
|
|
465
|
+
}
|
|
466
|
+
|
|
467
|
+
export(format: 'json' | 'flamegraph' | 'chrome-trace'): string {
|
|
468
|
+
switch (format) {
|
|
469
|
+
case 'json':
|
|
470
|
+
return JSON.stringify({
|
|
471
|
+
samples: this.samples,
|
|
472
|
+
summary: this.calculateSummary(),
|
|
473
|
+
hotspots: this.identifyHotspots()
|
|
474
|
+
}, null, 2);
|
|
475
|
+
|
|
476
|
+
case 'flamegraph':
|
|
477
|
+
return this.generateFlameGraphData();
|
|
478
|
+
|
|
479
|
+
case 'chrome-trace':
|
|
480
|
+
return this.generateChromeTrace();
|
|
481
|
+
|
|
482
|
+
default:
|
|
483
|
+
throw new Error(`Unknown format: ${format}`);
|
|
484
|
+
}
|
|
485
|
+
}
|
|
486
|
+
|
|
487
|
+
private generateFlameGraphData(): string {
|
|
488
|
+
// Generate folded stack format for flamegraph tools
|
|
489
|
+
const lines: string[] = [];
|
|
490
|
+
|
|
491
|
+
for (const sample of this.samples) {
|
|
492
|
+
if (sample.stack && sample.stack.length > 0) {
|
|
493
|
+
const stack = sample.stack.join(';');
|
|
494
|
+
lines.push(`${stack} ${sample.metrics.duration}`);
|
|
495
|
+
}
|
|
496
|
+
}
|
|
497
|
+
|
|
498
|
+
return lines.join('\n');
|
|
499
|
+
}
|
|
500
|
+
|
|
501
|
+
private generateChromeTrace(): string {
|
|
502
|
+
// Generate Chrome DevTools trace format
|
|
503
|
+
const events: object[] = [];
|
|
504
|
+
|
|
505
|
+
for (const sample of this.samples) {
|
|
506
|
+
events.push({
|
|
507
|
+
name: sample.target,
|
|
508
|
+
cat: sample.type,
|
|
509
|
+
ph: 'X', // Complete event
|
|
510
|
+
ts: sample.timestamp * 1000, // microseconds
|
|
511
|
+
dur: sample.metrics.duration * 1000,
|
|
512
|
+
pid: 1,
|
|
513
|
+
tid: 1
|
|
514
|
+
});
|
|
515
|
+
}
|
|
516
|
+
|
|
517
|
+
return JSON.stringify({ traceEvents: events });
|
|
518
|
+
}
|
|
519
|
+
|
|
520
|
+
onSample(callback: (sample: ProfileSample) => void): () => void {
|
|
521
|
+
this.sampleCallbacks.add(callback);
|
|
522
|
+
return () => this.sampleCallbacks.delete(callback);
|
|
523
|
+
}
|
|
524
|
+
|
|
525
|
+
private notifySample(sample: ProfileSample): void {
|
|
526
|
+
this.sampleCallbacks.forEach(cb => cb(sample));
|
|
527
|
+
}
|
|
528
|
+
|
|
529
|
+
private setupTracing(): void {
|
|
530
|
+
// Hook into agent/flow/tool systems for automatic tracing
|
|
531
|
+
// This would integrate with the existing systems
|
|
532
|
+
}
|
|
533
|
+
}
|
|
534
|
+
|
|
535
|
+
interface TargetStats {
|
|
536
|
+
count: number;
|
|
537
|
+
totalDuration: number;
|
|
538
|
+
minDuration: number;
|
|
539
|
+
maxDuration: number;
|
|
540
|
+
avgDuration: number;
|
|
541
|
+
}
|
|
542
|
+
|
|
543
|
+
interface TypeStats {
|
|
544
|
+
count: number;
|
|
545
|
+
totalDuration: number;
|
|
546
|
+
percentage: number;
|
|
547
|
+
}
|
|
548
|
+
|
|
549
|
+
interface Recommendation {
|
|
550
|
+
type: 'optimization' | 'warning' | 'investigation';
|
|
551
|
+
severity: 'low' | 'medium' | 'high';
|
|
552
|
+
target: string;
|
|
553
|
+
message: string;
|
|
554
|
+
suggestion: string;
|
|
555
|
+
}
|
|
556
|
+
```
|
|
557
|
+
|
|
558
|
+
---
|
|
559
|
+
|
|
560
|
+
## 2. BottleneckDetector
|
|
561
|
+
|
|
562
|
+
### Propósito
|
|
563
|
+
Identifica automáticamente cuellos de botella y sus causas raíz.
|
|
564
|
+
|
|
565
|
+
### Interfaz
|
|
566
|
+
|
|
567
|
+
```typescript
|
|
568
|
+
/** A detected performance bottleneck. */
interface Bottleneck {
  id: string;
  type: 'latency' | 'throughput' | 'resource' | 'dependency';
  /** Where the bottleneck lives (agent/flow/tool name, or 'system'). */
  location: string;
  severity: 'low' | 'medium' | 'high' | 'critical';
  impact: {
    latencyIncrease: number; // percentage
    throughputDecrease: number;
    affectedOperations: string[];
  };
  /** Filled in by findRootCause(); absent until analyzed. */
  rootCause?: RootCause;
  /** ISO-8601 timestamp of detection. */
  detectedAt: string;
}

/** The inferred underlying cause of a bottleneck. */
interface RootCause {
  type: 'slow-model' | 'large-context' | 'serial-execution' | 'memory-pressure' | 'network' | 'external-api';
  confidence: number; // 0-1
  /** Observations supporting the diagnosis. */
  evidence: string[];
  relatedMetrics: Record<string, number>;
}

/** Identifies bottlenecks in profile data and proposes remediations. */
interface BottleneckDetector {
  // Detection
  analyze(profileResult: ProfileResult): Bottleneck[];
  detectRealtime(samples: ProfileSample[]): Bottleneck[];

  // Root cause analysis
  findRootCause(bottleneck: Bottleneck): Promise<RootCause>;
  // NOTE(review): CorrelationResult is not declared anywhere in this document — confirm its shape.
  correlate(bottlenecks: Bottleneck[]): CorrelationResult;

  // Suggestions
  suggest(bottleneck: Bottleneck): Suggestion[];
  /** Orders bottlenecks by severity, then impact (most urgent first). */
  prioritize(bottlenecks: Bottleneck[]): Bottleneck[];
}

/** A single remediation option with its cost/benefit estimate. */
interface Suggestion {
  action: string;
  expectedImprovement: string;
  effort: 'low' | 'medium' | 'high';
  risk: 'low' | 'medium' | 'high';
}
|
|
609
|
+
```
|
|
610
|
+
|
|
611
|
+
### Implementación
|
|
612
|
+
|
|
613
|
+
```typescript
|
|
614
|
+
class BottleneckDetectorImpl implements BottleneckDetector {
|
|
615
|
+
private thresholds: BottleneckThresholds;
|
|
616
|
+
|
|
617
|
+
constructor(config: BottleneckDetectorConfig) {
|
|
618
|
+
this.thresholds = {
|
|
619
|
+
latencyP95: 30000, // 30s
|
|
620
|
+
latencyP99: 60000, // 60s
|
|
621
|
+
throughputMin: 0.1, // ops/sec
|
|
622
|
+
memoryPressure: 0.8, // 80% heap
|
|
623
|
+
...config.thresholds
|
|
624
|
+
};
|
|
625
|
+
}
|
|
626
|
+
|
|
627
|
+
analyze(profileResult: ProfileResult): Bottleneck[] {
|
|
628
|
+
const bottlenecks: Bottleneck[] = [];
|
|
629
|
+
|
|
630
|
+
// Analyze hotspots
|
|
631
|
+
for (const hotspot of profileResult.hotspots) {
|
|
632
|
+
if (hotspot.percentage > 30) {
|
|
633
|
+
bottlenecks.push(this.createBottleneck(
|
|
634
|
+
'latency',
|
|
635
|
+
hotspot.target,
|
|
636
|
+
hotspot.percentage > 50 ? 'high' : 'medium',
|
|
637
|
+
{
|
|
638
|
+
latencyIncrease: hotspot.percentage,
|
|
639
|
+
throughputDecrease: hotspot.percentage * 0.8,
|
|
640
|
+
affectedOperations: [hotspot.target]
|
|
641
|
+
}
|
|
642
|
+
));
|
|
643
|
+
}
|
|
644
|
+
}
|
|
645
|
+
|
|
646
|
+
// Check percentiles
|
|
647
|
+
const { percentiles } = profileResult.summary;
|
|
648
|
+
if (percentiles.p95 > this.thresholds.latencyP95) {
|
|
649
|
+
bottlenecks.push(this.createBottleneck(
|
|
650
|
+
'latency',
|
|
651
|
+
'system',
|
|
652
|
+
percentiles.p99 > this.thresholds.latencyP99 ? 'critical' : 'high',
|
|
653
|
+
{
|
|
654
|
+
latencyIncrease: (percentiles.p95 / 10000) * 100,
|
|
655
|
+
throughputDecrease: 50,
|
|
656
|
+
affectedOperations: ['all']
|
|
657
|
+
}
|
|
658
|
+
));
|
|
659
|
+
}
|
|
660
|
+
|
|
661
|
+
// Look for serial execution patterns
|
|
662
|
+
const serialPattern = this.detectSerialExecution(profileResult.samples);
|
|
663
|
+
if (serialPattern) {
|
|
664
|
+
bottlenecks.push(this.createBottleneck(
|
|
665
|
+
'throughput',
|
|
666
|
+
serialPattern.location,
|
|
667
|
+
'medium',
|
|
668
|
+
{
|
|
669
|
+
latencyIncrease: 0,
|
|
670
|
+
throughputDecrease: serialPattern.impact,
|
|
671
|
+
affectedOperations: serialPattern.operations
|
|
672
|
+
}
|
|
673
|
+
));
|
|
674
|
+
}
|
|
675
|
+
|
|
676
|
+
return this.prioritize(bottlenecks);
|
|
677
|
+
}
|
|
678
|
+
|
|
679
|
+
async findRootCause(bottleneck: Bottleneck): Promise<RootCause> {
|
|
680
|
+
const evidence: string[] = [];
|
|
681
|
+
let type: RootCause['type'] = 'slow-model';
|
|
682
|
+
let confidence = 0.5;
|
|
683
|
+
|
|
684
|
+
// Analyze based on bottleneck type and location
|
|
685
|
+
if (bottleneck.location.includes('agent')) {
|
|
686
|
+
// Check if it's a model issue
|
|
687
|
+
if (bottleneck.location.includes('opus')) {
|
|
688
|
+
type = 'slow-model';
|
|
689
|
+
evidence.push('Using opus model which has higher latency');
|
|
690
|
+
confidence = 0.8;
|
|
691
|
+
}
|
|
692
|
+
|
|
693
|
+
// Check for large context
|
|
694
|
+
const contextMetric = bottleneck.impact.latencyIncrease;
|
|
695
|
+
if (contextMetric > 50) {
|
|
696
|
+
type = 'large-context';
|
|
697
|
+
evidence.push('High latency correlated with large input context');
|
|
698
|
+
confidence = 0.7;
|
|
699
|
+
}
|
|
700
|
+
}
|
|
701
|
+
|
|
702
|
+
if (bottleneck.type === 'throughput') {
|
|
703
|
+
type = 'serial-execution';
|
|
704
|
+
evidence.push('Operations executing sequentially instead of parallel');
|
|
705
|
+
confidence = 0.75;
|
|
706
|
+
}
|
|
707
|
+
|
|
708
|
+
if (bottleneck.type === 'resource') {
|
|
709
|
+
type = 'memory-pressure';
|
|
710
|
+
evidence.push('High memory usage detected during operation');
|
|
711
|
+
confidence = 0.7;
|
|
712
|
+
}
|
|
713
|
+
|
|
714
|
+
return {
|
|
715
|
+
type,
|
|
716
|
+
confidence,
|
|
717
|
+
evidence,
|
|
718
|
+
relatedMetrics: {
|
|
719
|
+
avgLatency: bottleneck.impact.latencyIncrease,
|
|
720
|
+
throughput: 100 - bottleneck.impact.throughputDecrease
|
|
721
|
+
}
|
|
722
|
+
};
|
|
723
|
+
}
|
|
724
|
+
|
|
725
|
+
suggest(bottleneck: Bottleneck): Suggestion[] {
|
|
726
|
+
const suggestions: Suggestion[] = [];
|
|
727
|
+
|
|
728
|
+
switch (bottleneck.rootCause?.type) {
|
|
729
|
+
case 'slow-model':
|
|
730
|
+
suggestions.push({
|
|
731
|
+
action: 'Switch to haiku model for exploration tasks',
|
|
732
|
+
expectedImprovement: '60-70% latency reduction',
|
|
733
|
+
effort: 'low',
|
|
734
|
+
risk: 'low'
|
|
735
|
+
});
|
|
736
|
+
suggestions.push({
|
|
737
|
+
action: 'Use model cascade (haiku → sonnet → opus)',
|
|
738
|
+
expectedImprovement: '40-50% cost and latency reduction',
|
|
739
|
+
effort: 'medium',
|
|
740
|
+
risk: 'low'
|
|
741
|
+
});
|
|
742
|
+
break;
|
|
743
|
+
|
|
744
|
+
case 'large-context':
|
|
745
|
+
suggestions.push({
|
|
746
|
+
action: 'Implement context windowing',
|
|
747
|
+
expectedImprovement: '30-40% latency reduction',
|
|
748
|
+
effort: 'medium',
|
|
749
|
+
risk: 'medium'
|
|
750
|
+
});
|
|
751
|
+
suggestions.push({
|
|
752
|
+
action: 'Use summarization for long conversations',
|
|
753
|
+
expectedImprovement: '20-30% token reduction',
|
|
754
|
+
effort: 'medium',
|
|
755
|
+
risk: 'low'
|
|
756
|
+
});
|
|
757
|
+
break;
|
|
758
|
+
|
|
759
|
+
case 'serial-execution':
|
|
760
|
+
suggestions.push({
|
|
761
|
+
action: 'Enable parallel execution for independent tasks',
|
|
762
|
+
expectedImprovement: 'Up to Nx speedup (N = parallel tasks)',
|
|
763
|
+
effort: 'low',
|
|
764
|
+
risk: 'low'
|
|
765
|
+
});
|
|
766
|
+
suggestions.push({
|
|
767
|
+
action: 'Use wave-based execution pattern',
|
|
768
|
+
expectedImprovement: '2-4x throughput improvement',
|
|
769
|
+
effort: 'medium',
|
|
770
|
+
risk: 'low'
|
|
771
|
+
});
|
|
772
|
+
break;
|
|
773
|
+
|
|
774
|
+
case 'memory-pressure':
|
|
775
|
+
suggestions.push({
|
|
776
|
+
action: 'Implement streaming for large outputs',
|
|
777
|
+
expectedImprovement: '50% memory reduction',
|
|
778
|
+
effort: 'medium',
|
|
779
|
+
risk: 'low'
|
|
780
|
+
});
|
|
781
|
+
suggestions.push({
|
|
782
|
+
action: 'Clear caches between operations',
|
|
783
|
+
expectedImprovement: '20-30% memory reduction',
|
|
784
|
+
effort: 'low',
|
|
785
|
+
risk: 'low'
|
|
786
|
+
});
|
|
787
|
+
break;
|
|
788
|
+
}
|
|
789
|
+
|
|
790
|
+
return suggestions;
|
|
791
|
+
}
|
|
792
|
+
|
|
793
|
+
prioritize(bottlenecks: Bottleneck[]): Bottleneck[] {
|
|
794
|
+
const severityScore: Record<string, number> = {
|
|
795
|
+
critical: 4,
|
|
796
|
+
high: 3,
|
|
797
|
+
medium: 2,
|
|
798
|
+
low: 1
|
|
799
|
+
};
|
|
800
|
+
|
|
801
|
+
return bottlenecks.sort((a, b) => {
|
|
802
|
+
// First by severity
|
|
803
|
+
const severityDiff = severityScore[b.severity] - severityScore[a.severity];
|
|
804
|
+
if (severityDiff !== 0) return severityDiff;
|
|
805
|
+
|
|
806
|
+
// Then by impact
|
|
807
|
+
return b.impact.latencyIncrease - a.impact.latencyIncrease;
|
|
808
|
+
});
|
|
809
|
+
}
|
|
810
|
+
|
|
811
|
+
private createBottleneck(
|
|
812
|
+
type: Bottleneck['type'],
|
|
813
|
+
location: string,
|
|
814
|
+
severity: Bottleneck['severity'],
|
|
815
|
+
impact: Bottleneck['impact']
|
|
816
|
+
): Bottleneck {
|
|
817
|
+
return {
|
|
818
|
+
id: `bn_${Date.now()}_${Math.random().toString(36).slice(2, 6)}`,
|
|
819
|
+
type,
|
|
820
|
+
location,
|
|
821
|
+
severity,
|
|
822
|
+
impact,
|
|
823
|
+
detectedAt: new Date().toISOString()
|
|
824
|
+
};
|
|
825
|
+
}
|
|
826
|
+
|
|
827
|
+
private detectSerialExecution(samples: ProfileSample[]): {
|
|
828
|
+
location: string;
|
|
829
|
+
impact: number;
|
|
830
|
+
operations: string[];
|
|
831
|
+
} | null {
|
|
832
|
+
// Detect if operations that could be parallel are running serially
|
|
833
|
+
const agentSamples = samples.filter(s => s.type === 'agent');
|
|
834
|
+
|
|
835
|
+
// Group by time windows
|
|
836
|
+
const windows: ProfileSample[][] = [];
|
|
837
|
+
let currentWindow: ProfileSample[] = [];
|
|
838
|
+
let lastEnd = 0;
|
|
839
|
+
|
|
840
|
+
for (const sample of agentSamples) {
|
|
841
|
+
if (sample.timestamp > lastEnd + 1000) {
|
|
842
|
+
if (currentWindow.length > 0) {
|
|
843
|
+
windows.push(currentWindow);
|
|
844
|
+
}
|
|
845
|
+
currentWindow = [sample];
|
|
846
|
+
} else {
|
|
847
|
+
currentWindow.push(sample);
|
|
848
|
+
}
|
|
849
|
+
lastEnd = sample.timestamp + sample.metrics.duration;
|
|
850
|
+
}
|
|
851
|
+
|
|
852
|
+
// Check if any window has only 1 sample (serial) when it could have more
|
|
853
|
+
const serialWindows = windows.filter(w => w.length === 1);
|
|
854
|
+
if (serialWindows.length > windows.length * 0.7) {
|
|
855
|
+
return {
|
|
856
|
+
location: 'agent-execution',
|
|
857
|
+
impact: 50, // Estimated 50% throughput loss
|
|
858
|
+
operations: serialWindows.map(w => w[0].target)
|
|
859
|
+
};
|
|
860
|
+
}
|
|
861
|
+
|
|
862
|
+
return null;
|
|
863
|
+
}
|
|
864
|
+
}
|
|
865
|
+
```
|
|
866
|
+
|
|
867
|
+
---
|
|
868
|
+
|
|
869
|
+
## 3. Performance Dashboard
|
|
870
|
+
|
|
871
|
+
### ASCII Dashboard
|
|
872
|
+
|
|
873
|
+
```
|
|
874
|
+
╔══════════════════════════════════════════════════════════════════════════════╗
|
|
875
|
+
║ ELSABRO Performance Dashboard ║
|
|
876
|
+
╠══════════════════════════════════════════════════════════════════════════════╣
|
|
877
|
+
║ Profile: flow_abc123 │ Duration: 5m 32s │ Samples: 3,284 ║
|
|
878
|
+
╠══════════════════════════════════════════════════════════════════════════════╣
|
|
879
|
+
║ Latency Percentiles ║
|
|
880
|
+
├──────────────────────────────────────────────────────────────────────────────┤
|
|
881
|
+
║ P50: 2.3s ████████░░░░░░░░░░░░ ║
|
|
882
|
+
║ P75: 4.8s ████████████████░░░░ ║
|
|
883
|
+
║ P90: 8.2s ██████████████████████████░░░░ ║
|
|
884
|
+
║ P95: 12.5s ████████████████████████████████████░░░░ ║
|
|
885
|
+
║ P99: 28.3s ████████████████████████████████████████████████████████████ ║
|
|
886
|
+
╠══════════════════════════════════════════════════════════════════════════════╣
|
|
887
|
+
║ Top Hotspots ║
|
|
888
|
+
├──────────────────────────────────────────────────────────────────────────────┤
|
|
889
|
+
║ 1. agent:elsabro-executor 42.3% ████████████████████████████░░░░ ║
|
|
890
|
+
║ 2. agent:elsabro-verifier 18.7% ████████████░░░░░░░░░░░░░░░░░░░░ ║
|
|
891
|
+
║ 3. tool:Read 8.2% █████░░░░░░░░░░░░░░░░░░░░░░░░░░░ ║
|
|
892
|
+
║ 4. agent:Explore 6.5% ████░░░░░░░░░░░░░░░░░░░░░░░░░░░░ ║
|
|
893
|
+
║ 5. flow:parallel-join 4.1% ███░░░░░░░░░░░░░░░░░░░░░░░░░░░░░ ║
|
|
894
|
+
╠══════════════════════════════════════════════════════════════════════════════╣
|
|
895
|
+
║ Bottlenecks Detected ║
|
|
896
|
+
├──────────────────────────────────────────────────────────────────────────────┤
|
|
897
|
+
║ 🔴 HIGH: agent:elsabro-executor - Slow model latency ║
|
|
898
|
+
║ └─ Suggestion: Use haiku for exploration, opus only for implementation ║
|
|
899
|
+
║ ║
|
|
900
|
+
║ 🟡 MEDIUM: flow:development - Serial execution detected ║
|
|
901
|
+
║ └─ Suggestion: Enable parallel branches for independent tasks ║
|
|
902
|
+
╠══════════════════════════════════════════════════════════════════════════════╣
|
|
903
|
+
║ Resource Usage ║
|
|
904
|
+
├──────────────────────────────────────────────────────────────────────────────┤
|
|
905
|
+
║ Memory: ████████████████░░░░ 78% (312MB / 400MB) ║
|
|
906
|
+
║ Tokens: ████████░░░░░░░░░░░░ 42% (84K / 200K budget) ║
|
|
907
|
+
║ API: ██████████████░░░░░░ 68% (34 / 50 req/min) ║
|
|
908
|
+
╠══════════════════════════════════════════════════════════════════════════════╣
|
|
909
|
+
║ Recommendations ║
|
|
910
|
+
├──────────────────────────────────────────────────────────────────────────────┤
|
|
911
|
+
║ 1. [HIGH] Switch exploration agents to haiku model ║
|
|
912
|
+
║ Expected: 60-70% latency reduction | Effort: Low | Risk: Low ║
|
|
913
|
+
║ ║
|
|
914
|
+
║ 2. [MEDIUM] Enable parallel execution in development flow ║
|
|
915
|
+
║ Expected: 2-4x throughput improvement | Effort: Low | Risk: Low ║
|
|
916
|
+
║ ║
|
|
917
|
+
║ 3. [LOW] Implement context summarization for long sessions ║
|
|
918
|
+
║ Expected: 20-30% token reduction | Effort: Medium | Risk: Low ║
|
|
919
|
+
╚══════════════════════════════════════════════════════════════════════════════╝
|
|
920
|
+
```
|
|
921
|
+
|
|
922
|
+
---
|
|
923
|
+
|
|
924
|
+
## 4. Comandos CLI
|
|
925
|
+
|
|
926
|
+
```bash
|
|
927
|
+
# Iniciar profiling
|
|
928
|
+
/elsabro:profile start
|
|
929
|
+
|
|
930
|
+
# Detener y ver resultados
|
|
931
|
+
/elsabro:profile stop
|
|
932
|
+
|
|
933
|
+
# Ver dashboard en tiempo real
|
|
934
|
+
/elsabro:profile dashboard
|
|
935
|
+
|
|
936
|
+
# Exportar resultados
|
|
937
|
+
/elsabro:profile export --format flamegraph --output profile.svg
|
|
938
|
+
|
|
939
|
+
# Analizar bottlenecks
|
|
940
|
+
/elsabro:profile bottlenecks
|
|
941
|
+
|
|
942
|
+
# Ver recomendaciones
|
|
943
|
+
/elsabro:profile recommendations
|
|
944
|
+
|
|
945
|
+
# Benchmark específico
|
|
946
|
+
/elsabro:profile benchmark agent:elsabro-executor --iterations 10
|
|
947
|
+
```
|
|
948
|
+
|
|
949
|
+
---
|
|
950
|
+
|
|
951
|
+
## Referencias
|
|
952
|
+
|
|
953
|
+
- **REF-013**: Observability System
|
|
954
|
+
- **REF-020**: Cost Tracking
|
|
955
|
+
- **REF-031**: Esta referencia (Performance Profiler)
|