@testsmith/perfornium 0.2.0 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -32,6 +32,9 @@ export declare class MetricsCollector extends EventEmitter {
32
32
  private errorDetails;
33
33
  private vuStartEvents;
34
34
  private loadPatternType;
35
+ private runningStats;
36
+ private readonly maxDurationsForPercentiles;
37
+ private readonly maxStoredResults;
35
38
  private realtimeConfig;
36
39
  private batchBuffer;
37
40
  private batchTimer;
@@ -11,6 +11,18 @@ class MetricsCollector extends events_1.EventEmitter {
11
11
  this.errorDetails = new Map();
12
12
  this.vuStartEvents = [];
13
13
  this.loadPatternType = 'basic';
14
+ // Running statistics (accurate even when individual results are dropped)
15
+ this.runningStats = {
16
+ totalRequests: 0,
17
+ successfulRequests: 0,
18
+ failedRequests: 0,
19
+ totalDuration: 0, // Sum of all durations for averaging
20
+ minDuration: Infinity,
21
+ maxDuration: 0,
22
+ durations: [], // For percentile calculation (limited size)
23
+ };
24
+ this.maxDurationsForPercentiles = 10000; // Keep last N for percentiles
25
+ this.maxStoredResults = 50000; // Max individual results to keep in memory
14
26
  this.batchBuffer = [];
15
27
  this.batchTimer = null;
16
28
  this.batchCounter = 0;
@@ -95,6 +107,16 @@ class MetricsCollector extends events_1.EventEmitter {
95
107
  this.batchBuffer = [];
96
108
  this.batchCounter = 0;
97
109
  this.csvHeaderWritten = false;
110
+ // Reset running statistics
111
+ this.runningStats = {
112
+ totalRequests: 0,
113
+ successfulRequests: 0,
114
+ failedRequests: 0,
115
+ totalDuration: 0,
116
+ minDuration: Infinity,
117
+ maxDuration: 0,
118
+ durations: [],
119
+ };
98
120
  if (this.realtimeConfig.enabled && this.realtimeConfig.interval_ms) {
99
121
  this.startBatchTimer();
100
122
  }
@@ -107,14 +129,44 @@ class MetricsCollector extends events_1.EventEmitter {
107
129
  });
108
130
  }
109
131
  recordResult(result) {
110
- this.results.push(result);
132
+ // Update running statistics (always accurate regardless of stored results)
133
+ this.runningStats.totalRequests++;
134
+ if (result.success) {
135
+ this.runningStats.successfulRequests++;
136
+ const duration = result.duration || 0;
137
+ this.runningStats.totalDuration += duration;
138
+ this.runningStats.minDuration = Math.min(this.runningStats.minDuration, duration);
139
+ this.runningStats.maxDuration = Math.max(this.runningStats.maxDuration, duration);
140
+ // Keep limited durations for percentile calculation (reservoir sampling)
141
+ if (this.runningStats.durations.length < this.maxDurationsForPercentiles) {
142
+ this.runningStats.durations.push(duration);
143
+ }
144
+ else {
145
+ // Randomly replace an existing duration (reservoir sampling)
146
+ const replaceIndex = Math.floor(Math.random() * this.runningStats.totalRequests);
147
+ if (replaceIndex < this.maxDurationsForPercentiles) {
148
+ this.runningStats.durations[replaceIndex] = duration;
149
+ }
150
+ }
151
+ }
152
+ else {
153
+ this.runningStats.failedRequests++;
154
+ }
155
+ // Store result only if under limit (for detailed analysis)
156
+ if (this.results.length < this.maxStoredResults) {
157
+ this.results.push(result);
158
+ }
111
159
  this.emit('result', result);
112
160
  // Track detailed error information
113
161
  if (!result.success) {
114
162
  this.trackErrorDetail(result);
115
163
  }
116
- // Add to batch buffer for real-time processing
164
+ // Add to batch buffer for real-time processing (with safety limit)
117
165
  if (this.realtimeConfig.enabled) {
166
+ // Safety limit: if buffer exceeds 1000 items, force flush to prevent memory issues
167
+ if (this.batchBuffer.length >= 1000) {
168
+ this.flushBatch();
169
+ }
118
170
  this.batchBuffer.push(result);
119
171
  // Check if we should flush based on batch size (if not using intervals)
120
172
  if (!this.realtimeConfig.interval_ms) {
@@ -509,21 +561,25 @@ class MetricsCollector extends events_1.EventEmitter {
509
561
  getResults() {
510
562
  return [...this.results];
511
563
  }
512
- // Add method to configure output paths without recreating the collector
513
- // Add method to disable incremental files if needed
514
564
  getSummary() {
515
- const totalRequests = this.results.length;
516
- const successfulRequests = this.results.filter(r => r.success).length;
517
- const failedRequests = totalRequests - successfulRequests;
518
- const durations = this.results.filter(r => r.success).map(r => r.duration);
565
+ // Use running statistics for accurate totals (even when individual results are limited)
566
+ const totalRequests = this.runningStats.totalRequests;
567
+ const successfulRequests = this.runningStats.successfulRequests;
568
+ const failedRequests = this.runningStats.failedRequests;
569
+ // Use sampled durations for percentiles (reservoir sampling ensures representative sample)
570
+ const durations = this.runningStats.durations;
519
571
  const totalDuration = (Date.now() - this.startTime) / 1000;
520
- // Error distribution by error message
572
+ // Calculate average from running totals (accurate even with limited stored results)
573
+ const avgResponseTime = successfulRequests > 0
574
+ ? this.runningStats.totalDuration / successfulRequests
575
+ : 0;
576
+ // Error distribution from stored results (may be limited but representative)
521
577
  const errorDistribution = {};
522
578
  this.results.filter(r => !r.success).forEach(r => {
523
579
  const error = r.error || 'Unknown error';
524
580
  errorDistribution[error] = (errorDistribution[error] || 0) + 1;
525
581
  });
526
- // Status code distribution
582
+ // Status code distribution from stored results
527
583
  const statusDistribution = {};
528
584
  this.results.forEach(r => {
529
585
  if (r.status) {
@@ -538,9 +594,9 @@ class MetricsCollector extends events_1.EventEmitter {
538
594
  successful_requests: successfulRequests,
539
595
  failed_requests: failedRequests,
540
596
  success_rate: totalRequests > 0 ? (successfulRequests / totalRequests) * 100 : 0,
541
- avg_response_time: durations.length > 0 ? durations.reduce((a, b) => a + b, 0) / durations.length : 0,
542
- min_response_time: durations.length > 0 ? Math.min(...durations) : 0,
543
- max_response_time: durations.length > 0 ? Math.max(...durations) : 0,
597
+ avg_response_time: avgResponseTime,
598
+ min_response_time: this.runningStats.minDuration === Infinity ? 0 : this.runningStats.minDuration,
599
+ max_response_time: this.runningStats.maxDuration,
544
600
  percentiles: this.calculatePercentiles(durations),
545
601
  requests_per_second: totalDuration > 0 ? (totalRequests / totalDuration) : 0,
546
602
  bytes_per_second: responseSizes.length > 0 && totalDuration > 0
@@ -64,4 +64,10 @@ export declare class RESTHandler implements ProtocolHandler {
64
64
  * Format: "iteration. step_name vu_id-iteration"
65
65
  */
66
66
  private generateThreadName;
67
+ /**
68
+ * Normalize debug config to support user-friendly aliases
69
+ * Maps: log_requests, log_responses, log_headers, log_body, log_timings
70
+ * To internal names used by the handler
71
+ */
72
+ private normalizeDebugConfig;
67
73
  }
@@ -44,7 +44,8 @@ const logger_1 = require("../../utils/logger");
44
44
  class RESTHandler {
45
45
  constructor(baseURL, defaultHeaders, timeout, debugConfig) {
46
46
  this.connectionTimings = new Map();
47
- this.debugConfig = debugConfig;
47
+ // Normalize debug config to support user-friendly aliases
48
+ this.debugConfig = this.normalizeDebugConfig(debugConfig);
48
49
  // Create custom HTTP agent with socket timing hooks
49
50
  const httpAgent = new http.Agent({
50
51
  keepAlive: true,
@@ -772,5 +773,32 @@ class RESTHandler {
772
773
  const vuId = context.vu_id;
773
774
  return `${iteration}. ${stepName} ${vuId}-${iteration}`;
774
775
  }
776
+ /**
777
+ * Normalize debug config to support user-friendly aliases
778
+ * Maps: log_requests, log_responses, log_headers, log_body, log_timings
779
+ * To internal names used by the handler
780
+ */
781
+ normalizeDebugConfig(config) {
782
+ if (!config)
783
+ return undefined;
784
+ const normalized = { ...config };
785
+ // If any of the user-friendly log_* options are set, enable debug logging
786
+ const hasUserFriendlyOptions = config.log_requests || config.log_responses ||
787
+ config.log_headers || config.log_body || config.log_timings;
788
+ if (hasUserFriendlyOptions) {
789
+ // Set log level to debug to enable logging
790
+ normalized.log_level = normalized.log_level || 'debug';
791
+ // Map user-friendly names to internal names
792
+ if (config.log_headers) {
793
+ normalized.capture_request_headers = true;
794
+ normalized.capture_response_headers = true;
795
+ }
796
+ if (config.log_body) {
797
+ normalized.capture_request_body = true;
798
+ normalized.capture_response_body = true;
799
+ }
800
+ }
801
+ return normalized;
802
+ }
775
803
  }
776
804
  exports.RESTHandler = RESTHandler;
@@ -286,21 +286,31 @@ class VerificationMetricsCollector {
286
286
  }
287
287
  catch (error) {
288
288
  error_message = error.message;
289
- throw error;
290
- }
291
- finally {
292
- // Round to 1 decimal place for cleaner output
289
+ // Calculate duration and metrics before re-throwing
293
290
  const duration = Math.round((performance.now() - startTime) * 10) / 10;
294
291
  const metrics = {
295
292
  step_name: stepName,
296
293
  step_type: stepType,
297
294
  duration,
298
- success,
295
+ success: false,
299
296
  error_message,
300
297
  ...additionalMetrics
301
298
  };
302
- return { result: result, metrics };
299
+ // Attach metrics to error so caller can access them
300
+ error.verificationMetrics = metrics;
301
+ throw error;
303
302
  }
303
+ // Round to 1 decimal place for cleaner output
304
+ const duration = Math.round((performance.now() - startTime) * 10) / 10;
305
+ const metrics = {
306
+ step_name: stepName,
307
+ step_type: stepType,
308
+ duration,
309
+ success,
310
+ error_message,
311
+ ...additionalMetrics
312
+ };
313
+ return { result: result, metrics };
304
314
  }
305
315
  static generateVerificationThresholds(metrics) {
306
316
  const thresholds = [];
@@ -196,6 +196,27 @@ class WebHandler {
196
196
  'measure_web_vitals', 'performance_audit'
197
197
  ];
198
198
  const shouldRecord = measurableCommands.includes(action.command);
199
+ // Check if verification took too long (>= 95% of timeout = effective timeout)
200
+ // If it completed right at the timeout boundary, treat it as a timeout failure
201
+ const timeout = action.timeout || 30000;
202
+ const timeoutThreshold = timeout * 0.95; // 95% of timeout
203
+ const isEffectiveTimeout = responseTime && responseTime >= timeoutThreshold && measurableCommands.includes(action.command);
204
+ if (isEffectiveTimeout) {
205
+ return {
206
+ success: false,
207
+ error: `Verification timeout: took ${responseTime}ms (>= ${timeoutThreshold}ms threshold)`,
208
+ shouldRecord: true,
209
+ response_time: responseTime,
210
+ custom_metrics: {
211
+ page_url: page.url(),
212
+ page_title: await page.title(),
213
+ vu_id: context.vu_id,
214
+ command: action.command,
215
+ timeout_threshold: timeoutThreshold,
216
+ verification_metrics: verificationMetrics
217
+ }
218
+ };
219
+ }
199
220
  const enhancedResult = {
200
221
  success: true,
201
222
  data: result,
@@ -217,14 +238,28 @@ class WebHandler {
217
238
  return enhancedResult;
218
239
  }
219
240
  catch (error) {
241
+ // Only record errors for measurable commands (verifications/waits) in step statistics
242
+ // Non-measurable command errors (click, fill, etc.) still appear in the errors table
243
+ // via the error tracking in virtual-user.ts, but not in step performance statistics
244
+ const measurableCommands = [
245
+ 'verify_exists', 'verify_visible', 'verify_text', 'verify_contains', 'verify_not_exists',
246
+ 'wait_for_selector', 'wait_for_text',
247
+ 'measure_web_vitals', 'performance_audit'
248
+ ];
249
+ const shouldRecordError = measurableCommands.includes(action.command);
250
+ // Get verification metrics from error if available (attached by measureVerificationStep)
251
+ const verificationMetrics = error.verificationMetrics;
220
252
  return {
221
253
  success: false,
222
254
  error: error.message,
223
- shouldRecord: true, // Record errors too for analysis
255
+ shouldRecord: shouldRecordError,
256
+ response_time: verificationMetrics?.duration,
224
257
  custom_metrics: {
225
258
  vu_id: context.vu_id,
259
+ command: action.command,
226
260
  error_type: error.constructor.name,
227
- error_stack: error.stack?.split('\n').slice(0, 3).join('; ')
261
+ error_stack: error.stack?.split('\n').slice(0, 3).join('; '),
262
+ verification_metrics: verificationMetrics
228
263
  }
229
264
  };
230
265
  }
@@ -6,6 +6,7 @@ export declare class WorkerManager extends EventEmitter {
6
6
  private aggregatedMetrics;
7
7
  addWorker(address: string): Promise<void>;
8
8
  distributeTest(config: TestConfiguration): Promise<void>;
9
+ private scaleLoadConfig;
9
10
  waitForCompletion(): Promise<void>;
10
11
  getAggregatedMetrics(): MetricsCollector;
11
12
  cleanup(): Promise<void>;
@@ -48,19 +48,36 @@ class WorkerManager extends events_1.EventEmitter {
48
48
  const workerVUs = Math.min(vusPerWorker, totalVUs - (index * vusPerWorker));
49
49
  if (workerVUs <= 0)
50
50
  return;
51
+ // Properly scale load config for this worker
52
+ const scaledLoad = this.scaleLoadConfig(config.load, workerVUs);
51
53
  const workerConfig = {
52
54
  ...config,
53
55
  name: `${config.name} - Worker ${index + 1}`,
54
- load: {
55
- ...config.load,
56
- virtual_users: workerVUs
57
- }
56
+ load: scaledLoad
58
57
  };
59
58
  logger_1.logger.debug(`🎯 Assigning ${workerVUs} VUs to worker ${worker.getAddress()}`);
60
59
  return worker.executeTest(workerConfig);
61
60
  });
62
61
  await Promise.all(promises);
63
62
  }
63
+ scaleLoadConfig(load, vus) {
64
+ if (Array.isArray(load)) {
65
+ // Scale each phase in the array
66
+ return load.map(phase => ({
67
+ ...phase,
68
+ virtual_users: vus,
69
+ vus: vus
70
+ }));
71
+ }
72
+ else {
73
+ // Single phase object
74
+ return {
75
+ ...load,
76
+ virtual_users: vus,
77
+ vus: vus
78
+ };
79
+ }
80
+ }
64
81
  async waitForCompletion() {
65
82
  logger_1.logger.info('⏳ Waiting for all workers to complete...');
66
83
  const promises = this.workers.map(worker => worker.waitForCompletion());
@@ -72,9 +89,19 @@ class WorkerManager extends events_1.EventEmitter {
72
89
  }
73
90
  async cleanup() {
74
91
  logger_1.logger.info('🧹 Cleaning up workers...');
92
+ // Remove all event listeners from workers before disconnecting
93
+ for (const worker of this.workers) {
94
+ worker.removeAllListeners();
95
+ }
96
+ // Disconnect all workers
75
97
  const promises = this.workers.map(worker => worker.disconnect());
76
98
  await Promise.all(promises);
77
99
  this.workers = [];
100
+ // Finalize metrics to clear any timers
101
+ await this.aggregatedMetrics.finalize();
102
+ // Remove our own listeners
103
+ this.removeAllListeners();
104
+ logger_1.logger.info('✅ Cleanup completed');
78
105
  }
79
106
  getWorkerCount() {
80
107
  return this.workers.length;
@@ -133,7 +133,10 @@ class WorkerNode extends events_1.EventEmitter {
133
133
  async disconnect() {
134
134
  this.isRunning = false;
135
135
  if (this.ws) {
136
- this.ws.close();
136
+ // Remove all listeners to prevent memory leaks and allow process to exit
137
+ this.ws.removeAllListeners();
138
+ // Use terminate() for immediate close instead of close() which waits for graceful handshake
139
+ this.ws.terminate();
137
140
  this.ws = undefined;
138
141
  }
139
142
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@testsmith/perfornium",
3
- "version": "0.2.0",
3
+ "version": "0.4.0",
4
4
  "description": "Flexible performance testing framework for REST, SOAP, and web applications",
5
5
  "author": "TestSmith",
6
6
  "license": "MIT",
@@ -48,7 +48,15 @@
48
48
  "dev": "tsc --watch",
49
49
  "dev:link": "npm run build && npm link",
50
50
  "dev:test": "npm run build && node dist/cli/cli.js",
51
- "report:web-vitals": "node generate-web-vitals-report.js"
51
+ "report:web-vitals": "node generate-web-vitals-report.js",
52
+ "docker:build:controller": "docker build -t perfornium/controller -f docker/Dockerfile.controller .",
53
+ "docker:build:worker": "docker build -t perfornium/worker -f docker/Dockerfile.worker .",
54
+ "docker:build:worker-slim": "docker build -t perfornium/worker-slim -f docker/Dockerfile.worker-slim .",
55
+ "docker:build:all": "npm run docker:build:controller && npm run docker:build:worker && npm run docker:build:worker-slim",
56
+ "docker:up": "docker compose -f docker/docker-compose.yml up -d worker-1 worker-2 worker-3",
57
+ "docker:up:slim": "docker compose -f docker/docker-compose.slim.yml up -d worker-1 worker-2 worker-3",
58
+ "docker:down": "docker compose -f docker/docker-compose.yml down",
59
+ "docker:logs": "docker compose -f docker/docker-compose.yml logs -f"
52
60
  },
53
61
  "keywords": [
54
62
  "performance",