@m00nsolutions/playwright-reporter 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +248 -0
  3. package/index.mjs +2386 -0
  4. package/package.json +60 -0
package/index.mjs ADDED
@@ -0,0 +1,2386 @@
1
+ // M00n Reporter v2 - Simplified Playwright reporter for M00n Report dashboard
2
+ //
3
+ // Key improvements over v1:
4
+ // 1. Hybrid model - real-time step streaming + atomic final persistence
5
+ // 2. Lightweight step events for UI (fire-and-forget)
6
+ // 3. Complete test data at end (reliable, with retry)
7
+ // 4. Simple sequential step indexing (no complex matching)
8
+ // 5. Exponential backoff retries for reliability
9
+ // 6. Binary multipart uploads for attachments (no base64 overhead!)
10
+ // 7. Project API key - the API key identifies the project (no need for projectId)
11
+ //
12
+ // Usage in playwright.config.js:
13
+ // reporter: [
14
+ // ['./m00nreporter.mjs', {
15
+ // serverUrl: 'http://localhost:4001', // Required: ingest service URL
16
+ // apiKey: 'm00n_xxxxxxxxxxxx...', // Required: project API key (identifies both org and project)
17
+ // launch: 'My test run', // Optional: run title
18
+ // tags: ['smoke', 'regression'], // Optional: tags array
19
+ // attributes: { // Optional: custom key-value metadata
20
+ // ciJobUrl: 'https://...',
21
+ // browserVersion: '120',
22
+ // platform: 'Linux',
23
+ // },
24
+ // realtime: true, // Optional: stream steps in real-time (default: true)
25
+ // binaryAttachments: true, // Optional: use multipart uploads (default: true)
26
+ // debug: false, // Optional: debug logs
27
+ // verbose: false, // Optional: performance/timing logs for bottleneck analysis
28
+ // logFile: './m00n-perf.log', // Optional: save performance metrics to file
29
+ // }]
30
+ // ]
31
+
32
+ import fs from 'fs';
33
+ import path from 'path';
34
+ import crypto from 'crypto';
35
+ import { fileURLToPath } from 'url';
36
+
37
+ // ============================================================================
38
+ // CONSTANTS
39
+ // ============================================================================
40
+
41
// Files larger than this threshold will be streamed directly to server
// to avoid memory issues in reporter process (see postStreamFile vs buffered uploads)
const LARGE_FILE_THRESHOLD = 10 * 1024 * 1024; // 10MB

// Maximum concurrent attachment uploads across ALL tests.
// Prevents server/MinIO overload with high parallelism (30+ workers);
// enforced by UploadSemaphore below.
const MAX_GLOBAL_UPLOAD_CONCURRENCY = 10;

// Timeout for individual attachment uploads (ms)
const UPLOAD_TIMEOUT = 60000; // 60 seconds

// Timeout for streaming large file uploads (ms)
const STREAM_UPLOAD_TIMEOUT = 300000; // 5 minutes

// Timeout for waiting on pending uploads at run end (ms)
const END_UPLOAD_WAIT_TIMEOUT = 180000; // 3 minutes
57
+
58
+ // ============================================================================
59
+ // HELPERS
60
+ // ============================================================================
61
+
62
/**
 * Remove ANSI escape sequences from a string.
 * Terminal color codes would otherwise render as garbage in the browser UI.
 * Non-string (or empty/falsy) inputs are returned untouched.
 */
function stripAnsi(str) {
  const usable = typeof str === 'string' && str.length > 0;
  if (!usable) return str;
  // eslint-disable-next-line no-control-regex
  const ansiPattern = /\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])/g;
  return str.replace(ansiPattern, '');
}
71
+
72
+ // ============================================================================
73
+ // UPLOAD SEMAPHORE - Global concurrency control for attachment uploads
74
+ // ============================================================================
75
+ // Prevents server/MinIO overload when many tests complete simultaneously
76
+ // With 36 concurrent tests × 4 attachments = 144 potential parallel uploads
77
+ // This limits to MAX_GLOBAL_UPLOAD_CONCURRENCY at a time
78
+
79
/**
 * Counting semaphore limiting concurrent attachment uploads process-wide.
 *
 * FIX: the previous implementation had a hand-off race. release() decremented
 * `running` and then resolved a queued waiter, but the waiter's `running++`
 * only ran on a later microtask. A synchronous acquire() arriving in between
 * saw `running < max` and slipped through, so actual concurrency could exceed
 * `max`. We now transfer the slot directly: when a waiter exists, release()
 * wakes it WITHOUT touching `running`, and a woken waiter does not increment.
 */
class UploadSemaphore {
  constructor(maxConcurrent = MAX_GLOBAL_UPLOAD_CONCURRENCY) {
    this.max = maxConcurrent;
    this.running = 0;   // slots currently in use
    this.queue = [];    // resolve callbacks of waiting acquirers (FIFO)
  }

  /** Acquire a slot; resolves immediately if capacity is available, otherwise waits. */
  async acquire() {
    if (this.running < this.max) {
      this.running++;
      return;
    }
    await new Promise(resolve => this.queue.push(resolve));
    // The releasing caller handed its slot to us without decrementing
    // `running`, so the count is already correct here — do not increment.
  }

  /** Release a slot; hands it directly to the next waiter if any. */
  release() {
    const next = this.queue.shift();
    if (next) {
      // Slot transferred to the waiter; `running` stays unchanged.
      next();
    } else {
      this.running--;
    }
  }

  /** Execute `fn` while holding a slot; the slot is always released. */
  async run(fn) {
    await this.acquire();
    try {
      return await fn();
    } finally {
      this.release();
    }
  }

  /** Current occupancy / queue depth, for debugging. */
  getStats() {
    return {
      running: this.running,
      queued: this.queue.length,
      max: this.max,
    };
  }
}
122
+
123
+ // ============================================================================
124
+ // FILE LOGGER - Write performance metrics to a log file
125
+ // ============================================================================
126
+
127
/**
 * Append-only performance log writer with simple debounced buffering.
 *
 * Lines accumulate in an in-memory buffer and are flushed to the underlying
 * write stream at most once per 500ms. After construction, every write/close
 * error is deliberately swallowed so that logging can never break a test run.
 * When `enabled` is false (or the stream failed to open) every method is a
 * no-op.
 */
class FileLogger {
  /**
   * @param {string} logFilePath - Destination file, opened in append mode.
   * @param {boolean} enabled - Master switch; when false nothing is written.
   */
  constructor(logFilePath, enabled = false) {
    this.enabled = enabled;
    this.logFilePath = logFilePath;
    this.buffer = [];        // pending lines not yet written to disk
    this.flushTimer = null;  // debounce timer handle for flush()
    this.startTime = Date.now();
    this.writeStream = null;

    if (this.enabled && this.logFilePath) {
      try {
        // Ensure directory exists
        const dir = path.dirname(this.logFilePath);
        if (dir && dir !== '.') {
          fs.mkdirSync(dir, { recursive: true });
        }

        // Open write stream in append mode
        this.writeStream = fs.createWriteStream(this.logFilePath, { flags: 'a' });

        // Write session header
        const header = `\n${'='.repeat(80)}\n` +
          `M00nReporter Performance Log - ${new Date().toISOString()}\n` +
          `${'='.repeat(80)}\n`;
        this.writeStream.write(header);
      } catch (err) {
        // Opening failed (permissions, bad path, ...): disable rather than throw.
        console.warn(`[M00nReporter] Failed to open log file: ${err.message}`);
        this.enabled = false;
      }
    }
  }

  /**
   * Buffer a human-readable log line with wall-clock and elapsed timestamps.
   * @param {string} category - Short tag shown in brackets, e.g. 'perf'.
   * @param {string} message - Free-form message.
   * @param {*} [data] - Optional payload; objects are JSON-stringified.
   */
  log(category, message, data = null) {
    if (!this.enabled || !this.writeStream) return;

    const elapsed = ((Date.now() - this.startTime) / 1000).toFixed(3);
    const timestamp = new Date().toISOString();

    let line = `[${timestamp}] [+${elapsed}s] [${category}] ${message}`;
    if (data !== null) {
      if (typeof data === 'object') {
        line += ` ${JSON.stringify(data)}`;
      } else {
        line += ` ${data}`;
      }
    }

    this.buffer.push(line);

    // Flush periodically (every 500ms) to avoid too many writes
    if (!this.flushTimer) {
      this.flushTimer = setTimeout(() => this.flush(), 500);
    }
  }

  /** Write all buffered lines to the stream and cancel the debounce timer. */
  flush() {
    if (this.flushTimer) {
      clearTimeout(this.flushTimer);
      this.flushTimer = null;
    }

    if (!this.enabled || !this.writeStream || this.buffer.length === 0) return;

    const lines = this.buffer.join('\n') + '\n';
    this.buffer = [];

    try {
      this.writeStream.write(lines);
    } catch (err) {
      // Silently ignore write errors
    }
  }

  /**
   * Buffer a structured JSON event line (prefixed "[EVENT]") for machine
   * parsing; `ts`/`elapsed`/`type` are added automatically.
   */
  event(eventType, data) {
    if (!this.enabled || !this.writeStream) return;

    const event = {
      ts: Date.now(),
      elapsed: Date.now() - this.startTime,
      type: eventType,
      ...data,
    };

    this.buffer.push(`[EVENT] ${JSON.stringify(event)}`);

    if (!this.flushTimer) {
      this.flushTimer = setTimeout(() => this.flush(), 500);
    }
  }

  /**
   * Write a titled report section directly to the stream (bypasses the
   * buffer, but flushes pending lines first so ordering is preserved).
   */
  writeReport(title, content) {
    if (!this.enabled || !this.writeStream) return;

    this.flush(); // Flush pending logs first

    const section = `\n${'─'.repeat(60)}\n${title}\n${'─'.repeat(60)}\n${content}\n`;

    try {
      this.writeStream.write(section);
    } catch (err) {
      // Silently ignore write errors
    }
  }

  /** Flush, write a session-end footer, and close the stream. Safe to call once. */
  close() {
    this.flush();

    if (this.writeStream) {
      try {
        this.writeStream.write(`\n${'='.repeat(80)}\nSession ended: ${new Date().toISOString()}\n${'='.repeat(80)}\n\n`);
        this.writeStream.end();
      } catch (err) {
        // Silently ignore close errors
      }
      this.writeStream = null;
    }
  }
}
250
+
251
+ // ============================================================================
252
+ // PERFORMANCE TRACKER - Timing and metrics for bottleneck analysis
253
+ // ============================================================================
254
+
255
/**
 * In-memory timing/concurrency/throughput tracker for bottleneck analysis.
 * Every method is a cheap no-op when `enabled` is false.
 *
 * FIX: `time()` previously returned `() => {}` when disabled, so callers
 * doing `Math.round(endTiming(bytes))` received NaN (HttpClient logs the
 * duration whenever `verbose` is set, independent of this tracker). The
 * disabled stub now returns 0, matching the enabled callback's numeric
 * return type.
 */
class PerformanceTracker {
  constructor(enabled = false) {
    this.enabled = enabled;
    this.startTime = Date.now();

    // Timing buckets for different operations (ms values, via performance.now)
    this.timings = {
      httpPost: [],       // HTTP POST request latencies
      httpMultipart: [],  // Multipart upload latencies
      httpStream: [],     // Streaming upload latencies
      attachmentRead: [], // File read latencies
      testComplete: [],   // Full test completion cycle
      runStart: null,     // Run start latency
      runEnd: null,       // Run end latency
    };

    // Concurrency tracking (current values plus high-water marks)
    this.concurrency = {
      peakActiveTests: 0,
      peakPendingRequests: 0,
      peakQueuedSteps: 0,
      currentActiveTests: 0,
      currentPendingRequests: 0,
    };

    // Throughput metrics
    this.throughput = {
      totalBytesUploaded: 0,
      totalRequests: 0,
      totalRetries: 0,
      batchesFlushed: 0,
      stepsPerBatch: [],
    };

    // Memory snapshots taken via snapshotMemory()
    this.memorySnapshots = [];
  }

  /**
   * Start timing an operation.
   * @param {string} category - One of the keys in `this.timings`.
   * @returns {(bytesTransferred?: number) => number} Call when done; records
   *   the latency (and bytes, if > 0) and returns the duration in ms.
   *   When the tracker is disabled the stub returns 0 (never undefined).
   */
  time(category) {
    if (!this.enabled) return () => 0;
    const start = performance.now();
    this.concurrency.currentPendingRequests++;
    this.concurrency.peakPendingRequests = Math.max(
      this.concurrency.peakPendingRequests,
      this.concurrency.currentPendingRequests
    );

    return (bytesTransferred = 0) => {
      const duration = performance.now() - start;
      this.concurrency.currentPendingRequests--;

      // runStart/runEnd are scalar slots, not arrays, so only push to arrays.
      if (this.timings[category]) {
        this.timings[category].push(duration);
      }

      if (bytesTransferred > 0) {
        this.throughput.totalBytesUploaded += bytesTransferred;
      }
      this.throughput.totalRequests++;

      return duration;
    };
  }

  /** Track a retry attempt. */
  recordRetry() {
    if (!this.enabled) return;
    this.throughput.totalRetries++;
  }

  /** Track the start of a test (updates active count and its peak). */
  testStarted() {
    if (!this.enabled) return;
    this.concurrency.currentActiveTests++;
    this.concurrency.peakActiveTests = Math.max(
      this.concurrency.peakActiveTests,
      this.concurrency.currentActiveTests
    );
  }

  /** Track the end of a test. */
  testEnded() {
    if (!this.enabled) return;
    this.concurrency.currentActiveTests--;
  }

  /** Track step batching efficiency (one call per flushed batch). */
  recordBatchFlush(stepCount) {
    if (!this.enabled) return;
    this.throughput.batchesFlushed++;
    this.throughput.stepsPerBatch.push(stepCount);
  }

  /** Update the peak step-queue depth (backpressure indicator). */
  updateQueueDepth(depth) {
    if (!this.enabled) return;
    this.concurrency.peakQueuedSteps = Math.max(
      this.concurrency.peakQueuedSteps,
      depth
    );
  }

  /** Take a labelled process-memory snapshot (values in MB). */
  snapshotMemory(label) {
    if (!this.enabled) return;
    const mem = process.memoryUsage();
    this.memorySnapshots.push({
      label,
      timestamp: Date.now() - this.startTime,
      heapUsed: Math.round(mem.heapUsed / 1024 / 1024), // MB
      heapTotal: Math.round(mem.heapTotal / 1024 / 1024),
      rss: Math.round(mem.rss / 1024 / 1024),
      external: Math.round(mem.external / 1024 / 1024),
    });
  }

  /**
   * Summary statistics (count/min/max/avg/p50/p90/p99, rounded ms) for a
   * timing array; null when there are no samples.
   */
  calcStats(timings) {
    if (timings.length === 0) return null;
    const sorted = [...timings].sort((a, b) => a - b);
    const sum = sorted.reduce((a, b) => a + b, 0);
    return {
      count: sorted.length,
      min: Math.round(sorted[0]),
      max: Math.round(sorted[sorted.length - 1]),
      avg: Math.round(sum / sorted.length),
      p50: Math.round(sorted[Math.floor(sorted.length * 0.5)]),
      p90: Math.round(sorted[Math.floor(sorted.length * 0.9)]),
      p99: Math.round(sorted[Math.floor(sorted.length * 0.99)]),
    };
  }

  /** Build the full performance report object; null when disabled. */
  generateReport() {
    if (!this.enabled) return null;

    const totalDuration = (Date.now() - this.startTime) / 1000;
    const requestsPerSecond = this.throughput.totalRequests / totalDuration;
    const bytesPerSecond = this.throughput.totalBytesUploaded / totalDuration;

    return {
      duration: `${totalDuration.toFixed(1)}s`,

      latencies: {
        httpPost: this.calcStats(this.timings.httpPost),
        httpMultipart: this.calcStats(this.timings.httpMultipart),
        httpStream: this.calcStats(this.timings.httpStream),
        attachmentRead: this.calcStats(this.timings.attachmentRead),
        testComplete: this.calcStats(this.timings.testComplete),
        runStart: this.timings.runStart ? `${Math.round(this.timings.runStart)}ms` : null,
        runEnd: this.timings.runEnd ? `${Math.round(this.timings.runEnd)}ms` : null,
      },

      concurrency: {
        peakActiveTests: this.concurrency.peakActiveTests,
        peakPendingRequests: this.concurrency.peakPendingRequests,
        peakQueuedSteps: this.concurrency.peakQueuedSteps,
      },

      throughput: {
        totalRequests: this.throughput.totalRequests,
        totalRetries: this.throughput.totalRetries,
        retryRate: this.throughput.totalRequests > 0
          ? `${((this.throughput.totalRetries / this.throughput.totalRequests) * 100).toFixed(1)}%`
          : '0%',
        requestsPerSecond: requestsPerSecond.toFixed(1),
        bytesUploaded: this.formatBytes(this.throughput.totalBytesUploaded),
        uploadSpeed: `${this.formatBytes(bytesPerSecond)}/s`,
        batchesFlushed: this.throughput.batchesFlushed,
        avgStepsPerBatch: this.throughput.stepsPerBatch.length > 0
          ? (this.throughput.stepsPerBatch.reduce((a, b) => a + b, 0) / this.throughput.stepsPerBatch.length).toFixed(1)
          : 0,
      },

      memory: this.memorySnapshots,
    };
  }

  /** Human-readable byte count (B/KB/MB/GB, one decimal above bytes). */
  formatBytes(bytes) {
    if (bytes < 1024) return `${bytes}B`;
    if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)}KB`;
    if (bytes < 1024 * 1024 * 1024) return `${(bytes / 1024 / 1024).toFixed(1)}MB`;
    return `${(bytes / 1024 / 1024 / 1024).toFixed(1)}GB`;
  }

  /** One-line display form of a calcStats() result; null for null stats. */
  formatLatencyStats(stats, label) {
    if (!stats) return null;
    return `${label}: n=${stats.count} min=${stats.min}ms avg=${stats.avg}ms p90=${stats.p90}ms p99=${stats.p99}ms max=${stats.max}ms`;
  }
}
446
+
447
+ // ============================================================================
448
+ // HELPERS
449
+ // ============================================================================
450
+
451
/**
 * ISO-8601 (UTC) timestamp string for the given Date; defaults to "now".
 * @param {Date} [d]
 * @returns {string}
 */
function nowIso(d = new Date()) {
  return d.toISOString();
}
454
+
455
/**
 * Normalize a `tags` option into a clean array of trimmed strings.
 * Accepts an array (non-string and blank entries are dropped) or a
 * comma-separated string; any other input yields an empty array.
 */
function parseTags(tags) {
  if (!tags) return [];

  if (Array.isArray(tags)) {
    const cleaned = [];
    for (const entry of tags) {
      if (typeof entry === 'string' && entry.trim()) {
        cleaned.push(entry.trim());
      }
    }
    return cleaned;
  }

  if (typeof tags === 'string') {
    const cleaned = [];
    for (const piece of tags.split(',')) {
      const trimmed = piece.trim();
      if (trimmed) cleaned.push(trimmed);
    }
    return cleaned;
  }

  return [];
}
465
+
466
/**
 * Pull reporter-relevant annotations off a Playwright test object.
 * Returns `{ caseId?, tags? }` — caseId is stringified; tags is always an
 * array (a single value is wrapped). Missing/empty annotations are omitted.
 */
function extractAnnotations(test) {
  const out = {};
  const descriptionOf = (type) =>
    test?.annotations?.find(a => a?.type === type)?.description;

  const caseId = descriptionOf('caseId');
  if (caseId) out.caseId = String(caseId);

  const tags = descriptionOf('tags');
  if (tags) out.tags = Array.isArray(tags) ? tags : [tags];

  return out;
}
476
+
477
/**
 * Best-effort source file path for a test: prefers `location.file`,
 * then the first segment of `titlePath()`, otherwise null.
 */
function getFilePath(test) {
  const fromLocation = test?.location?.file;
  if (fromLocation) return fromLocation;
  const fromTitlePath = test?.titlePath?.()?.[0];
  return fromTitlePath || null;
}
480
+
481
// Deterministic, human-readable key for a test, built from its title path
// (empty segments dropped, joined with ' › ').
function getTestKey(test) {
  const segments = test.titlePath().filter(Boolean);
  return segments.join(' › ');
}

// Per-attempt collector key: suffixes the retry index so a retry's collector
// can never race with (or clobber) the first run's collector.
function getCollectorKey(test, retry = 0) {
  const base = getTestKey(test);
  return `${base}##retry:${retry}`;
}
491
+
492
+ // ============================================================================
493
+ // HTTP CLIENT - Simple fetch with retry
494
+ // ============================================================================
495
+
496
/**
 * Thin HTTP client for the ingest service: JSON POST with exponential-backoff
 * retry, fire-and-forget beacons, buffered multipart uploads, streamed
 * multipart uploads for large files, and a debounced step batcher.
 *
 * Fixes vs previous version:
 * - postStreamFile sent the literal text `$(unknown)` as the part filename
 *   instead of interpolating the `filename` parameter.
 * - post() serialized the body twice (once for the size metric, once for
 *   fetch); it now serializes exactly once per call.
 * - post() no longer leaks a pending-request count in the perf tracker when
 *   body serialization fails (endTiming is now invoked on that path).
 */
class HttpClient {
  /**
   * @param {string} baseUrl - Ingest service root; trailing slash stripped.
   * @param {object} [options] - apiKey, timeout (ms, default 30000),
   *   maxRetries (default 3), logger, perfTracker, verbose, fileLogger.
   */
  constructor(baseUrl, options = {}) {
    this.baseUrl = baseUrl.replace(/\/$/, '');
    this.apiKey = options.apiKey;
    this.timeout = options.timeout || 30000;
    this.maxRetries = options.maxRetries || 3;
    this.logger = options.logger || console.log;
    this.perfTracker = options.perfTracker || null;
    this.verbose = options.verbose || false;
    this.fileLogger = options.fileLogger || null;
  }

  /**
   * POST a JSON body with retry/backoff.
   * Resolves to the parsed response, or `{ error, permanent: true }` for
   * non-retryable auth/project errors, or `{ error, serviceUnavailable: true }`
   * when the service stays unreachable. Never rejects.
   */
  async post(path, body, options = {}) {
    const url = `${this.baseUrl}${path}`;
    const timeout = options.timeout || this.timeout;
    const maxRetries = options.noRetry ? 1 : this.maxRetries;

    // Start timing
    const endTiming = this.perfTracker?.time('httpPost') || (() => 0);

    // Serialize ONCE; reused for both the size metric and every attempt's body.
    let bodyJson;
    try {
      bodyJson = JSON.stringify(body);
    } catch (e) {
      // Close out the timing so the tracker's pending-request count stays balanced.
      endTiming(0);
      return { error: 'json_serialization_failed', serviceUnavailable: true };
    }
    const bodySize = bodyJson.length;

    for (let attempt = 1; attempt <= maxRetries; attempt++) {
      const controller = new AbortController();
      const timeoutId = setTimeout(() => controller.abort(), timeout);

      try {
        const headers = { 'Content-Type': 'application/json' };
        if (this.apiKey) headers['X-API-Key'] = this.apiKey;

        const response = await fetch(url, {
          method: 'POST',
          headers,
          body: bodyJson,
          signal: controller.signal,
          keepalive: true,
        });

        clearTimeout(timeoutId);

        if (response.ok) {
          const duration = endTiming(bodySize);
          if (this.verbose) {
            this.logger('perf', `POST ${path} completed in ${Math.round(duration)}ms (${bodySize} bytes)`);
          }
          // Log to file
          this.fileLogger?.event('HTTP_POST', {
            path,
            duration: Math.round(duration),
            bytes: bodySize,
            status: response.status,
            attempt,
          });
          return await response.json().catch(() => ({ ok: true }));
        }

        // Check for permanent errors (don't retry)
        const errorData = await response.json().catch(() => ({}));
        if (['PROJECT_NOT_FOUND', 'API_KEY_REQUIRED', 'INVALID_API_KEY'].includes(errorData.code)) {
          endTiming(0);
          return { error: errorData.code, permanent: true };
        }

        // Transient error - retry if we have attempts left
        if (attempt < maxRetries) {
          this.perfTracker?.recordRetry();
          if (this.verbose) {
            this.logger('perf', `POST ${path} retry ${attempt}/${maxRetries} after HTTP ${response.status}`);
          }
          this.fileLogger?.event('HTTP_RETRY', {
            path,
            attempt,
            reason: `HTTP ${response.status}`,
          });
          const delay = Math.min(1000 * Math.pow(2, attempt - 1), 5000);
          await new Promise(r => setTimeout(r, delay));
          continue;
        }

        endTiming(0);
        this.fileLogger?.event('HTTP_ERROR', {
          path,
          attempt,
          error: errorData.error || `HTTP ${response.status}`,
        });
        return { error: errorData.error || `HTTP ${response.status}`, serviceUnavailable: true };

      } catch (err) {
        clearTimeout(timeoutId);

        // AbortError means our own timeout fired; don't burn retries on it.
        if (attempt < maxRetries && err.name !== 'AbortError') {
          this.perfTracker?.recordRetry();
          if (this.verbose) {
            this.logger('perf', `POST ${path} retry ${attempt}/${maxRetries} after error: ${err.message}`);
          }
          this.fileLogger?.event('HTTP_RETRY', {
            path,
            attempt,
            reason: err.message,
          });
          const delay = Math.min(1000 * Math.pow(2, attempt - 1), 5000);
          await new Promise(r => setTimeout(r, delay));
          continue;
        }

        endTiming(0);
        this.fileLogger?.event('HTTP_ERROR', {
          path,
          attempt,
          error: err.message || 'network_error',
        });
        // Mark as service unavailable so caller knows the service is down
        return { error: err.message || 'network_error', serviceUnavailable: true };
      }
    }

    // Fallback return (should not reach here, but ensures no undefined return)
    return { error: 'max_retries_exceeded', serviceUnavailable: true };
  }

  // Fire-and-forget with keepalive (for process exit scenarios)
  // Designed to NEVER throw - all errors are silently caught
  fire(path, body) {
    try {
      const url = `${this.baseUrl}${path}`;
      const headers = { 'Content-Type': 'application/json' };
      if (this.apiKey) headers['X-API-Key'] = this.apiKey;

      // Safely serialize body
      let bodyJson;
      try {
        bodyJson = JSON.stringify(body);
      } catch (e) {
        return; // Skip if serialization fails
      }

      // Use beacon-style request that survives process exit
      fetch(url, {
        method: 'POST',
        headers,
        body: bodyJson,
        keepalive: true,
      }).catch(() => {
        // Silently ignore all errors - this is fire-and-forget
      });
    } catch (e) {
      // Silently ignore all errors
    }
  }

  /**
   * Upload one or more in-memory files via multipart/form-data (no base64
   * overhead). `files` entries are `{ name, contentType, buffer }`; `fields`
   * are plain text form fields. Retries with backoff like post().
   */
  async postMultipart(path, fields, files, options = {}) {
    const url = `${this.baseUrl}${path}`;
    const timeout = options.timeout || this.timeout;
    const maxRetries = options.noRetry ? 1 : this.maxRetries;

    // Calculate total size for logging
    const totalSize = files.reduce((sum, f) => sum + (f.buffer?.length || 0), 0);
    const fileCount = files.length;

    // Start timing
    const endTiming = this.perfTracker?.time('httpMultipart') || (() => 0);

    for (let attempt = 1; attempt <= maxRetries; attempt++) {
      const controller = new AbortController();
      const timeoutId = setTimeout(() => controller.abort(), timeout);

      try {
        // Build multipart boundary
        const boundary = `----M00nReporter${crypto.randomUUID().replace(/-/g, '')}`;
        const parts = [];

        // Add text fields
        for (const [name, value] of Object.entries(fields)) {
          parts.push(
            `--${boundary}\r\n`,
            `Content-Disposition: form-data; name="${name}"\r\n\r\n`,
            `${value}\r\n`
          );
        }

        // Add file fields
        for (const file of files) {
          parts.push(
            `--${boundary}\r\n`,
            `Content-Disposition: form-data; name="file"; filename="${file.name}"\r\n`,
            `Content-Type: ${file.contentType}\r\n\r\n`
          );
          parts.push(file.buffer);
          parts.push('\r\n');
        }

        parts.push(`--${boundary}--\r\n`);

        // Concatenate all parts into a single buffer
        const bodyParts = parts.map(p => Buffer.isBuffer(p) ? p : Buffer.from(p));
        const body = Buffer.concat(bodyParts);

        const headers = {
          'Content-Type': `multipart/form-data; boundary=${boundary}`,
          'Content-Length': body.length.toString(),
        };
        if (this.apiKey) headers['X-API-Key'] = this.apiKey;

        const response = await fetch(url, {
          method: 'POST',
          headers,
          body,
          signal: controller.signal,
        });

        clearTimeout(timeoutId);

        if (response.ok) {
          const duration = endTiming(totalSize);
          if (this.verbose) {
            const speed = totalSize > 0 ? ((totalSize / 1024) / (duration / 1000)).toFixed(1) : 0;
            this.logger('perf', `MULTIPART ${path} completed in ${Math.round(duration)}ms (${fileCount} files, ${Math.round(totalSize/1024)}KB, ${speed}KB/s)`);
          }
          // Log to file
          this.fileLogger?.event('HTTP_MULTIPART', {
            path,
            duration: Math.round(duration),
            bytes: totalSize,
            fileCount,
            status: response.status,
            attempt,
          });
          return await response.json().catch(() => ({ ok: true }));
        }

        const errorData = await response.json().catch(() => ({}));
        if (attempt < maxRetries) {
          this.perfTracker?.recordRetry();
          if (this.verbose) {
            this.logger('perf', `MULTIPART ${path} retry ${attempt}/${maxRetries} after HTTP ${response.status}`);
          }
          this.fileLogger?.event('HTTP_RETRY', {
            path,
            type: 'multipart',
            attempt,
            reason: `HTTP ${response.status}`,
          });
          const delay = Math.min(1000 * Math.pow(2, attempt - 1), 5000);
          await new Promise(r => setTimeout(r, delay));
          continue;
        }

        endTiming(0);
        this.fileLogger?.event('HTTP_ERROR', {
          path,
          type: 'multipart',
          attempt,
          error: errorData.error || `HTTP ${response.status}`,
        });
        return { error: errorData.error || `HTTP ${response.status}` };

      } catch (err) {
        clearTimeout(timeoutId);

        if (attempt < maxRetries && err.name !== 'AbortError') {
          this.perfTracker?.recordRetry();
          if (this.verbose) {
            this.logger('perf', `MULTIPART ${path} retry ${attempt}/${maxRetries} after error: ${err.message}`);
          }
          this.fileLogger?.event('HTTP_RETRY', {
            path,
            type: 'multipart',
            attempt,
            reason: err.message,
          });
          const delay = Math.min(1000 * Math.pow(2, attempt - 1), 5000);
          await new Promise(r => setTimeout(r, delay));
          continue;
        }

        endTiming(0);
        this.fileLogger?.event('HTTP_ERROR', {
          path,
          type: 'multipart',
          attempt,
          error: err.message || 'network_error',
        });
        return { error: err.message || 'network_error' };
      }
    }
  }

  /**
   * Stream a file from disk directly to the server as multipart/form-data
   * (for large files; avoids buffering the file in memory).
   * Uses the /api/ingest/v2/attachment/stream endpoint.
   * FIX: the part header now interpolates `filename` (it previously sent the
   * literal text `$(unknown)` for every file).
   */
  async postStreamFile(path, fields, filePath, filename, contentType, options = {}) {
    const url = `${this.baseUrl}${path}`;
    const timeout = options.timeout || 300000; // 5 min timeout for large files
    const maxRetries = options.noRetry ? 1 : this.maxRetries;

    // Start timing
    const endTiming = this.perfTracker?.time('httpStream') || (() => 0);
    let fileSize = 0;

    for (let attempt = 1; attempt <= maxRetries; attempt++) {
      const controller = new AbortController();
      const timeoutId = setTimeout(() => controller.abort(), timeout);

      try {
        // Get file stats for Content-Length
        const stats = await fs.promises.stat(filePath);
        fileSize = stats.size;

        // Build multipart body with streaming
        const boundary = `----M00nReporter${crypto.randomUUID().replace(/-/g, '')}`;

        // Build header parts (before file content)
        const headerParts = [];
        for (const [name, value] of Object.entries(fields)) {
          headerParts.push(
            `--${boundary}\r\n`,
            `Content-Disposition: form-data; name="${name}"\r\n\r\n`,
            `${value}\r\n`
          );
        }
        headerParts.push(
          `--${boundary}\r\n`,
          `Content-Disposition: form-data; name="file"; filename="${filename}"\r\n`,
          `Content-Type: ${contentType}\r\n\r\n`
        );
        const headerBuffer = Buffer.from(headerParts.join(''));

        // Build footer parts (after file content)
        const footerBuffer = Buffer.from(`\r\n--${boundary}--\r\n`);

        // Calculate total content length
        const totalLength = headerBuffer.length + fileSize + footerBuffer.length;

        // Create a combined stream: header + file + footer
        const { Readable } = await import('stream');

        const combinedStream = new Readable({
          read() {}
        });

        // Push header
        combinedStream.push(headerBuffer);

        // Stream file content
        const fileStream = fs.createReadStream(filePath);
        fileStream.on('data', (chunk) => combinedStream.push(chunk));
        fileStream.on('end', () => {
          combinedStream.push(footerBuffer);
          combinedStream.push(null); // End the stream
        });
        fileStream.on('error', (err) => combinedStream.destroy(err));

        const headers = {
          'Content-Type': `multipart/form-data; boundary=${boundary}`,
          'Content-Length': totalLength.toString(),
        };
        if (this.apiKey) headers['X-API-Key'] = this.apiKey;

        // Use fetch with duplex: 'half' for streaming body
        const response = await fetch(url, {
          method: 'POST',
          headers,
          body: Readable.toWeb(combinedStream),
          signal: controller.signal,
          duplex: 'half', // Required for streaming request body
        });

        clearTimeout(timeoutId);

        if (response.ok) {
          const duration = endTiming(fileSize);
          if (this.verbose) {
            const speed = fileSize > 0 ? ((fileSize / 1024 / 1024) / (duration / 1000)).toFixed(1) : 0;
            this.logger('perf', `STREAM ${path} completed in ${Math.round(duration)}ms (${(fileSize/1024/1024).toFixed(1)}MB, ${speed}MB/s)`);
          }
          // Log to file
          this.fileLogger?.event('HTTP_STREAM', {
            path,
            duration: Math.round(duration),
            bytes: fileSize,
            filename,
            status: response.status,
            attempt,
          });
          return await response.json().catch(() => ({ ok: true }));
        }

        const errorData = await response.json().catch(() => ({}));
        if (attempt < maxRetries) {
          this.perfTracker?.recordRetry();
          if (this.verbose) {
            this.logger('perf', `STREAM ${path} retry ${attempt}/${maxRetries} after HTTP ${response.status}`);
          }
          this.fileLogger?.event('HTTP_RETRY', {
            path,
            type: 'stream',
            attempt,
            reason: `HTTP ${response.status}`,
          });
          const delay = Math.min(1000 * Math.pow(2, attempt - 1), 10000);
          await new Promise(r => setTimeout(r, delay));
          continue;
        }

        endTiming(0);
        this.fileLogger?.event('HTTP_ERROR', {
          path,
          type: 'stream',
          attempt,
          error: errorData.error || `HTTP ${response.status}`,
        });
        return { error: errorData.error || `HTTP ${response.status}` };

      } catch (err) {
        clearTimeout(timeoutId);

        if (attempt < maxRetries && err.name !== 'AbortError') {
          this.perfTracker?.recordRetry();
          if (this.verbose) {
            this.logger('perf', `STREAM ${path} retry ${attempt}/${maxRetries} after error: ${err.message}`);
          }
          this.fileLogger?.event('HTTP_RETRY', {
            path,
            type: 'stream',
            attempt,
            reason: err.message,
          });
          const delay = Math.min(1000 * Math.pow(2, attempt - 1), 10000);
          await new Promise(r => setTimeout(r, delay));
          continue;
        }

        endTiming(0);
        this.fileLogger?.event('HTTP_ERROR', {
          path,
          type: 'stream',
          attempt,
          error: err.message || 'network_error',
        });
        return { error: err.message || 'network_error' };
      }
    }
  }

  /**
   * Create a debounced batcher that accumulates items and fire()s them to
   * `path` as `{ items }` either every `flushInterval` ms or as soon as
   * `maxBatchSize` items are queued. Used for real-time step streaming.
   */
  createBatcher(path, options = {}) {
    const flushInterval = options.flushInterval || 100;
    const maxBatchSize = options.maxBatchSize || 50;
    const perfTracker = this.perfTracker;
    const fileLogger = this.fileLogger;
    const verbose = this.verbose;
    const logger = this.logger;

    let buffer = [];
    let flushTimer = null;
    let totalFlushed = 0;

    const flush = () => {
      if (flushTimer) {
        clearTimeout(flushTimer);
        flushTimer = null;
      }
      if (buffer.length === 0) return;

      const items = buffer;
      buffer = [];
      totalFlushed += items.length;

      // Track batch size for efficiency analysis
      perfTracker?.recordBatchFlush(items.length);
      if (verbose) {
        logger('perf', `BATCH flush: ${items.length} steps`);
      }

      // Log to file
      fileLogger?.event('BATCH_FLUSH', {
        path,
        stepCount: items.length,
        totalFlushed,
      });

      // Fire-and-forget batch
      this.fire(path, { items });
    };

    return {
      add: (item) => {
        buffer.push(item);
        // Track queue depth for backpressure analysis
        perfTracker?.updateQueueDepth(buffer.length);

        if (buffer.length >= maxBatchSize) {
          flush();
        } else if (!flushTimer) {
          flushTimer = setTimeout(flush, flushInterval);
        }
      },
      flush,
      getPending: () => buffer.length,
    };
  }
}
1004
+
1005
+ // ============================================================================
1006
+ // TEST COLLECTOR - Collects test data during execution
1007
+ // ============================================================================
1008
+
1009
class TestCollector {
  /**
   * Collects everything about one test attempt: metadata, steps, and
   * attachments. One collector exists per (test, retry) pair.
   *
   * @param {object} [options]
   * @param {string} [options.testId] - Pre-generated id; defaults to a
   *   client-generated UUID so steps can be streamed without waiting for
   *   the server to assign one.
   * @param {function} [options.onStepEvent] - Callback invoked with a
   *   lightweight event each time a step starts or ends (real-time UI).
   */
  constructor(options = {}) {
    this.testKey = null;
    // CLIENT-GENERATED UUID - eliminates the round-trip wait for a server id.
    this.testId = options.testId || crypto.randomUUID();
    this.runId = null;
    this.filePath = null;
    this.titlePath = [];
    this.annotations = {};
    this.retry = 0;
    this.startedAt = null;
    this.steps = [];
    this.stepIndex = 0;
    this.activeSteps = new Map(); // stepObject -> stepData (for nesting)
    this.attachments = [];

    // Real-time streaming callback (null disables streaming).
    this.onStepEvent = options.onStepEvent || null;
  }

  // Normalize a Playwright error (or plain string) into { message, stack }.
  #toStepError(error) {
    if (!error) return null;
    return {
      message: stripAnsi(String(error.message || error)),
      stack: stripAnsi(error.stack),
    };
  }

  // Emit a real-time step event if a callback is registered.
  // 'start' events carry startTime; 'end' events carry duration + error.
  #emitStepEvent(action, stepData) {
    if (!this.onStepEvent) return;
    const event = {
      action,
      runId: this.runId,
      testId: this.testId, // Client-generated UUID - always available
      stepIndex: stepData.index,
      title: stepData.title,
      category: stepData.category,
      status: stepData.status,
      nestingLevel: stepData.nestingLevel,
    };
    if (action === 'start') {
      event.startTime = stepData.startTime;
    } else {
      event.duration = stepData.duration;
      event.error = stepData.error;
    }
    this.onStepEvent(event);
  }

  // Record an attachment entry (assigns a fresh UUID) and return it.
  #recordAttachment(fields) {
    const attachmentData = { id: crypto.randomUUID(), ...fields };
    this.attachments.push(attachmentData);
    return attachmentData;
  }

  // Record an in-memory attachment either as a raw Buffer (binary/multipart
  // mode) or as base64 text (legacy JSON mode).
  #recordBuffer(name, contentType, buffer, binaryMode) {
    if (binaryMode) {
      return this.#recordAttachment({
        name,
        contentType,
        buffer,
        size: buffer.length,
        isLargeFile: false,
      });
    }
    return this.#recordAttachment({
      name,
      contentType,
      data: buffer.toString('base64'),
      size: buffer.length,
      compressed: false,
    });
  }

  /**
   * Begin tracking a Playwright step and emit a 'start' event.
   * @param {object} step - Playwright TestStep.
   * @returns {object} The internal step record.
   */
  startStep(step) {
    const stepData = {
      index: this.stepIndex++,
      title: step.title,
      category: step.category || null,
      status: 'running',
      startTime: step.startTime ? step.startTime.getTime() : Date.now(),
      duration: null,
      error: null,
      // Nesting level = number of steps currently open when this one starts.
      nestingLevel: this.activeSteps.size,
    };

    this.activeSteps.set(step, stepData);
    this.steps.push(stepData);

    // Emit immediately - testId is client-generated, no server wait.
    this.#emitStepEvent('start', stepData);
    return stepData;
  }

  /**
   * Finish tracking a step and emit an 'end' event. A step never seen by
   * startStep (e.g. an internal Playwright step) is recorded directly as an
   * already-completed step at nesting level 0.
   * @param {object} step - Playwright TestStep.
   * @param {Error|string|null} [error] - Error if the step failed.
   * @returns {object} The internal step record.
   */
  endStep(step, error = null) {
    const stepData = this.activeSteps.get(step);
    if (!stepData) {
      // Unknown step - record it as completed in one shot.
      const newStep = {
        index: this.stepIndex++,
        title: step.title,
        category: step.category || null,
        status: error ? 'failed' : 'passed',
        startTime: step.startTime ? step.startTime.getTime() : Date.now(),
        duration: step.duration || 0,
        error: this.#toStepError(error),
        nestingLevel: 0,
      };
      this.steps.push(newStep);
      this.#emitStepEvent('end', newStep);
      return newStep;
    }

    // Known step - close it out in place.
    stepData.status = error ? 'failed' : 'passed';
    stepData.duration = step.duration || (Date.now() - stepData.startTime);
    stepData.error = this.#toStepError(error);

    this.activeSteps.delete(step);
    this.#emitStepEvent('end', stepData);
    return stepData;
  }

  /**
   * Close out any steps that never received an end event, then return all
   * steps sorted by start time. Called once at test end.
   * @returns {object[]} All collected steps in chronological order.
   */
  finalizeSteps() {
    // Steps that started but never ended: mark skipped.
    for (const stepData of this.activeSteps.values()) {
      stepData.status = 'skipped';
      stepData.duration = Date.now() - stepData.startTime;
    }
    this.activeSteps.clear();

    // Steps still flagged 'running' in the array (missed end events):
    // mark interrupted to show they did not complete normally.
    for (const step of this.steps) {
      if (step.status === 'running') {
        step.status = 'interrupted';
        step.duration = step.duration ?? (Date.now() - step.startTime);
      }
    }

    // Sort by startTime so the final order matches actual execution.
    this.steps.sort((a, b) => a.startTime - b.startTime);

    return this.steps;
  }

  /**
   * Read and record an attachment. Async so large file reads do not block
   * the event loop. In binary mode, small payloads keep their raw Buffer
   * for multipart upload and files over LARGE_FILE_THRESHOLD keep only a
   * file path for streaming upload; otherwise content is base64-encoded.
   *
   * @param {object} attachment - Playwright attachment
   *   ({ name, contentType, body?, path? }).
   * @param {string} testId - Unused; kept for call-site compatibility.
   * @param {string} runId - Unused; kept for call-site compatibility.
   * @param {boolean} [binaryMode=true] - Raw buffers/paths vs base64 JSON.
   * @returns {Promise<object|null>} The recorded entry, or null when skipped
   *   (no content, unreadable file, or larger than 500MB).
   */
  async addAttachment(attachment, testId, runId, binaryMode = true) {
    if (!attachment.body && !attachment.path) return null;

    try {
      let name = attachment.name || 'unknown';
      const contentType = attachment.contentType || 'application/octet-stream';

      if (attachment.body) {
        // Inline body - always buffered (usually small).
        return this.#recordBuffer(name, contentType, Buffer.from(attachment.body), binaryMode);
      }

      // File-backed attachment: size decides streaming vs buffering.
      try {
        const stats = await fs.promises.stat(attachment.path);
        if (stats.size > 500 * 1024 * 1024) return null; // Skip >500MB

        if (name === 'trace') name = path.basename(attachment.path);

        // Large file: remember the path only - streamed later, never buffered.
        if (stats.size > LARGE_FILE_THRESHOLD && binaryMode) {
          return this.#recordAttachment({
            name,
            contentType,
            filePath: attachment.path, // Store path, NOT buffer!
            size: stats.size,
            isLargeFile: true,
          });
        }

        // Small file: buffer it for a regular upload.
        const buffer = await fs.promises.readFile(attachment.path);
        return this.#recordBuffer(name, contentType, buffer, binaryMode);
      } catch (fileErr) {
        // File missing or unreadable - happens with 'retain-on-failure' when
        // Playwright cleans up artifacts from non-final retry attempts.
        return null;
      }
    } catch (err) {
      // Best-effort: attachment problems never fail the test report.
      return null;
    }
  }

  /**
   * Build the final payload for the test/complete endpoint.
   * @param {object} result - Playwright TestResult.
   * @param {boolean} [binaryMode=true] - When true, attachments are omitted
   *   from the JSON (they travel separately via multipart upload).
   * @returns {object} The complete test payload.
   */
  buildPayload(result, binaryMode = true) {
    const steps = this.finalizeSteps();

    return {
      testId: this.testId, // Client-generated UUID (primary identifier)
      runId: this.runId,
      filePath: this.filePath,
      titlePath: this.titlePath,
      annotations: this.annotations,
      retry: this.retry,
      startedAt: this.startedAt,
      endedAt: nowIso(),
      status: result.status,
      duration: result.duration,
      error: result.errors?.[0] ? {
        message: stripAnsi(result.errors[0].message || String(result.errors[0])),
        stack: stripAnsi(result.errors[0].stack),
        name: result.errors[0].name,
      } : null,
      steps: steps.map(s => ({
        title: s.title,
        category: s.category,
        status: s.status,
        duration: s.duration,
        error: s.error,
        nestingLevel: s.nestingLevel,
        index: s.index, // Include step index for proper ordering
      })),
      // Only include attachments in JSON if NOT using binary mode.
      attachments: binaryMode ? [] : this.attachments,
    };
  }
}
1289
+
1290
+ // ============================================================================
1291
+ // REPORTER
1292
+ // ============================================================================
1293
+
1294
+ export default class M00nReporter {
1295
  /**
   * @param {object} [options] - Reporter options from playwright.config.
   * @param {string} options.serverUrl - Ingest service base URL (required;
   *   reporter disables itself when missing).
   * @param {string} options.apiKey - Project API key (required; identifies
   *   both org and project server-side).
   * @param {boolean} [options.debug=false] - Enable debug logs.
   * @param {boolean} [options.verbose=false] - Enable performance/timing logs.
   * @param {string} [options.logFile] - Path for persisted metric logs.
   * @param {boolean} [options.realtime=true] - Stream steps in real time.
   * @param {boolean} [options.binaryAttachments=true] - Upload attachments
   *   via multipart instead of base64 JSON.
   */
  constructor(options = {}) {
    this.opts = options;
    this.debug = options.debug || false;
    this.verbose = options.verbose || false; // Performance/timing logs
    this.logFile = options.logFile || null; // Log file path
    this.disabled = false;
    this.realtime = options.realtime !== false; // Default: true
    this.binaryAttachments = options.binaryAttachments !== false; // Default: true (use multipart)

    // Performance tracker for bottleneck analysis.
    // Created before option validation so this.log can use it safely.
    this.perfTracker = new PerformanceTracker(this.verbose || !!this.logFile);

    // File logger for persisting metrics
    this.fileLogger = new FileLogger(this.logFile, !!this.logFile);

    // Validate required options - missing config disables the reporter
    // rather than failing the test run.
    if (!options.serverUrl) {
      this.log('warn', '"serverUrl" is required. Reporter disabled.');
      this.disabled = true;
      return;
    }

    if (!options.apiKey) {
      this.log('warn', '"apiKey" is required. Reporter disabled.');
      this.disabled = true;
      return;
    }

    this.runId = null;
    this.http = new HttpClient(options.serverUrl, {
      apiKey: options.apiKey,
      timeout: 60000, // Increased for large file uploads
      maxRetries: 3,
      logger: this.log.bind(this),
      perfTracker: this.perfTracker,
      verbose: this.verbose,
      fileLogger: this.fileLogger,
    });

    // Step batcher for real-time streaming (fire-and-forget, batched).
    // Tuned for high parallelism (30+ browsers):
    // - Larger batch size = fewer HTTP requests
    // - Longer interval = more batching, less overhead
    this.stepBatcher = this.http.createBatcher('/api/ingest/v2/steps/stream', {
      flushInterval: 200, // Flush every 200ms (was 100ms)
      maxBatchSize: 100, // Or when 100 steps accumulated (was 50)
    });

    // Per-test collectors (key: testKey string)
    this.collectors = new Map();

    // Track ALL in-flight onTestEnd executions to ensure they complete.
    // This fixes the race condition where onEnd is called before onTestEnd
    // finishes.
    this.activeTestEndPromises = [];

    // Track pending attachment uploads (fire-and-forget during onTestEnd).
    // These are collected and awaited during onEnd.
    this.pendingAttachmentUploads = [];

    // Global semaphore to limit concurrent attachment uploads.
    // Prevents server/MinIO overload with high parallelism.
    this.uploadSemaphore = new UploadSemaphore(MAX_GLOBAL_UPLOAD_CONCURRENCY);

    // test/start is now fire-and-forget (no need to track promises).
    // testId is generated client-side, so steps can be streamed immediately.

    // Counters surfaced in the end-of-run summary.
    this.stats = {
      testsStarted: 0,
      testsReported: 0,
      stepsCounted: 0,
      stepsStreamed: 0,
      attachmentsCounted: 0,
      errors: 0,
    };

    // Collect actual error messages for the summary (capped in addError).
    this.errorMessages = [];

    // Take initial memory snapshot
    this.perfTracker.snapshotMemory('init');

    // Log initialization
    if (this.logFile) {
      this.fileLogger.log('INIT', `Reporter initialized`, {
        serverUrl: options.serverUrl,
        realtime: this.realtime,
        binaryAttachments: this.binaryAttachments,
      });
    }

    if (this.verbose) {
      this.log('perf', 'Performance logging enabled - tracking timing and concurrency metrics');
    }
    if (this.logFile) {
      this.log('info', `Performance metrics will be saved to: ${this.logFile}`);
    }
  }
1393
+
1394
+ // Add error with message tracking
1395
+ addError(message) {
1396
+ this.stats.errors++;
1397
+ if (this.errorMessages.length < 10) { // Limit to avoid spam
1398
+ this.errorMessages.push(message);
1399
+ }
1400
+ }
1401
+
1402
+ // Callback for real-time step events
1403
+ createStepEventHandler() {
1404
+ if (!this.realtime) return null;
1405
+
1406
+ return (stepEvent) => {
1407
+ this.stepBatcher.add(stepEvent);
1408
+ this.stats.stepsStreamed++;
1409
+ };
1410
+ }
1411
+
1412
+ printsToStdio() { return false; }
1413
+
1414
+ log(level, ...args) {
1415
+ if (level === 'debug' && !this.debug) return;
1416
+ if (level === 'perf' && !this.verbose) return;
1417
+ const prefix = '[M00nReporter]';
1418
+ const timestamp = this.verbose ? `[${((Date.now() - this.perfTracker.startTime) / 1000).toFixed(2)}s]` : '';
1419
+ if (level === 'error') console.error(prefix, timestamp, ...args);
1420
+ else if (level === 'warn') console.warn(prefix, timestamp, ...args);
1421
+ else if (level === 'perf') console.log(prefix, '[PERF]', timestamp, ...args);
1422
+ else console.log(prefix, timestamp, ...args);
1423
+ }
1424
+
1425
+ // -------------------------------------------------------------------------
1426
+ // LIFECYCLE HOOKS
1427
+ // -------------------------------------------------------------------------
1428
+
1429
  /**
   * Playwright hook: run is starting.
   * Health-checks the ingest server and registers the run in parallel.
   * Any failure disables the reporter so the test run itself continues.
   * @param {object} config - Playwright FullConfig.
   * @param {object} rootSuite - Root Suite containing all tests.
   */
  async onBegin(config, rootSuite) {
    if (this.disabled) return;

    // Wrap entire onBegin in try-catch to ensure test run continues even if reporter fails
    try {
      // Prepare run data
      const total = rootSuite.allTests().length;
      const launch = this.opts.launch || `Run ${new Date().toLocaleDateString()}`;
      const tags = parseTags(this.opts.tags);
      const startedAt = nowIso();

      if (this.verbose) {
        this.log('perf', `Starting run with ${total} tests`);
      }

      const runStartTime = performance.now();

      // Run health check and run start in parallel (2s timeout for health check)
      const healthCheckPromise = fetch(`${this.opts.serverUrl}/healthz`, {
        method: 'GET',
        signal: AbortSignal.timeout(2000),
      }).then(resp => {
        if (!resp.ok) throw new Error('Health check failed');
        return { ok: true };
      }).catch(err => ({ ok: false, error: err }));

      // Normalize attributes: flat key-value object
      const attributes = this.opts.attributes && typeof this.opts.attributes === 'object'
        ? { ...this.opts.attributes }
        : {};

      // Add workers count from Playwright config (useful for timeline visualization)
      if (config.workers != null) {
        attributes.workers = config.workers;
      }

      // Project is determined by the API key - no need to send projectId
      const runStartPromise = this.http.post('/api/ingest/v2/run/start', {
        launch,
        tags,
        total,
        startedAt,
        attributes,
      });

      const [healthResult, resp] = await Promise.all([healthCheckPromise, runStartPromise]);

      // Track run start timing
      this.perfTracker.timings.runStart = performance.now() - runStartTime;

      // Check health result first - an unreachable server disables the
      // reporter even if the run-start request happened to succeed.
      if (!healthResult.ok) {
        this.log('warn', `Server unavailable (${this.opts.serverUrl}). Reporter disabled - tests will continue without reporting.`);
        this.disabled = true;
        return;
      }

      // resp.permanent marks non-retryable failures (e.g. bad API key).
      if (resp.error) {
        if (resp.permanent) {
          this.log('warn', `${resp.error}. Reporter disabled - tests will continue without reporting.`);
        } else {
          this.log('warn', `Failed to start run: ${resp.error}. Reporter disabled - tests will continue without reporting.`);
        }
        this.disabled = true;
        return;
      }

      this.runId = resp.runId;
      this.perfTracker.snapshotMemory('run_started');

      // Log to file
      this.fileLogger?.event('RUN_START', {
        runId: this.runId,
        total,
        duration: Math.round(this.perfTracker.timings.runStart),
      });

      if (this.verbose) {
        this.log('perf', `Run started in ${Math.round(this.perfTracker.timings.runStart)}ms: ${this.runId}`);
      } else {
        this.log('info', `Run started: ${this.runId}`);
      }
    } catch (err) {
      // Catch any unexpected errors during initialization
      this.log('warn', `Reporter initialization failed: ${err.message}. Tests will continue without reporting.`);
      this.disabled = true;
    }
  }
1517
+
1518
+ async onTestBegin(test, result) {
1519
+ if (this.disabled || !this.runId) return;
1520
+
1521
+ // Wrap in try-catch to ensure test processing never crashes the test run
1522
+ try {
1523
+ const testKey = getTestKey(test);
1524
+ const retry = result.retry || 0;
1525
+ // Use unique collector key that includes retry to avoid race conditions
1526
+ const collectorKey = getCollectorKey(test, retry);
1527
+
1528
+ // Generate testId (UUID) CLIENT-SIDE - no more waiting for server!
1529
+ const testId = crypto.randomUUID();
1530
+
1531
+ // Track test concurrency
1532
+ try {
1533
+ this.perfTracker.testStarted();
1534
+ } catch (e) {
1535
+ // Ignore perf tracking errors
1536
+ }
1537
+
1538
+ // Log to file
1539
+ try {
1540
+ this.fileLogger?.event('TEST_BEGIN', {
1541
+ testId,
1542
+ testKey: testKey.substring(0, 100),
1543
+ retry,
1544
+ activeTests: this.perfTracker.concurrency.currentActiveTests,
1545
+ });
1546
+ } catch (e) {
1547
+ // Ignore logging errors
1548
+ }
1549
+
1550
+ if (this.verbose) {
1551
+ this.log('perf', `Test BEGIN [${this.perfTracker.concurrency.currentActiveTests} active]: ${testKey.substring(0, 60)}...`);
1552
+ }
1553
+
1554
+ // Create collector for this test with real-time step streaming
1555
+ // testId is available IMMEDIATELY for step streaming
1556
+ const collector = new TestCollector({
1557
+ testId, // Client-generated UUID
1558
+ onStepEvent: this.createStepEventHandler(),
1559
+ });
1560
+ collector.testKey = testKey;
1561
+ collector.collectorKey = collectorKey; // Store for cleanup
1562
+ collector.runId = this.runId;
1563
+ collector.filePath = getFilePath(test);
1564
+ collector.titlePath = test.titlePath();
1565
+ collector.annotations = extractAnnotations(test);
1566
+ collector.retry = result.retry || 0;
1567
+ collector.startedAt = nowIso(result.startTime || new Date());
1568
+ collector._perfStartTime = performance.now(); // Track test duration for perf
1569
+
1570
+ this.collectors.set(collectorKey, collector);
1571
+
1572
+ // FIRE-AND-FORGET test/start - no waiting for response!
1573
+ // The server will create the test row, but we don't need to wait
1574
+ // Steps can be streamed immediately using the client-generated testId
1575
+ this.http.fire('/api/ingest/v2/test/start', {
1576
+ runId: this.runId,
1577
+ testId, // Client-generated UUID
1578
+ titlePath: collector.titlePath,
1579
+ filePath: collector.filePath,
1580
+ retry: collector.retry,
1581
+ startedAt: collector.startedAt,
1582
+ });
1583
+
1584
+ this.stats.testsStarted++;
1585
+ } catch (err) {
1586
+ // Catch ALL errors to ensure test run continues
1587
+ this.addError(`Test begin error: ${err.message}`);
1588
+ // Don't rethrow - test should continue even if reporting fails
1589
+ }
1590
+ }
1591
+
1592
+ async onStepBegin(test, result, step) {
1593
+ if (this.disabled || !this.runId) return;
1594
+
1595
+ // Wrap in try-catch to ensure step processing never crashes the test run
1596
+ try {
1597
+ const collectorKey = getCollectorKey(test, result.retry || 0);
1598
+ const collector = this.collectors.get(collectorKey);
1599
+ if (!collector) return;
1600
+
1601
+ collector.startStep(step);
1602
+ } catch (err) {
1603
+ // Silently ignore step errors - don't crash the test
1604
+ }
1605
+ }
1606
+
1607
+ async onStepEnd(test, result, step) {
1608
+ if (this.disabled || !this.runId) return;
1609
+
1610
+ // Wrap in try-catch to ensure step processing never crashes the test run
1611
+ try {
1612
+ const collectorKey = getCollectorKey(test, result.retry || 0);
1613
+ const collector = this.collectors.get(collectorKey);
1614
+ if (!collector) return;
1615
+
1616
+ collector.endStep(step, step.error);
1617
+ this.stats.stepsCounted++;
1618
+ } catch (err) {
1619
+ // Silently ignore step errors - don't crash the test
1620
+ }
1621
+ }
1622
+
1623
+ async onTestEnd(test, result) {
1624
+ if (this.disabled || !this.runId) return;
1625
+
1626
+ // Wrap the entire onTestEnd logic in a tracked promise
1627
+ // This ensures onEnd waits for ALL onTestEnd executions to complete
1628
+ // The promise is designed to NEVER reject - _doTestEnd catches all errors
1629
+ const testEndPromise = this._doTestEnd(test, result).catch(err => {
1630
+ // Extra safety net - if _doTestEnd somehow throws, catch it here
1631
+ try {
1632
+ this.addError(`Unexpected onTestEnd error: ${err.message}`);
1633
+ } catch (e) {
1634
+ // Ignore errors in error handling
1635
+ }
1636
+ });
1637
+ this.activeTestEndPromises.push(testEndPromise);
1638
+
1639
+ // Still await it here for Playwright's benefit
1640
+ await testEndPromise;
1641
+ }
1642
+
1643
  /**
   * Internal worker for onTestEnd: reads attachments, posts the final test
   * payload, and queues background attachment uploads.
   *
   * Contract: this method NEVER throws - every error path is caught and
   * recorded so test runs complete even if the reporting service is down.
   *
   * @param {object} test - Playwright TestCase.
   * @param {object} result - Playwright TestResult.
   */
  async _doTestEnd(test, result) {
    const collectorKey = getCollectorKey(test, result.retry || 0);
    const collector = this.collectors.get(collectorKey);
    if (!collector) {
      // Not an error - might happen if reporter was disabled mid-run
      if (!this.disabled) {
        this.addError(`No collector for test: ${collectorKey}`);
      }
      return;
    }

    const testEndStart = performance.now();

    // Log attachment info for debugging
    if (this.debug) {
      const attachmentCount = result.attachments?.length || 0;
      const attachmentNames = result.attachments?.map(a => a.name).join(', ') || 'none';
      this.log('debug', `Test ${collectorKey} has ${attachmentCount} attachments: [${attachmentNames}]`);
    }

    // testId is now client-generated - NO WAITING NEEDED!
    // This eliminates the 100-4000ms bottleneck from waiting for server response
    const testId = collector.testId;

    try {
      // Process attachments (async to avoid blocking event loop)
      const attachmentReadStart = performance.now();
      const attachmentPromises = (result.attachments || []).map(
        attachment => collector.addAttachment(attachment, collector.testId, this.runId, this.binaryAttachments)
          .catch(err => {
            // Don't let attachment read errors crash the test
            if (this.debug) {
              this.log('debug', `Failed to read attachment: ${err.message}`);
            }
            return null;
          })
      );
      await Promise.all(attachmentPromises);

      const attachmentReadDuration = performance.now() - attachmentReadStart;
      if (collector.attachments.length > 0) {
        this.perfTracker.timings.attachmentRead.push(attachmentReadDuration);
        const totalSize = collector.attachments.reduce((sum, a) => sum + (a.size || 0), 0);
        if (this.verbose) {
          this.log('perf', `Attachments read: ${collector.attachments.length} files, ${Math.round(totalSize/1024)}KB in ${Math.round(attachmentReadDuration)}ms`);
        }
      }

      this.stats.attachmentsCounted += collector.attachments.length;

      // Build payload WITHOUT attachments if using binary upload
      const payload = collector.buildPayload(result, this.binaryAttachments);

      // Send test completion report and AWAIT it directly with extended timeout
      const resp = await this.http.post('/api/ingest/v2/test/complete', payload, { timeout: 60000 });

      if (resp.error) {
        this.addError(`Failed to report test ${collector.testId}: ${resp.error}`);
        // Check if service became unavailable mid-run
        if (resp.serviceUnavailable && !this.disabled) {
          this.log('warn', 'Reporting service appears unavailable. Continuing tests without reporting.');
          // Don't fully disable - keep trying in case service comes back.
          // But track that we had connectivity issues: 5 consecutive
          // failures disables the reporter for the rest of the run.
          this._serviceUnavailableCount = (this._serviceUnavailableCount || 0) + 1;
          if (this._serviceUnavailableCount >= 5) {
            this.log('warn', 'Multiple service failures detected. Disabling reporter for remaining tests.');
            this.disabled = true;
          }
        }
      } else {
        this.stats.testsReported++;
        // Reset unavailable counter on success
        this._serviceUnavailableCount = 0;
        if (this.debug) {
          this.log('debug', `Test ${collector.testId} reported successfully`);
        }
      }

      // Track test completion time BEFORE attachment upload (fast path!)
      // This is the key optimization - test is "done" immediately
      const testEndDuration = performance.now() - testEndStart;
      this.perfTracker.timings.testComplete.push(testEndDuration);

      const testRunDuration = collector._perfStartTime
        ? Math.round(performance.now() - collector._perfStartTime)
        : result.duration;

      // Upload attachments in BACKGROUND - fire-and-forget with tracking.
      // Tests complete immediately, attachments upload in parallel with a
      // global concurrency limit. All pending uploads are awaited in onEnd
      // before the run finishes. Skip if we know service is unavailable.
      if (this.binaryAttachments && collector.attachments.length > 0 && !this.disabled) {
        const attachmentCount = collector.attachments.length;
        const attachmentSize = collector.attachments.reduce((sum, a) => sum + (a.size || 0), 0);

        // Create upload promise with error handling (don't let it crash)
        const uploadPromise = this.uploadAttachmentsWithBackpressure(
          collector.runId,
          collector.testId,
          collector.attachments
        ).catch(err => {
          this.addError(`Background upload failed for test ${testId}: ${err.message}`);
          this.fileLogger?.event('ATTACHMENT_UPLOAD_ERROR', {
            testId,
            error: err.message,
            attachmentCount,
          });
          return { uploaded: 0, failed: attachmentCount };
        });

        // Track promise (will be awaited in onEnd)
        this.pendingAttachmentUploads.push(uploadPromise);

        if (this.verbose) {
          const semStats = this.uploadSemaphore.getStats();
          this.log('perf', `Queued ${attachmentCount} attachments (${Math.round(attachmentSize/1024)}KB) for background upload (queue: ${semStats.queued}, active: ${semStats.running})`);
        }
      }

      // Log to file
      this.fileLogger?.event('TEST_END', {
        testId,
        testKey: collector.testKey.substring(0, 100),
        status: result.status,
        retry: collector.retry,
        runDuration: testRunDuration,
        reportDuration: Math.round(testEndDuration),
        steps: collector.steps.length,
        attachments: collector.attachments.length,
        activeTests: this.perfTracker.concurrency.currentActiveTests - 1,
      });

      if (this.verbose) {
        this.log('perf', `Test END [${this.perfTracker.concurrency.currentActiveTests - 1} active]: ${collector.testKey.substring(0, 40)}... (run: ${testRunDuration}ms, report: ${Math.round(testEndDuration)}ms)`);
      }
    } catch (err) {
      // Catch ALL errors to ensure test run continues
      this.addError(`Test ${testId} processing error: ${err.message}`);
      this.fileLogger?.event('TEST_ERROR', {
        testId,
        error: err.message,
      });
      // Don't rethrow - test should complete even if reporting fails
    } finally {
      // Track test concurrency
      this.perfTracker.testEnded();

      // Cleanup collector using the unique collectorKey
      this.collectors.delete(collectorKey);
    }
  }
1797
+
1798
+ // Upload attachments with global backpressure control (fire-and-forget friendly)
1799
+ // Uses semaphore to limit concurrent uploads across ALL tests
1800
+ // Returns { uploaded: N, failed: M } for tracking
1801
+ async uploadAttachmentsWithBackpressure(runId, testId, attachments) {
1802
+ if (!attachments || attachments.length === 0) return { uploaded: 0, failed: 0 };
1803
+
1804
+ const results = { uploaded: 0, failed: 0 };
1805
+ const uploadStart = performance.now();
1806
+
1807
+ // Separate small and large files
1808
+ const smallFiles = attachments.filter(a => a.buffer && !a.isLargeFile);
1809
+ const largeFiles = attachments.filter(a => a.filePath && a.isLargeFile);
1810
+
1811
+ // Create upload tasks for all files
1812
+ const uploadTasks = [];
1813
+
1814
+ // Small files - upload individually with semaphore for better parallelism
1815
+ for (const file of smallFiles) {
1816
+ uploadTasks.push(this.uploadSemaphore.run(async () => {
1817
+ try {
1818
+ const resp = await this.http.postMultipart(
1819
+ '/api/ingest/v2/attachment/upload',
1820
+ { runId, testId: String(testId) },
1821
+ [{ name: file.name, contentType: file.contentType, buffer: file.buffer }],
1822
+ { timeout: UPLOAD_TIMEOUT }
1823
+ );
1824
+
1825
+ if (resp.error) {
1826
+ this.addError(`Upload failed (${file.name}): ${resp.error}`);
1827
+ return false;
1828
+ }
1829
+ return true;
1830
+ } catch (err) {
1831
+ this.addError(`Upload error (${file.name}): ${err.message}`);
1832
+ return false;
1833
+ }
1834
+ }));
1835
+ }
1836
+
1837
+ // Large files - stream upload with semaphore
1838
+ for (const largeFile of largeFiles) {
1839
+ uploadTasks.push(this.uploadSemaphore.run(async () => {
1840
+ try {
1841
+ if (this.debug) {
1842
+ this.log('debug', `Streaming large file: ${largeFile.name} (${(largeFile.size / 1024 / 1024).toFixed(1)}MB)`);
1843
+ }
1844
+
1845
+ const resp = await this.http.postStreamFile(
1846
+ '/api/ingest/v2/attachment/stream',
1847
+ { runId, testId: String(testId) },
1848
+ largeFile.filePath,
1849
+ largeFile.name,
1850
+ largeFile.contentType,
1851
+ { timeout: STREAM_UPLOAD_TIMEOUT }
1852
+ );
1853
+
1854
+ if (resp.error) {
1855
+ this.addError(`Stream upload failed (${largeFile.name}): ${resp.error}`);
1856
+ return false;
1857
+ }
1858
+ return true;
1859
+ } catch (err) {
1860
+ this.addError(`Stream upload error (${largeFile.name}): ${err.message}`);
1861
+ return false;
1862
+ }
1863
+ }));
1864
+ }
1865
+
1866
+ // Wait for all uploads to complete
1867
+ const uploadResults = await Promise.allSettled(uploadTasks);
1868
+
1869
+ // Count results
1870
+ for (const result of uploadResults) {
1871
+ if (result.status === 'fulfilled' && result.value === true) {
1872
+ results.uploaded++;
1873
+ } else {
1874
+ results.failed++;
1875
+ }
1876
+ }
1877
+
1878
+ const uploadDuration = performance.now() - uploadStart;
1879
+
1880
+ if (this.verbose) {
1881
+ this.log('perf', `Attachments uploaded: ${results.uploaded}/${attachments.length} in ${Math.round(uploadDuration)}ms`);
1882
+ }
1883
+
1884
+ // Log to file
1885
+ this.fileLogger?.event('ATTACHMENTS_UPLOADED', {
1886
+ testId,
1887
+ total: attachments.length,
1888
+ uploaded: results.uploaded,
1889
+ failed: results.failed,
1890
+ duration: Math.round(uploadDuration),
1891
+ });
1892
+
1893
+ return results;
1894
+ }
1895
+
1896
+ async onEnd(result) {
1897
+ // Always print summary at the end, even if disabled
1898
+ const printSummaryOnExit = () => {
1899
+ try {
1900
+ this.printSummary();
1901
+ } catch (e) {
1902
+ // Ignore summary errors
1903
+ }
1904
+ };
1905
+
1906
+ if (this.disabled || !this.runId) {
1907
+ printSummaryOnExit();
1908
+ return;
1909
+ }
1910
+
1911
+ const runEndStart = performance.now();
1912
+
1913
+ try {
1914
+ this.perfTracker.snapshotMemory('run_ending');
1915
+ } catch (e) {
1916
+ // Ignore memory snapshot errors
1917
+ }
1918
+
1919
+ if (this.verbose) {
1920
+ this.log('perf', `Run ending - ${this.activeTestEndPromises.length} tests in flight`);
1921
+ }
1922
+
1923
+ // CRITICAL: Keep the event loop alive until we're done
1924
+ // This prevents Node.js/Playwright from exiting prematurely
1925
+ const keepAlive = setInterval(() => {}, 100);
1926
+
1927
+ try {
1928
+ // Wait for ALL in-flight onTestEnd executions to complete
1929
+ // with a maximum timeout to avoid hanging forever
1930
+ if (this.activeTestEndPromises.length > 0) {
1931
+ if (this.verbose) {
1932
+ this.log('perf', `Waiting for ${this.activeTestEndPromises.length} test(s) to finish reporting...`);
1933
+ } else if (this.debug) {
1934
+ this.log('debug', `Waiting for ${this.activeTestEndPromises.length} test(s) to finish reporting...`);
1935
+ }
1936
+
1937
+ const waitStart = performance.now();
1938
+
1939
+ // Wait with timeout - max 60 seconds
1940
+ const timeout = new Promise(resolve => setTimeout(() => resolve('timeout'), 60000));
1941
+ try {
1942
+ const waitResult = await Promise.race([
1943
+ Promise.allSettled(this.activeTestEndPromises),
1944
+ timeout
1945
+ ]);
1946
+
1947
+ if (this.verbose) {
1948
+ this.log('perf', `Test reporting wait completed in ${Math.round(performance.now() - waitStart)}ms`);
1949
+ }
1950
+
1951
+ if (waitResult === 'timeout') {
1952
+ this.addError('Timeout waiting for tests to finish - some data may be lost');
1953
+ }
1954
+ } catch (e) {
1955
+ this.addError(`Error waiting for tests: ${e.message}`);
1956
+ }
1957
+ }
1958
+
1959
+ // Wait for all pending attachment uploads (fire-and-forget uploads from onTestEnd)
1960
+ // These are uploaded in background with concurrency control via semaphore
1961
+ if (this.pendingAttachmentUploads.length > 0) {
1962
+ try {
1963
+ const attachmentWaitStart = performance.now();
1964
+ const pendingCount = this.pendingAttachmentUploads.length;
1965
+ const semStats = this.uploadSemaphore.getStats();
1966
+
1967
+ if (this.verbose) {
1968
+ this.log('perf', `Waiting for ${pendingCount} attachment upload batches (${semStats.running} active, ${semStats.queued} queued)...`);
1969
+ }
1970
+
1971
+ // Log to file
1972
+ this.fileLogger?.event('ATTACHMENTS_WAIT_START', {
1973
+ pendingBatches: pendingCount,
1974
+ activeUploads: semStats.running,
1975
+ queuedUploads: semStats.queued,
1976
+ });
1977
+
1978
+ // Wait with generous timeout for attachment uploads
1979
+ const attachmentTimeout = new Promise(resolve =>
1980
+ setTimeout(() => resolve('timeout'), END_UPLOAD_WAIT_TIMEOUT)
1981
+ );
1982
+
1983
+ const uploadWaitResult = await Promise.race([
1984
+ Promise.allSettled(this.pendingAttachmentUploads),
1985
+ attachmentTimeout
1986
+ ]);
1987
+
1988
+ const attachmentWaitDuration = Math.round(performance.now() - attachmentWaitStart);
1989
+
1990
+ if (uploadWaitResult === 'timeout') {
1991
+ this.addError(`Timeout waiting for attachment uploads after ${END_UPLOAD_WAIT_TIMEOUT/1000}s - some attachments may be missing`);
1992
+ this.fileLogger?.event('ATTACHMENTS_WAIT_TIMEOUT', {
1993
+ duration: attachmentWaitDuration,
1994
+ pendingBatches: pendingCount,
1995
+ });
1996
+ } else {
1997
+ // Count success/failure
1998
+ let totalUploaded = 0;
1999
+ let totalFailed = 0;
2000
+ for (const res of uploadWaitResult) {
2001
+ if (res.status === 'fulfilled' && res.value) {
2002
+ totalUploaded += res.value.uploaded || 0;
2003
+ totalFailed += res.value.failed || 0;
2004
+ }
2005
+ }
2006
+
2007
+ if (this.verbose) {
2008
+ this.log('perf', `Attachment uploads complete: ${totalUploaded} uploaded, ${totalFailed} failed in ${attachmentWaitDuration}ms`);
2009
+ }
2010
+
2011
+ this.fileLogger?.event('ATTACHMENTS_WAIT_COMPLETE', {
2012
+ duration: attachmentWaitDuration,
2013
+ batches: pendingCount,
2014
+ uploaded: totalUploaded,
2015
+ failed: totalFailed,
2016
+ });
2017
+ }
2018
+ } catch (e) {
2019
+ this.addError(`Error waiting for attachments: ${e.message}`);
2020
+ }
2021
+ }
2022
+
2023
+ // test/start is now fire-and-forget - no need to wait for responses
2024
+ // testId is generated client-side, so we don't need server confirmation
2025
+
2026
+ // Flush any remaining streamed steps (wrapped in try-catch)
2027
+ try {
2028
+ const pendingSteps = this.stepBatcher.getPending();
2029
+ if (this.verbose && pendingSteps > 0) {
2030
+ this.log('perf', `Flushing ${pendingSteps} remaining step events`);
2031
+ }
2032
+ this.stepBatcher.flush();
2033
+ } catch (e) {
2034
+ // Ignore step flush errors
2035
+ }
2036
+
2037
+ // Check if any collectors are still active (tests that never completed)
2038
+ // and try to report them as incomplete
2039
+ if (this.collectors.size > 0) {
2040
+ try {
2041
+ this.log('warn', `${this.collectors.size} test(s) incomplete - attempting to report...`);
2042
+ await this.reportIncompleteTests();
2043
+ } catch (e) {
2044
+ this.log('warn', `Failed to report incomplete tests: ${e.message}`);
2045
+ }
2046
+ }
2047
+
2048
+ // Give a small delay for any final network I/O to complete
2049
+ await new Promise(r => setTimeout(r, 200));
2050
+
2051
+ // Send run end with retry (don't let this block test completion)
2052
+ try {
2053
+ const resp = await this.http.post('/api/ingest/v2/run/end', {
2054
+ runId: this.runId,
2055
+ status: result.status,
2056
+ endedAt: nowIso(),
2057
+ });
2058
+
2059
+ if (resp.error) {
2060
+ this.log('warn', `Failed to end run: ${resp.error}`);
2061
+ }
2062
+ } catch (e) {
2063
+ this.log('warn', `Error ending run: ${e.message}`);
2064
+ }
2065
+
2066
+ // Track run end timing
2067
+ try {
2068
+ this.perfTracker.timings.runEnd = performance.now() - runEndStart;
2069
+ this.perfTracker.snapshotMemory('run_ended');
2070
+
2071
+ // Log to file
2072
+ this.fileLogger?.event('RUN_END', {
2073
+ runId: this.runId,
2074
+ status: result.status,
2075
+ duration: Math.round(this.perfTracker.timings.runEnd),
2076
+ testsStarted: this.stats.testsStarted,
2077
+ testsReported: this.stats.testsReported,
2078
+ errors: this.stats.errors,
2079
+ });
2080
+
2081
+ if (this.verbose) {
2082
+ this.log('perf', `Run end completed in ${Math.round(this.perfTracker.timings.runEnd)}ms`);
2083
+ }
2084
+ } catch (e) {
2085
+ // Ignore tracking errors
2086
+ }
2087
+
2088
+ printSummaryOnExit();
2089
+
2090
+ // Write final performance report to file
2091
+ try {
2092
+ this.writePerformanceReportToFile();
2093
+ } catch (e) {
2094
+ // Ignore perf report errors
2095
+ }
2096
+
2097
+ // Close file logger
2098
+ try {
2099
+ this.fileLogger?.close();
2100
+ } catch (e) {
2101
+ // Ignore logger close errors
2102
+ }
2103
+
2104
+ } catch (err) {
2105
+ // Catch any unexpected errors in onEnd to ensure we don't crash the test run
2106
+ this.log('warn', `Error in reporter onEnd: ${err.message}`);
2107
+ printSummaryOnExit();
2108
+ } finally {
2109
+ // Stop keeping the event loop alive
2110
+ clearInterval(keepAlive);
2111
+ }
2112
+ }
2113
+
2114
+ // Report any tests that didn't complete normally
2115
+ async reportIncompleteTests() {
2116
+ const promises = [];
2117
+
2118
+ for (const [testKey, collector] of this.collectors) {
2119
+ // testId is always available (client-generated UUID)
2120
+ this.log('warn', `Reporting incomplete test ${collector.testId}: ${testKey}`);
2121
+
2122
+ const payload = {
2123
+ testId: collector.testId, // Client-generated UUID
2124
+ runId: collector.runId,
2125
+ titlePath: collector.titlePath,
2126
+ filePath: collector.filePath,
2127
+ retry: collector.retry,
2128
+ startedAt: collector.startedAt,
2129
+ endedAt: nowIso(),
2130
+ status: 'interrupted', // Mark as interrupted since it didn't complete normally
2131
+ duration: Date.now() - new Date(collector.startedAt).getTime(),
2132
+ error: { message: 'Test was interrupted - reporter did not receive completion data' },
2133
+ steps: collector.steps.map(s => ({
2134
+ title: s.title,
2135
+ category: s.category,
2136
+ status: s.status === 'running' ? 'interrupted' : s.status,
2137
+ duration: s.duration,
2138
+ error: s.error,
2139
+ nestingLevel: s.nestingLevel,
2140
+ index: s.index,
2141
+ })),
2142
+ attachments: [],
2143
+ };
2144
+
2145
+ promises.push(
2146
+ this.http.post('/api/ingest/v2/test/complete', payload)
2147
+ .then(resp => {
2148
+ if (!resp.error) {
2149
+ this.stats.testsReported++;
2150
+ this.log('info', `Incomplete test ${collector.testId} reported as interrupted`);
2151
+ } else {
2152
+ this.addError(`Failed to report incomplete test ${collector.testId}: ${resp.error}`);
2153
+ }
2154
+ })
2155
+ .catch((err) => {
2156
+ this.addError(`Incomplete test ${collector.testId} report failed: ${err.message}`);
2157
+ })
2158
+ );
2159
+ }
2160
+
2161
+ if (promises.length > 0) {
2162
+ await Promise.allSettled(promises);
2163
+ }
2164
+
2165
+ // Clear all collectors
2166
+ this.collectors.clear();
2167
+ }
2168
+
2169
+ // Write performance report to log file
2170
+ writePerformanceReportToFile() {
2171
+ if (!this.fileLogger?.enabled) return;
2172
+
2173
+ const perf = this.perfTracker.generateReport();
2174
+ if (!perf) return;
2175
+
2176
+ // Build report content
2177
+ const lines = [];
2178
+ lines.push(`Run ID: ${this.runId || 'N/A'}`);
2179
+ lines.push(`Total Duration: ${perf.duration}`);
2180
+ lines.push('');
2181
+
2182
+ lines.push('=== Stats ===');
2183
+ lines.push(`Tests: ${this.stats.testsReported}/${this.stats.testsStarted} reported`);
2184
+ lines.push(`Steps: ${this.stats.stepsCounted} collected, ${this.stats.stepsStreamed} streamed`);
2185
+ lines.push(`Attachments: ${this.stats.attachmentsCounted}`);
2186
+ lines.push(`Errors: ${this.stats.errors}`);
2187
+ lines.push('');
2188
+
2189
+ lines.push('=== Concurrency ===');
2190
+ lines.push(`Peak active tests: ${perf.concurrency.peakActiveTests}`);
2191
+ lines.push(`Peak pending requests: ${perf.concurrency.peakPendingRequests}`);
2192
+ lines.push(`Peak queued steps: ${perf.concurrency.peakQueuedSteps}`);
2193
+ lines.push('');
2194
+
2195
+ lines.push('=== Throughput ===');
2196
+ lines.push(`Total requests: ${perf.throughput.totalRequests} (${perf.throughput.requestsPerSecond}/s)`);
2197
+ lines.push(`Retry rate: ${perf.throughput.retryRate} (${perf.throughput.totalRetries} retries)`);
2198
+ lines.push(`Data uploaded: ${perf.throughput.bytesUploaded} (${perf.throughput.uploadSpeed})`);
2199
+ lines.push(`Step batches: ${perf.throughput.batchesFlushed} (avg ${perf.throughput.avgStepsPerBatch} steps/batch)`);
2200
+ lines.push('');
2201
+
2202
+ lines.push('=== Latencies (ms) ===');
2203
+ if (perf.latencies.runStart) lines.push(`Run start: ${perf.latencies.runStart}`);
2204
+ if (perf.latencies.runEnd) lines.push(`Run end: ${perf.latencies.runEnd}`);
2205
+
2206
+ const formatStats = (stats) => {
2207
+ if (!stats) return 'N/A';
2208
+ return `n=${stats.count} min=${stats.min} avg=${stats.avg} p50=${stats.p50} p90=${stats.p90} p99=${stats.p99} max=${stats.max}`;
2209
+ };
2210
+
2211
+ if (perf.latencies.httpPost) lines.push(`HTTP POST: ${formatStats(perf.latencies.httpPost)}`);
2212
+ if (perf.latencies.httpMultipart) lines.push(`Multipart: ${formatStats(perf.latencies.httpMultipart)}`);
2213
+ if (perf.latencies.httpStream) lines.push(`Stream: ${formatStats(perf.latencies.httpStream)}`);
2214
+ if (perf.latencies.attachmentRead) lines.push(`Attachment read: ${formatStats(perf.latencies.attachmentRead)}`);
2215
+ if (perf.latencies.testComplete) lines.push(`Test complete: ${formatStats(perf.latencies.testComplete)}`);
2216
+ lines.push('');
2217
+
2218
+ lines.push('=== Memory (MB) ===');
2219
+ for (const snap of perf.memory) {
2220
+ lines.push(`[${(snap.timestamp/1000).toFixed(1)}s] ${snap.label}: heap=${snap.heapUsed}/${snap.heapTotal} rss=${snap.rss} external=${snap.external}`);
2221
+ }
2222
+ lines.push('');
2223
+
2224
+ // Error messages
2225
+ if (this.errorMessages.length > 0) {
2226
+ lines.push('=== Errors ===');
2227
+ for (const msg of this.errorMessages) {
2228
+ lines.push(` - ${msg}`);
2229
+ }
2230
+ lines.push('');
2231
+ }
2232
+
2233
+ // Write JSON summary for machine parsing
2234
+ lines.push('=== JSON Summary ===');
2235
+ lines.push(JSON.stringify({
2236
+ runId: this.runId,
2237
+ stats: this.stats,
2238
+ concurrency: perf.concurrency,
2239
+ throughput: perf.throughput,
2240
+ latencies: {
2241
+ runStart: this.perfTracker.timings.runStart,
2242
+ runEnd: this.perfTracker.timings.runEnd,
2243
+ httpPost: perf.latencies.httpPost,
2244
+ httpMultipart: perf.latencies.httpMultipart,
2245
+ httpStream: perf.latencies.httpStream,
2246
+ attachmentRead: perf.latencies.attachmentRead,
2247
+ testComplete: perf.latencies.testComplete,
2248
+ },
2249
+ memory: perf.memory,
2250
+ }, null, 2));
2251
+
2252
+ this.fileLogger.writeReport('PERFORMANCE SUMMARY', lines.join('\n'));
2253
+ }
2254
+
2255
  // Print the end-of-run summary to the console: run ID, counts, error
  // details, and - in verbose mode - a full performance report with
  // heuristic bottleneck hints. Called from onEnd even when the reporter
  // is disabled, so it must not assume a run was actually created.
  printSummary() {
    console.log('\n' + '═'.repeat(60));
    console.log('[M00nReporter] Run Summary');
    console.log('─'.repeat(60));
    console.log(` Run ID: ${this.runId || 'N/A'}`);
    console.log(` Tests: ${this.stats.testsReported}/${this.stats.testsStarted} reported`);
    console.log(` Steps: ${this.stats.stepsCounted} collected`);
    if (this.realtime) {
      console.log(` Streamed: ${this.stats.stepsStreamed} step events`);
    }
    console.log(` Attachments: ${this.stats.attachmentsCounted}`);
    // Error section only when something went wrong; errorMessages may be a
    // truncated sample, so show the overflow count as well.
    if (this.stats.errors > 0) {
      console.log(` Errors: ${this.stats.errors}`);
      console.log('─'.repeat(60));
      console.log(' Error Details:');
      for (const msg of this.errorMessages) {
        console.log(` • ${msg}`);
      }
      if (this.stats.errors > this.errorMessages.length) {
        console.log(` ... and ${this.stats.errors - this.errorMessages.length} more`);
      }
    }

    // Print performance report when verbose mode is enabled
    if (this.verbose) {
      const perf = this.perfTracker.generateReport();
      if (perf) {
        console.log('─'.repeat(60));
        console.log('[M00nReporter] Performance Report');
        console.log('─'.repeat(60));
        console.log(` Total Duration: ${perf.duration}`);

        // Concurrency
        console.log('\n Concurrency:');
        console.log(` Peak active tests: ${perf.concurrency.peakActiveTests}`);
        console.log(` Peak pending requests: ${perf.concurrency.peakPendingRequests}`);
        console.log(` Peak queued steps: ${perf.concurrency.peakQueuedSteps}`);

        // Throughput
        console.log('\n Throughput:');
        console.log(` Total requests: ${perf.throughput.totalRequests} (${perf.throughput.requestsPerSecond}/s)`);
        console.log(` Retry rate: ${perf.throughput.retryRate} (${perf.throughput.totalRetries} retries)`);
        console.log(` Data uploaded: ${perf.throughput.bytesUploaded} (${perf.throughput.uploadSpeed})`);
        console.log(` Step batches: ${perf.throughput.batchesFlushed} (avg ${perf.throughput.avgStepsPerBatch} steps/batch)`);

        // Latencies - each row is optional (only measured operations appear)
        console.log('\n Latencies (ms):');
        if (perf.latencies.runStart) {
          console.log(` Run start: ${perf.latencies.runStart}`);
        }
        if (perf.latencies.runEnd) {
          console.log(` Run end: ${perf.latencies.runEnd}`);
        }

        // Console variant omits p50 (the file report includes it).
        const formatStats = (stats) => {
          if (!stats) return 'N/A';
          return `n=${stats.count} min=${stats.min} avg=${stats.avg} p90=${stats.p90} p99=${stats.p99} max=${stats.max}`;
        };

        if (perf.latencies.httpPost) {
          console.log(` HTTP POST: ${formatStats(perf.latencies.httpPost)}`);
        }
        if (perf.latencies.httpMultipart) {
          console.log(` Multipart: ${formatStats(perf.latencies.httpMultipart)}`);
        }
        if (perf.latencies.httpStream) {
          console.log(` Stream upload: ${formatStats(perf.latencies.httpStream)}`);
        }
        if (perf.latencies.attachmentRead) {
          console.log(` Attachment read: ${formatStats(perf.latencies.attachmentRead)}`);
        }
        if (perf.latencies.testComplete) {
          console.log(` Test complete: ${formatStats(perf.latencies.testComplete)}`);
        }

        // Memory snapshots taken at checkpoints during the run
        if (perf.memory.length > 0) {
          console.log('\n Memory (MB):');
          for (const snap of perf.memory) {
            console.log(` [${(snap.timestamp/1000).toFixed(1)}s] ${snap.label}: heap=${snap.heapUsed}/${snap.heapTotal} rss=${snap.rss}`);
          }
        }

        // Bottleneck analysis hints - fixed heuristic thresholds, each
        // producing a human-readable warning when exceeded
        console.log('\n Bottleneck Analysis:');
        const hints = [];

        if (perf.latencies.httpPost && perf.latencies.httpPost.p99 > 1000) {
          hints.push(`⚠ HTTP POST p99=${perf.latencies.httpPost.p99}ms - server may be overloaded`);
        }
        if (perf.throughput.totalRetries > perf.throughput.totalRequests * 0.1) {
          hints.push(`⚠ High retry rate (${perf.throughput.retryRate}) - network or server issues`);
        }
        if (perf.concurrency.peakActiveTests > 30) {
          hints.push(`⚠ High test concurrency (${perf.concurrency.peakActiveTests}) - consider reducing workers`);
        }
        if (perf.concurrency.peakQueuedSteps > 500) {
          hints.push(`⚠ Large step queue (${perf.concurrency.peakQueuedSteps}) - step streaming may be backed up`);
        }
        if (perf.latencies.testComplete && perf.latencies.testComplete.p90 > 2000) {
          hints.push(`⚠ Test completion p90=${perf.latencies.testComplete.p90}ms - attachments may be slow`);
        }

        // Heap growth from first to last snapshot flags potential leaks
        // (values are in MB per the snapshot format above)
        const memSnapshots = perf.memory;
        if (memSnapshots.length >= 2) {
          const first = memSnapshots[0];
          const last = memSnapshots[memSnapshots.length - 1];
          const heapGrowth = last.heapUsed - first.heapUsed;
          if (heapGrowth > 100) {
            hints.push(`⚠ Memory grew by ${heapGrowth}MB - possible memory leak or large attachments`);
          }
        }

        if (hints.length === 0) {
          console.log(' ✓ No obvious bottlenecks detected');
        } else {
          for (const hint of hints) {
            console.log(` ${hint}`);
          }
        }
      }
    }

    // Show log file path if logging to file
    if (this.logFile) {
      console.log('─'.repeat(60));
      console.log(` Log file: ${this.logFile}`);
    }

    console.log('═'.repeat(60) + '\n');
  }
2386
+ }