watchdog-dashboard 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/MIT-LICENSE +20 -0
- data/README.md +123 -0
- data/Rakefile +8 -0
- data/app/assets/config/watchdog_dashboard_manifest.js +1 -0
- data/app/assets/stylesheets/watchdog/dashboard/application.css +15 -0
- data/app/controllers/concerns/flaky_stats.rb +217 -0
- data/app/controllers/concerns/metric_stats.rb +217 -0
- data/app/controllers/watchdog/dashboard/application_controller.rb +6 -0
- data/app/controllers/watchdog/dashboard/dashboard_controller.rb +71 -0
- data/app/controllers/watchdog/dashboard/metrics_controller.rb +37 -0
- data/app/helpers/watchdog/dashboard/application_helper.rb +6 -0
- data/app/helpers/watchdog/dashboard/dashboard_helper.rb +4 -0
- data/app/helpers/watchdog/dashboard/metrics_helper.rb +4 -0
- data/app/jobs/watchdog/dashboard/application_job.rb +6 -0
- data/app/mailers/watchdog/dashboard/application_mailer.rb +8 -0
- data/app/models/watchdog/dashboard/application_record.rb +7 -0
- data/app/models/watchdog/dashboard/metric.rb +5 -0
- data/app/views/layouts/watchdog/dashboard/application.html.erb +28 -0
- data/app/views/watchdog/dashboard/dashboard/_sidebar.html.erb +83 -0
- data/app/views/watchdog/dashboard/dashboard/flakies.html.erb +263 -0
- data/app/views/watchdog/dashboard/dashboard/historic.html.erb +495 -0
- data/app/views/watchdog/dashboard/dashboard/index.html.erb +219 -0
- data/app/views/watchdog/dashboard/dashboard/metrics.html.erb +263 -0
- data/config/initializers/assets.rb +1 -0
- data/config/routes.rb +10 -0
- data/db/migrate/20250331204354_create_watchdog_dashboard_metrics.rb +14 -0
- data/lib/tasks/watchdog/dashboard_tasks.rake +4 -0
- data/lib/watchdog/dashboard/engine.rb +7 -0
- data/lib/watchdog/dashboard/version.rb +5 -0
- data/lib/watchdog/dashboard.rb +19 -0
- metadata +92 -0
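The largest addition is the 495-line historic view reproduced below. It renders nine Chart.js charts from controller-provided aggregates (`@performance_trend`, `@run_time_distribution`, `@test_count_trend`, `@longest_tests_by_day`, `@total_execution_time_by_day`, `@tests_exceeding_time_threshold`, `@failed_tests_trend_by_file`, `@avg_execution_time_by_file`, `@stability_trend`), each serialized into the page with `to_json.html_safe`. As a rough sketch of the row shape the charts expect — inferred only from the fields the view's JavaScript reads, with invented sample values; the real queries live in the flaky_stats/metric_stats concerns and are not shown in this diff:

```ruby
require "json"

# Hypothetical sample rows for a few of the instance variables the view reads.
# The key names come from the view's `item.*` accesses; the values are made up.
performance_trend = [
  { test_date: "2025-03-30", average_run_time: 1.42 },
  { test_date: "2025-03-31", average_run_time: 1.18 }
]

run_time_distribution = [
  { run_time_bin: 0.5, test_count: 120 },
  { run_time_bin: 1.0, test_count: 34 }
]

stability_trend = [
  { test_date: "2025-03-31", passed_count: 180, failed_count: 3,
    skipped_count: 2, pending_count: 1, error_count: 0 }
]

# Each collection reaches the page via `<%= @var.to_json.html_safe %>`,
# so any object that serializes to JSON with these keys would work.
puts performance_trend.to_json
```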
data/app/views/watchdog/dashboard/dashboard/historic.html.erb
@@ -0,0 +1,495 @@
<script src="https://cdnjs.cloudflare.com/ajax/libs/Chart.js/3.7.1/chart.min.js"></script>
<style>
  .dashboard {
    display: grid;
    grid-template-columns: 1fr 1fr;
    gap: 20px;
    align-items: start;
  }
  .chart-container {
    background-color: rgba(31, 41, 55, 0.8);
    border-radius: 0.5rem;
    border: 1px solid rgb(55, 65, 81);
    padding: 20px;
    box-shadow: 0 10px 15px -3px rgba(0, 0, 0, 0.1);
  }
  .chart-title {
    text-align: center;
    color: rgb(250, 204, 21);
    margin-bottom: 20px;
    font-size: 1.5rem;
  }
  canvas {
    max-height: 400px;
  }
  .metrics-table {
    width: 100%;
    border-collapse: collapse;
  }
  .metrics-table th, .metrics-table td {
    border: 1px solid rgb(55, 65, 81);
    padding: 10px;
    text-align: left;
  }
  .metrics-table th {
    background-color: rgba(31, 41, 55, 0.8);
    color: rgb(250, 204, 21);
  }
</style>

<div class="dashboard">
  <div class="chart-container">
    <h2 class="chart-title">📊 Tests Performance Trend</h2>
    <p>This graphic shows the trend of test performance over time, tracking changes in performance scores across multiple test runs. It helps identify if there has been an overall improvement or decline in the performance of tests.</p>
    <canvas id="performanceTrendChart"></canvas>
  </div>
  <script>
    // Performance Trend Chart
    const performanceTrendCtx = document.getElementById('performanceTrendChart').getContext('2d');
    const performanceTrendData = <%= @performance_trend.to_json.html_safe %>;
    new Chart(performanceTrendCtx, {
      type: 'line',
      data: {
        labels: performanceTrendData.map(item => item.test_date),
        datasets: [{
          label: 'Average Run Time (s)',
          data: performanceTrendData.map(item => item.average_run_time),
          borderColor: 'rgb(74, 222, 128)',
          backgroundColor: 'rgba(74, 222, 128, 0.2)',
          tension: 0.1
        }]
      },
      options: {
        responsive: true,
        scales: {
          y: {
            beginAtZero: true,
            ticks: { color: 'white' }
          },
          x: {
            ticks: { color: 'white' }
          }
        },
        plugins: {
          legend: {
            labels: { color: 'white' }
          }
        }
      }
    });
  </script>

  <div class="chart-container">
    <h2 class="chart-title">🕒 Run Time Distribution</h2>
    <p>This chart displays the distribution of test execution times, allowing you to observe how the runtime of tests is spread across different time intervals. It helps highlight tests that take longer than expected or show variations in execution times.</p>
    <canvas id="runTimeDistributionChart"></canvas>
  </div>

  <script>
    // Run Time Distribution Chart
    const runTimeDistributionCtx = document.getElementById('runTimeDistributionChart').getContext('2d');
    const runTimeDistributionData = <%= @run_time_distribution.to_json.html_safe %>;
    new Chart(runTimeDistributionCtx, {
      type: 'bar',
      data: {
        labels: runTimeDistributionData.map(item => `${item.run_time_bin}s`),
        datasets: [{
          label: 'Number of Tests',
          data: runTimeDistributionData.map(item => item.test_count),
          backgroundColor: 'rgb(239, 68, 68)',
          borderColor: 'rgb(239, 68, 68)',
        }]
      },
      options: {
        responsive: true,
        scales: {
          y: {
            beginAtZero: true,
            ticks: { color: 'white' }
          },
          x: {
            ticks: { color: 'white' }
          }
        },
        plugins: {
          legend: {
            labels: { color: 'white' }
          }
        }
      }
    });
  </script>

  <div class="chart-container">
    <h2 class="chart-title">📊 Test Count Trend</h2>
    <p>This graphic shows the trend of the number of tests executed over a specific period, helping to track whether the volume of tests is increasing or decreasing. It can also indicate patterns in test execution frequency.</p>
    <canvas id="testCountTrendChart"></canvas>
  </div>

  <script>
    const testCountTrendCtx = document.getElementById('testCountTrendChart').getContext('2d');
    const testCountTrendData = <%= @test_count_trend.to_json.html_safe %>;
    new Chart(testCountTrendCtx, {
      type: 'bar',
      data: {
        labels: testCountTrendData.map(item => item.test_date), // Day labels
        datasets: [{
          label: 'Number of Tests',
          data: testCountTrendData.map(item => item.test_count), // Number of tests per day
          backgroundColor: 'rgb(75, 192, 192)', // Bar color
          borderColor: 'rgb(75, 192, 192)', // Bar border color
          borderWidth: 1
        }]
      },
      options: {
        responsive: true,
        scales: {
          y: {
            beginAtZero: true,
            ticks: { color: 'white' } // Y-axis tick color
          },
          x: {
            ticks: { color: 'white' } // X-axis tick color
          }
        },
        plugins: {
          legend: {
            labels: { color: 'white' } // Legend label color
          }
        }
      }
    });
  </script>

  <div class="chart-container">
    <h2 class="chart-title">⏱️ Longest Tests by Day</h2>
    <p>This chart highlights the longest running tests for each day, helping to identify tests that may be slowing down the testing process. It allows you to track specific tests that consistently take longer than others.</p>
    <canvas id="longestTestsByDayChart"></canvas>
  </div>

  <script>
    const longestTestsByDayCtx = document.getElementById('longestTestsByDayChart').getContext('2d');
    const longestTestsByDayData = <%= @longest_tests_by_day.to_json.html_safe %>;

    new Chart(longestTestsByDayCtx, {
      type: 'bar',
      data: {
        labels: longestTestsByDayData.map(item => item.test_date), // Day labels
        datasets: [{
          label: 'Longest Test Run Time (s)',
          data: longestTestsByDayData.map(item => item.run_time), // Duration of the longest test each day
          backgroundColor: 'rgb(255, 99, 132)', // Bar color
          borderColor: 'rgb(255, 99, 132)', // Bar border color
          borderWidth: 1
        }]
      },
      options: {
        responsive: true,
        scales: {
          y: {
            beginAtZero: true,
            ticks: { color: 'white' } // Y-axis tick color
          },
          x: {
            ticks: { color: 'white' } // X-axis tick color
          }
        },
        plugins: {
          legend: {
            labels: { color: 'white' } // Legend label color
          }
        }
      }
    });
  </script>

  <div class="chart-container">
    <h2 class="chart-title">⏳ Total Execution Time by Day</h2>
    <p>This graphic illustrates the total execution time of tests for each day, providing insight into the overall testing workload. It can highlight trends in testing time, such as spikes or periods of more intensive testing.</p>
    <canvas id="totalExecutionTimeChart"></canvas>
  </div>

  <script>
    const totalExecutionTimeCtx = document.getElementById('totalExecutionTimeChart').getContext('2d');
    const totalExecutionTimeData = <%= @total_execution_time_by_day.to_json.html_safe %>;

    new Chart(totalExecutionTimeCtx, {
      type: 'line',
      data: {
        labels: totalExecutionTimeData.map(item => item.test_date), // Days on the X axis
        datasets: [{
          label: 'Total Execution Time (s)',
          data: totalExecutionTimeData.map(item => item.total_run_time), // Total execution time per day
          borderColor: 'rgb(29, 78, 216)',
          backgroundColor: 'rgba(29, 78, 216, 0.2)',
          tension: 0.1
        }]
      },
      options: {
        responsive: true,
        scales: {
          y: {
            beginAtZero: true,
            ticks: { color: 'white' }
          },
          x: {
            ticks: { color: 'white' }
          }
        },
        plugins: {
          legend: {
            labels: { color: 'white' }
          }
        }
      }
    });
  </script>

  <div class="chart-container">
    <h2 class="chart-title">⚠️ Tests Exceeding Time Threshold</h2>
    <p>This chart tracks the number of tests that exceed a predefined time threshold. It helps identify tests that might need optimization or further investigation due to their high execution time.</p>
    <canvas id="testsExceedingTimeThresholdChart"></canvas>
  </div>

  <script>
    const testsExceedingTimeThresholdCtx = document.getElementById('testsExceedingTimeThresholdChart').getContext('2d');
    const testsExceedingTimeThresholdData = <%= @tests_exceeding_time_threshold.to_json.html_safe %>;

    new Chart(testsExceedingTimeThresholdCtx, {
      type: 'bar',
      data: {
        labels: testsExceedingTimeThresholdData.map(item => item.test_date), // Days on the X axis
        datasets: [{
          label: 'Tests Exceeding Threshold',
          data: testsExceedingTimeThresholdData.map(item => item.test_count), // Number of tests exceeding the threshold
          backgroundColor: 'rgb(255, 99, 132)',
          borderColor: 'rgb(255, 99, 132)',
          borderWidth: 1
        }]
      },
      options: {
        responsive: true,
        scales: {
          y: {
            beginAtZero: true,
            ticks: { color: 'white' }
          },
          x: {
            ticks: { color: 'white' }
          }
        },
        plugins: {
          legend: {
            labels: { color: 'white' }
          }
        }
      }
    });
  </script>

  <div class="chart-container">
    <h2 class="chart-title">❌ Failed Tests Trend by File</h2>
    <p>This graphic shows the trend of failed tests, categorized by file. It helps identify files with a high number of test failures, allowing for more targeted debugging and optimization efforts.</p>
    <canvas id="failedTestsTrendByFileChart"></canvas>
  </div>

  <script>
    const failedTestsTrendByFileCtx = document.getElementById('failedTestsTrendByFileChart').getContext('2d');
    const failedTestsTrendByFileData = <%= @failed_tests_trend_by_file.to_json.html_safe %>;

    // Organize the data by file
    const labels = [...new Set(failedTestsTrendByFileData.map(item => item.test_date))]; // Unique dates
    const filePaths = [...new Set(failedTestsTrendByFileData.map(item => item.file_path))]; // Unique files
    const datasets = filePaths.map(filePath => {
      return {
        label: filePath,
        data: labels.map(label => {
          const dataForLabel = failedTestsTrendByFileData.filter(item => item.test_date === label && item.file_path === filePath);
          return dataForLabel.length > 0 ? dataForLabel[0].failed_count : 0;
        }),
        backgroundColor: getRandomColor(), // Random color for each file
        borderColor: 'rgb(0, 0, 0)',
        borderWidth: 1
      };
    });

    new Chart(failedTestsTrendByFileCtx, {
      type: 'bar',
      data: {
        labels: labels, // Dates on the X axis
        datasets: datasets // One dataset per file
      },
      options: {
        responsive: true,
        scales: {
          y: {
            beginAtZero: true,
            ticks: { color: 'white' }
          },
          x: {
            ticks: { color: 'white' }
          }
        },
        plugins: {
          legend: {
            labels: { color: 'white' }
          }
        }
      }
    });

    // Helper that generates a random hex color
    function getRandomColor() {
      const letters = '0123456789ABCDEF';
      let color = '#';
      for (let i = 0; i < 6; i++) {
        color += letters[Math.floor(Math.random() * 16)];
      }
      return color;
    }
  </script>

  <div class="chart-container">
    <h2 class="chart-title">⏱️ Average Execution Time by File</h2>
    <p>This chart displays the average execution time for tests in each file, helping to identify which files have longer test durations. It provides insight into areas that may require optimization for faster test execution.</p>
    <canvas id="avgExecutionTimeByFileChart"></canvas>
  </div>

  <script>
    // Get the chart context
    const avgExecutionTimeByFileCtx = document.getElementById('avgExecutionTimeByFileChart').getContext('2d');

    // Make sure the data is not empty
    const avgExecutionTimeByFileData = <%= @avg_execution_time_by_file.to_json.html_safe %>;

    // Inspect the data in the console
    console.log(avgExecutionTimeByFileData);

    // If there is data, process it
    if (avgExecutionTimeByFileData.length > 0) {
      // Use unique variable names
      const filePaths = avgExecutionTimeByFileData.map(item => item.file_path); // File paths
      const avgRunTimes = avgExecutionTimeByFileData.map(item => item.average_run_time); // Average execution times

      new Chart(avgExecutionTimeByFileCtx, {
        type: 'bar',
        data: {
          labels: filePaths, // Files on the X axis
          datasets: [{
            label: 'Average Execution Time (seconds)',
            data: avgRunTimes, // Average execution time per file
            backgroundColor: 'rgb(67, 160, 71)', // Bar color
            borderColor: 'rgb(67, 160, 71)',
            borderWidth: 1
          }]
        },
        options: {
          responsive: true,
          scales: {
            y: {
              beginAtZero: true,
              ticks: { color: 'white' }
            },
            x: {
              // Chart.js 3 expects label rotation under `ticks` (the original `angle` key is not
              // a valid scale option); rotating the labels keeps long file names from overlapping.
              ticks: { color: 'white', maxRotation: 45, minRotation: 45 }
            }
          },
          plugins: {
            legend: {
              labels: { color: 'white' }
            }
          }
        }
      });
    }
  </script>

  <div class="chart-container">
    <h2 class="chart-title">📊 Test Stability Trend</h2>
    <p>This graphic shows the trend of test stability over time, indicating how frequently tests pass or fail. It can help assess the consistency of tests and identify areas where stability may be an issue.</p>
    <canvas id="stabilityTrendChart"></canvas>
  </div>

  <script>
    const stabilityTrendCtx = document.getElementById('stabilityTrendChart').getContext('2d');
    const stabilityTrendData = <%= @stability_trend.to_json.html_safe %>;

    if (stabilityTrendData.length === 0) {
      console.error('No data to display on the chart.');
    }

    // Use unique variable names
    const stabilityLabels = stabilityTrendData.map(item => item.test_date); // Test dates
    const passedData = stabilityTrendData.map(item => item.passed_count || 0); // Passed test count
    const failedData = stabilityTrendData.map(item => item.failed_count || 0); // Failed test count
    const skippedData = stabilityTrendData.map(item => item.skipped_count || 0); // Skipped test count
    const pendingData = stabilityTrendData.map(item => item.pending_count || 0); // Pending test count
    const errorData = stabilityTrendData.map(item => item.error_count || 0); // Errored test count

    new Chart(stabilityTrendCtx, {
      type: 'line',
      data: {
        labels: stabilityLabels, // Dates on the X axis
        datasets: [
          {
            label: 'Passed',
            data: passedData, // Passed test data
            borderColor: 'rgb(67, 160, 71)', // Line color for passed
            backgroundColor: 'rgba(67, 160, 71, 0.2)', // Fill color
            fill: true
          },
          {
            label: 'Failed',
            data: failedData, // Failed test data
            borderColor: 'rgb(239, 68, 68)', // Line color for failed
            backgroundColor: 'rgba(239, 68, 68, 0.2)', // Fill color
            fill: true
          },
          {
            label: 'Skipped',
            data: skippedData, // Skipped test data
            borderColor: 'rgb(255, 165, 0)', // Line color for skipped
            backgroundColor: 'rgba(255, 165, 0, 0.2)', // Fill color
            fill: true
          },
          {
            label: 'Pending',
            data: pendingData, // Pending test data
            borderColor: 'rgb(255, 205, 86)', // Line color for pending
            backgroundColor: 'rgba(255, 205, 86, 0.2)', // Fill color
            fill: true
          },
          {
            label: 'Error',
            data: errorData, // Errored test data
            borderColor: 'rgb(255, 0, 0)', // Line color for errors
            backgroundColor: 'rgba(255, 0, 0, 0.2)', // Fill color
            fill: true
          }
        ]
      },
      options: {
        responsive: true,
        scales: {
          y: {
            beginAtZero: true,
            ticks: { color: 'white' }
          },
          x: {
            ticks: { color: 'white' }
          }
        },
        plugins: {
          legend: {
            labels: { color: 'white' }
          }
        }
      }
    });
  </script>

</div>
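Nothing in this diff shows how the dashboard is exposed in a host application, but the presence of data/lib/watchdog/dashboard/engine.rb and a namespaced config/routes.rb points to the usual Rails-engine setup. A minimal sketch, assuming the engine class is `Watchdog::Dashboard::Engine` and using `/watchdog` as an arbitrary mount path (check the gem's README for the intended one):

```ruby
# Host application's config/routes.rb — a sketch, not taken from the gem itself.
Rails.application.routes.draw do
  # "/watchdog" is an assumed mount point; any path works.
  mount Watchdog::Dashboard::Engine => "/watchdog"
end
```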