perfshield 0.0.3 → 0.0.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/lib/runner.js +48 -38
  2. package/package.json +1 -1
package/lib/runner.js CHANGED
@@ -59,19 +59,39 @@ const buildHarnessIfNeeded = async sourcePath => {
59
59
  path: harnessPath
60
60
  };
61
61
  };
62
- const warmupBenchmarks = async (harness, benchmarks) => {
62
+ const withFreshHarness = async (engine, harnessPath, baselinePath, currentPath, callback) => {
63
+ const harness = await createNodeHarness(engine, harnessPath, baselinePath, currentPath);
64
+ try {
65
+ return await callback(harness);
66
+ } finally {
67
+ await harness.close();
68
+ }
69
+ };
70
+ const runIterationInFreshHarness = async (engine, harnessPath, baselinePath, currentPath, index, iterations, order) => await withFreshHarness(engine, harnessPath, baselinePath, currentPath, async harness => {
71
+ const results = {};
72
+ for (const version of order) {
73
+ const payload = {
74
+ index,
75
+ version
76
+ };
77
+ if (iterations != null) {
78
+ payload.iterations = iterations;
79
+ }
80
+ const result = await harness.runSample(payload);
81
+ results[version] = result.durationMs;
82
+ }
83
+ return results;
84
+ });
85
+ const warmupBenchmarks = async (engine, harnessPath, baselinePath, currentPath, benchmarks) => {
86
+ let roundRobinSeed = 0;
63
87
  for (let index = 0; index < benchmarks.length; index += 1) {
64
88
  const descriptor = benchmarks[index];
65
- for (const version of versions) {
66
- await harness.runSample({
67
- index,
68
- iterations: descriptor.iterations,
69
- version
70
- });
71
- }
89
+ const order = getVersionOrder(roundRobinSeed);
90
+ roundRobinSeed += 1;
91
+ await runIterationInFreshHarness(engine, harnessPath, baselinePath, currentPath, index, descriptor.iterations, order);
72
92
  }
73
93
  };
74
- const collectSamples = async (harness, benchmarks, minSamples) => {
94
+ const collectSamples = async (engine, harnessPath, baselinePath, currentPath, benchmarks, minSamples) => {
75
95
  const samples = benchmarks.map(() => ({
76
96
  baseline: [],
77
97
  current: []
@@ -82,17 +102,12 @@ const collectSamples = async (harness, benchmarks, minSamples) => {
82
102
  const descriptor = benchmarks[index];
83
103
  const order = getVersionOrder(roundRobinSeed);
84
104
  roundRobinSeed += 1;
85
- for (const version of order) {
86
- const result = await harness.runSample({
87
- index,
88
- iterations: descriptor.iterations,
89
- version
90
- });
91
- if (version === "baseline") {
92
- samples[index].baseline.push(result.durationMs);
93
- } else {
94
- samples[index].current.push(result.durationMs);
95
- }
105
+ const result = await runIterationInFreshHarness(engine, harnessPath, baselinePath, currentPath, index, descriptor.iterations, order);
106
+ if (result.baseline != null) {
107
+ samples[index].baseline.push(result.baseline);
108
+ }
109
+ if (result.current != null) {
110
+ samples[index].current.push(result.current);
96
111
  }
97
112
  }
98
113
  }
@@ -110,7 +125,7 @@ const autoSampleResolved = (samples, conditions) => samples.every(bucket => {
110
125
  }
111
126
  return true;
112
127
  });
113
- const autoSample = async (harness, benchmarks, samples, conditions, timeoutMs) => {
128
+ const autoSample = async (engine, harnessPath, baselinePath, currentPath, benchmarks, samples, conditions, timeoutMs) => {
114
129
  const startTime = Date.now();
115
130
  let roundRobinSeed = 0;
116
131
  while (Date.now() - startTime < timeoutMs) {
@@ -122,17 +137,12 @@ const autoSample = async (harness, benchmarks, samples, conditions, timeoutMs) =
122
137
  const descriptor = benchmarks[index];
123
138
  const order = getVersionOrder(roundRobinSeed);
124
139
  roundRobinSeed += 1;
125
- for (const version of order) {
126
- const result = await harness.runSample({
127
- index,
128
- iterations: descriptor.iterations,
129
- version
130
- });
131
- if (version === "baseline") {
132
- samples[index].baseline.push(result.durationMs);
133
- } else {
134
- samples[index].current.push(result.durationMs);
135
- }
140
+ const result = await runIterationInFreshHarness(engine, harnessPath, baselinePath, currentPath, index, descriptor.iterations, order);
141
+ if (result.baseline != null) {
142
+ samples[index].baseline.push(result.baseline);
143
+ }
144
+ if (result.current != null) {
145
+ samples[index].current.push(result.current);
136
146
  }
137
147
  }
138
148
  }
@@ -146,12 +156,13 @@ export const runEngineComparison = async options => {
146
156
  engine
147
157
  } = options;
148
158
  const harnessArtifact = await buildHarnessIfNeeded(getHarnessPath());
149
- const harness = await createNodeHarness(engine, harnessArtifact.path, resolve(baselinePath), resolve(currentPath));
159
+ const resolvedBaseline = resolve(baselinePath);
160
+ const resolvedCurrent = resolve(currentPath);
150
161
  try {
151
- const benchmarks = await harness.listBenchmarks();
152
- await warmupBenchmarks(harness, benchmarks);
153
- const samples = await collectSamples(harness, benchmarks, config.sampling.minSamples);
154
- await autoSample(harness, benchmarks, samples, config.sampling.conditions, config.sampling.timeoutMs);
162
+ const benchmarks = await withFreshHarness(engine, harnessArtifact.path, resolvedBaseline, resolvedCurrent, async harness => await harness.listBenchmarks());
163
+ await warmupBenchmarks(engine, harnessArtifact.path, resolvedBaseline, resolvedCurrent, benchmarks);
164
+ const samples = await collectSamples(engine, harnessArtifact.path, resolvedBaseline, resolvedCurrent, benchmarks, config.sampling.minSamples);
165
+ await autoSample(engine, harnessArtifact.path, resolvedBaseline, resolvedCurrent, benchmarks, samples, config.sampling.conditions, config.sampling.timeoutMs);
155
166
  const benchmarkResults = benchmarks.map((benchmark, index) => {
156
167
  const baselineSamples = samples[index].baseline;
157
168
  const currentSamples = samples[index].current;
@@ -176,7 +187,6 @@ export const runEngineComparison = async options => {
176
187
  engine
177
188
  };
178
189
  } finally {
179
- await harness.close();
180
190
  if (harnessArtifact.cleanup) {
181
191
  await harnessArtifact.cleanup();
182
192
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "perfshield",
3
- "version": "0.0.3",
3
+ "version": "0.0.4",
4
4
  "description": "A tool for doing web benchmarking across multiple JS engines and with statistical signifigance",
5
5
  "license": "MIT",
6
6
  "type": "module",