overtake 1.0.5 → 1.1.1
- package/README.md +25 -29
- package/build/cli.cjs +43 -33
- package/build/cli.cjs.map +1 -1
- package/build/cli.js +42 -32
- package/build/cli.js.map +1 -1
- package/build/executor.cjs +6 -3
- package/build/executor.cjs.map +1 -1
- package/build/executor.d.ts +3 -2
- package/build/executor.js +6 -3
- package/build/executor.js.map +1 -1
- package/build/gc-watcher.cjs +31 -0
- package/build/gc-watcher.cjs.map +1 -0
- package/build/gc-watcher.d.ts +9 -0
- package/build/gc-watcher.js +21 -0
- package/build/gc-watcher.js.map +1 -0
- package/build/index.cjs +9 -1
- package/build/index.cjs.map +1 -1
- package/build/index.d.ts +1 -1
- package/build/index.js +9 -1
- package/build/index.js.map +1 -1
- package/build/runner.cjs +229 -24
- package/build/runner.cjs.map +1 -1
- package/build/runner.d.ts +1 -1
- package/build/runner.js +229 -24
- package/build/runner.js.map +1 -1
- package/build/types.cjs.map +1 -1
- package/build/types.d.ts +4 -0
- package/build/types.js.map +1 -1
- package/build/utils.cjs +21 -0
- package/build/utils.cjs.map +1 -1
- package/build/utils.d.ts +1 -0
- package/build/utils.js +18 -0
- package/build/utils.js.map +1 -1
- package/build/worker.cjs +104 -14
- package/build/worker.cjs.map +1 -1
- package/build/worker.d.ts +1 -1
- package/build/worker.js +63 -8
- package/build/worker.js.map +1 -1
- package/examples/accuracy.ts +54 -0
- package/examples/custom-reports.ts +0 -1
- package/examples/imports.ts +3 -7
- package/examples/quick-start.ts +2 -0
- package/package.json +11 -10
- package/src/cli.ts +44 -31
- package/src/executor.ts +8 -2
- package/src/gc-watcher.ts +23 -0
- package/src/index.ts +11 -0
- package/src/runner.ts +269 -23
- package/src/types.ts +4 -0
- package/src/utils.ts +20 -0
- package/src/worker.ts +72 -9
- package/build/queue.cjs +0 -48
- package/build/queue.cjs.map +0 -1
- package/build/queue.d.ts +0 -3
- package/build/queue.js +0 -38
- package/build/queue.js.map +0 -1
- package/src/queue.ts +0 -42
package/src/index.ts
CHANGED
@@ -1,4 +1,5 @@
 import { cpus } from 'node:os';
+import { pathToFileURL } from 'node:url';
 import { createExecutor, ExecutorOptions, ExecutorReport } from './executor.js';
 import { MaybePromise, StepFn, SetupFn, TeardownFn, FeedFn, ReportType, ReportTypeList, DEFAULT_CYCLES } from './types.js';

@@ -141,20 +142,29 @@ export class Benchmark<TInput> {
     minCycles = 50,
     absThreshold = 1_000,
     relThreshold = 0.02,
+    gcObserver = true,
     reportTypes = DEFAULT_REPORT_TYPES as unknown as R,
+    baseUrl,
   }: ExecutorOptions<R>): Promise<TargetReport<R>[]> {
     if (this.#executed) {
       throw new Error("Benchmark is executed and can't be reused");
     }
     this.#executed = true;

+    const resolvedBaseUrl = baseUrl ?? pathToFileURL(process.cwd()).href;
+    if (!baseUrl) {
+      console.warn("Overtake: baseUrl not provided; defaulting to process.cwd(). Pass the benchmark's import.meta.url so relative imports resolve correctly.");
+    }
+
     const executor = createExecutor<unknown, TInput, R>({
+      baseUrl: resolvedBaseUrl,
       workers,
       warmupCycles,
       maxCycles,
       minCycles,
       absThreshold,
       relThreshold,
+      gcObserver,
       reportTypes,
     });

@@ -167,6 +177,7 @@ export class Benchmark<TInput> {
       const data = await feed.fn?.();
       executor
         .push<ExecutorReport<R>>({
+          baseUrl: resolvedBaseUrl,
           setup: target.setup,
           teardown: target.teardown,
           pre: measure.pre,
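The `baseUrl` option added here is threaded from the run options through `ExecutorOptions` and `WorkerOptions` down to the worker, so that import specifiers inside the serialized setup/run/teardown code resolve relative to the benchmark file instead of `process.cwd()`. A minimal sketch of how a caller might supply it, assuming an options object shaped like `ExecutorOptions` (the surrounding suite API is not shown in this diff):

import { pathToFileURL } from 'node:url';

// Passing the benchmark file's own URL silences the new warning and makes
// relative imports in serialized step code resolve next to this file.
const runOptions = {
  baseUrl: import.meta.url,
  gcObserver: true, // enabled by default at the executor level in 1.1.x
  minCycles: 50,
  relThreshold: 0.02,
};

// Without baseUrl the executor falls back to the working directory:
const fallback = pathToFileURL(process.cwd()).href;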
package/src/runner.ts
CHANGED
@@ -1,23 +1,181 @@
+import { performance, PerformanceObserver } from 'node:perf_hooks';
 import { Options, Control } from './types.js';
+import { GCWatcher } from './gc-watcher.js';
+import { StepFn, MaybePromise } from './types.js';

 const COMPLETE_VALUE = 100_00;

+const hr = process.hrtime.bigint.bind(process.hrtime);
+
 const runSync = (run: Function) => {
   return (...args: unknown[]) => {
-    const start =
+    const start = hr();
     run(...args);
-    return
+    return hr() - start;
   };
 };

 const runAsync = (run: Function) => {
   return async (...args: unknown[]) => {
-    const start =
+    const start = hr();
     await run(...args);
-    return
+    return hr() - start;
   };
 };

+const isThenable = (value: unknown): value is PromiseLike<unknown> => {
+  return value !== null && (typeof value === 'object' || typeof value === 'function') && typeof (value as PromiseLike<unknown>).then === 'function';
+};
+
+const TARGET_SAMPLE_NS = 1_000_000n; // aim for ~1ms per measured sample
+const MAX_BATCH = 1_048_576;
+const PROGRESS_STRIDE = 16;
+const GC_STRIDE = 32;
+const OUTLIER_MULTIPLIER = 4;
+const OUTLIER_IQR_MULTIPLIER = 3;
+const OUTLIER_WINDOW = 64;
+
+type GCEvent = { start: number; end: number };
+
+const collectSample = async <TContext, TInput>(
+  batchSize: number,
+  run: (ctx: TContext, data: TInput) => MaybePromise<bigint>,
+  pre: StepFn<TContext, TInput> | undefined,
+  post: StepFn<TContext, TInput> | undefined,
+  context: TContext,
+  data: TInput,
+) => {
+  let sampleDuration = 0n;
+  for (let b = 0; b < batchSize; b++) {
+    await pre?.(context, data);
+    sampleDuration += await run(context, data);
+    await post?.(context, data);
+  }
+  return sampleDuration / BigInt(batchSize);
+};
+
+const tuneParameters = async <TContext, TInput>({
+  initialBatch,
+  run,
+  pre,
+  post,
+  context,
+  data,
+  minCycles,
+  relThreshold,
+  maxCycles,
+}: {
+  initialBatch: number;
+  run: (ctx: TContext, data: TInput) => MaybePromise<bigint>;
+  pre?: StepFn<TContext, TInput>;
+  post?: StepFn<TContext, TInput>;
+  context: TContext;
+  data: TInput;
+  minCycles: number;
+  relThreshold: number;
+  maxCycles: number;
+}) => {
+  let batchSize = initialBatch;
+  let bestCv = Number.POSITIVE_INFINITY;
+  let bestBatch = batchSize;
+
+  for (let attempt = 0; attempt < 3; attempt++) {
+    const samples: number[] = [];
+    const sampleCount = Math.min(8, maxCycles);
+    for (let s = 0; s < sampleCount; s++) {
+      const duration = await collectSample(batchSize, run, pre, post, context, data);
+      samples.push(Number(duration));
+    }
+    const mean = samples.reduce((acc, v) => acc + v, 0) / samples.length;
+    const variance = samples.reduce((acc, v) => acc + (v - mean) * (v - mean), 0) / Math.max(1, samples.length - 1);
+    const stddev = Math.sqrt(variance);
+    const cv = mean === 0 ? Number.POSITIVE_INFINITY : stddev / mean;
+
+    if (cv < bestCv) {
+      bestCv = cv;
+      bestBatch = batchSize;
+    }
+
+    if (cv <= relThreshold || batchSize >= MAX_BATCH) {
+      break;
+    }
+    batchSize = Math.min(MAX_BATCH, batchSize * 2);
+  }
+
+  const tunedRel = bestCv < relThreshold ? Math.max(bestCv * 1.5, relThreshold * 0.5) : relThreshold;
+  const tunedMin = Math.min(maxCycles, Math.max(minCycles, Math.ceil(minCycles * Math.max(1, bestCv / (relThreshold || 1e-6)))));
+
+  return { batchSize: bestBatch, relThreshold: tunedRel, minCycles: tunedMin };
+};
+
+const createGCTracker = () => {
+  if (process.env.OVERTAKE_GC_OBSERVER !== '1') {
+    return null;
+  }
+  if (typeof PerformanceObserver === 'undefined') {
+    return null;
+  }
+
+  const events: GCEvent[] = [];
+  const observer = new PerformanceObserver((list) => {
+    for (const entry of list.getEntries()) {
+      events.push({ start: entry.startTime, end: entry.startTime + entry.duration });
+    }
+  });
+
+  try {
+    observer.observe({ entryTypes: ['gc'] });
+  } catch {
+    return null;
+  }
+
+  const overlaps = (start: number, end: number) => {
+    let noisy = false;
+    for (let i = events.length - 1; i >= 0; i--) {
+      const event = events[i];
+      if (event.end < start - 5_000) {
+        events.splice(i, 1);
+        continue;
+      }
+      if (event.start <= end && event.end >= start) {
+        noisy = true;
+      }
+    }
+    return noisy;
+  };
+
+  const dispose = () => observer.disconnect();
+
+  return { overlaps, dispose };
+};
+
+const pushWindow = (arr: number[], value: number, cap: number) => {
+  if (arr.length === cap) {
+    arr.shift();
+  }
+  arr.push(value);
+};
+
+const medianAndIqr = (arr: number[]) => {
+  if (arr.length === 0) return { median: 0, iqr: 0 };
+  const sorted = [...arr].sort((a, b) => a - b);
+  const mid = Math.floor(sorted.length / 2);
+  const median = sorted.length % 2 === 0 ? (sorted[mid - 1] + sorted[mid]) / 2 : sorted[mid];
+  const q1Idx = Math.floor(sorted.length * 0.25);
+  const q3Idx = Math.floor(sorted.length * 0.75);
+  const q1 = sorted[q1Idx];
+  const q3 = sorted[q3Idx];
+  return { median, iqr: q3 - q1 };
+};
+
+const windowCv = (arr: number[]) => {
+  if (arr.length < 2) return Number.POSITIVE_INFINITY;
+  const mean = arr.reduce((a, v) => a + v, 0) / arr.length;
+  const variance = arr.reduce((a, v) => a + (v - mean) * (v - mean), 0) / (arr.length - 1);
+  const stddev = Math.sqrt(variance);
+  return mean === 0 ? Number.POSITIVE_INFINITY : stddev / mean;
+};
+
 export const benchmark = async <TContext, TInput>({
   setup,
   teardown,
@@ -30,6 +188,7 @@ export const benchmark = async <TContext, TInput>({
   minCycles,
   absThreshold,
   relThreshold,
+  gcObserver = false,

   durationsSAB,
   controlSAB,
@@ -43,47 +202,133 @@ export const benchmark = async <TContext, TInput>({

   const context = (await setup?.()) as TContext;
   const maxCycles = durations.length;
+  const gcWatcher = gcObserver ? new GCWatcher() : null;
+  const gcTracker = gcObserver ? createGCTracker() : null;

   try {
+    // classify sync/async and capture initial duration
     await pre?.(context, data!);
-    const
+    const probeStart = hr();
+    const probeResult = runRaw(context, data!);
+    const isAsync = isThenable(probeResult);
+    if (isAsync) {
+      await probeResult;
+    }
+    const durationProbe = hr() - probeStart;
     await post?.(context, data!);
-    global.gc?.();
-    global.gc?.();

-    const run =
-
-
-
+    const run = isAsync ? runAsync(runRaw) : runSync(runRaw);
+
+    // choose batch size to amortize timer overhead
+    const durationPerRun = durationProbe === 0n ? 1n : durationProbe;
+    const suggestedBatch = Number(TARGET_SAMPLE_NS / durationPerRun);
+    const initialBatchSize = Math.min(MAX_BATCH, Math.max(1, suggestedBatch));
+
+    // auto-tune based on warmup samples
+    const tuned = await tuneParameters({
+      initialBatch: initialBatchSize,
+      run,
+      pre,
+      post,
+      context,
+      data: data as TInput,
+      minCycles,
+      relThreshold,
+      maxCycles,
+    });
+    let batchSize = tuned.batchSize;
+    minCycles = tuned.minCycles;
+    relThreshold = tuned.relThreshold;
+
+    // warmup: run until requested cycles, adapt if unstable
+    const warmupStart = Date.now();
+    let warmupRemaining = warmupCycles;
+    const warmupWindow: number[] = [];
+    const warmupCap = Math.max(warmupCycles, Math.min(maxCycles, warmupCycles * 4 || 1000));
+
+    while (Date.now() - warmupStart < 1_000 && warmupRemaining > 0) {
+      const start = hr();
+      await pre?.(context, data!);
+      await run(context, data);
+      await post?.(context, data!);
+      pushWindow(warmupWindow, Number(hr() - start), warmupCap);
+      warmupRemaining--;
     }
-
+    let warmupDone = 0;
+    while (warmupDone < warmupRemaining) {
+      const start = hr();
       await pre?.(context, data!);
       await run(context, data);
       await post?.(context, data!);
-
-
+      pushWindow(warmupWindow, Number(hr() - start), warmupCap);
+      warmupDone++;
+      if (global.gc && warmupDone % GC_STRIDE === 0) {
+        global.gc();
+      }
+    }
+    while (warmupWindow.length >= 8 && warmupWindow.length < warmupCap) {
+      const cv = windowCv(warmupWindow);
+      if (cv <= relThreshold * 2) {
+        break;
+      }
+      const start = hr();
+      await pre?.(context, data!);
+      await run(context, data);
+      await post?.(context, data!);
+      pushWindow(warmupWindow, Number(hr() - start), warmupCap);
     }

     let i = 0;
     let mean = 0n;
     let m2 = 0n;
+    const outlierWindow: number[] = [];

     while (true) {
       if (i >= maxCycles) break;

-
-      const
-
-
-
+      const gcMarker = gcWatcher?.start();
+      const sampleStart = performance.now();
+      let sampleDuration = 0n;
+      for (let b = 0; b < batchSize; b++) {
+        await pre?.(context, data!);
+        sampleDuration += await run(context, data);
+        await post?.(context, data!);
+        if (global.gc && (i + b) % GC_STRIDE === 0) {
+          global.gc();
+        }
+      }
+
+      // normalize by batch size
+      sampleDuration /= BigInt(batchSize);

-
-      const
+      const sampleEnd = performance.now();
+      const gcNoise = (gcMarker ? gcWatcher!.seen(gcMarker) : false) || (gcTracker?.overlaps(sampleStart, sampleEnd) ?? false);
+      if (gcNoise) {
+        continue;
+      }
+
+      const durationNumber = Number(sampleDuration);
+      pushWindow(outlierWindow, durationNumber, OUTLIER_WINDOW);
+      const { median, iqr } = medianAndIqr(outlierWindow);
+      const maxAllowed = median + OUTLIER_IQR_MULTIPLIER * iqr || Number.POSITIVE_INFINITY;
+      if (outlierWindow.length >= 8 && durationNumber > maxAllowed) {
+        continue;
+      }
+
+      const meanNumber = Number(mean);
+      if (i >= 8 && meanNumber > 0 && durationNumber > OUTLIER_MULTIPLIER * meanNumber) {
+        continue;
+      }
+
+      durations[i++] = sampleDuration;
+      const delta = sampleDuration - mean;
       mean += delta / BigInt(i);
-      m2 += delta * (
+      m2 += delta * (sampleDuration - mean);

       const progress = Math.max(i / maxCycles) * COMPLETE_VALUE;
-
+      if (i % PROGRESS_STRIDE === 0) {
+        control[Control.PROGRESS] = progress;
+      }

       if (i >= minCycles) {
         const variance = Number(m2) / (i - 1);
@@ -106,6 +351,7 @@ export const benchmark = async <TContext, TInput>({
     console.error(e && typeof e === 'object' && 'stack' in e ? e.stack : e);
     control[Control.COMPLETE] = 1;
   } finally {
+    gcTracker?.dispose?.();
     try {
       await teardown?.(context);
     } catch (e) {
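The measurement loop above batches runs, discards samples that overlap GC activity or fall outside an IQR window, and folds the surviving samples into a running mean and M2 in bigint nanoseconds; `Number(m2) / (i - 1)` is then the sample variance, i.e. Welford's online update. A small self-contained sketch of just that update, with illustrative values (the integer division mirrors the runner's bigint arithmetic):

// Welford-style running mean/variance over bigint nanosecond samples.
const samples = [1_000n, 1_020n, 990n, 1_010n, 1_005n];

let i = 0;
let mean = 0n;
let m2 = 0n;

for (const sample of samples) {
  i++;
  const delta = sample - mean;
  mean += delta / BigInt(i);          // truncating bigint division, as in the runner
  m2 += delta * (sample - mean);
}

const variance = Number(m2) / (i - 1);
const stddev = Math.sqrt(variance);
const cv = stddev / Number(mean);     // the relative-threshold stop compares a value like this
console.log({ mean: mean.toString(), stddev, cv });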
package/src/types.ts
CHANGED
@@ -33,9 +33,12 @@ export interface BenchmarkOptions {
   minCycles?: number;
   absThreshold?: number; // ns
   relThreshold?: number; // %
+  gcObserver?: boolean;
+  baseUrl?: string;
 }

 export interface RunOptions<TContext, TInput> {
+  baseUrl?: string;
   setup?: SetupFn<TContext>;
   teardown?: TeardownFn<TContext>;
   pre?: StepFn<TContext, TInput>;
@@ -45,6 +48,7 @@ export interface RunOptions<TContext, TInput> {
 }

 export interface WorkerOptions extends Required<BenchmarkOptions> {
+  baseUrl: string;
   setupCode?: string;
   teardownCode?: string;
   preCode?: string;
package/src/utils.ts
CHANGED
@@ -1,3 +1,5 @@
+import { transform } from '@swc/core';
+
 export const abs = (value: bigint) => {
   if (value < 0n) {
     return -value;
@@ -63,3 +65,21 @@ export class ScaledBigInt {
     return Number(div(this.value, this.scale));
   }
 }
+
+export const transpile = async (code: string): Promise<string> => {
+  const output = await transform(code, {
+    filename: 'benchmark.ts',
+    jsc: {
+      parser: {
+        syntax: 'typescript',
+        tsx: false,
+        dynamicImport: true,
+      },
+      target: 'esnext',
+    },
+    module: {
+      type: 'es6',
+    },
+  });
+  return output.code;
+};
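The new `transpile` helper lets benchmark step code be written in TypeScript: it runs `@swc/core`'s `transform` with a TypeScript parser and emits a plain ES module string. A usage sketch with a made-up input snippet (the options mirror the helper above; the exact emitted text varies by swc version):

import { transform } from '@swc/core';

const input = 'export const run = (n: number): number => n * 2;';

const { code } = await transform(input, {
  filename: 'benchmark.ts',
  jsc: {
    parser: { syntax: 'typescript', tsx: false, dynamicImport: true },
    target: 'esnext',
  },
  module: { type: 'es6' },
});

// Type annotations are stripped; the output is ready to be evaluated as an ES module,
// e.g. by the vm.SourceTextModule set up in worker.ts.
console.log(code);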
package/src/worker.ts
CHANGED
@@ -1,8 +1,12 @@
 import { workerData } from 'node:worker_threads';
+import { SourceTextModule, SyntheticModule, createContext } from 'node:vm';
+import { createRequire } from 'node:module';
+import { fileURLToPath } from 'node:url';
 import { benchmark } from './runner.js';
-import {
+import { WorkerOptions } from './types.js';

 const {
+  baseUrl,
   setupCode,
   teardownCode,
   preCode,
@@ -14,19 +18,79 @@ const {
   minCycles,
   absThreshold,
   relThreshold,
+  gcObserver = true,

   durationsSAB,
   controlSAB,
 }: WorkerOptions = workerData;

-const
-const teardown: TeardownFn<unknown> = teardownCode && Function(`return ${teardownCode};`)();
+const serialize = (code?: string) => (code ? code : '() => {}');

-const
-const run: StepFn<unknown, unknown> = runCode && Function(`return ${runCode};`)();
-const post: StepFn<unknown, unknown> = postCode && Function(`return ${postCode};`)();
+const isCjs = typeof require !== 'undefined';

-
+const resolveSpecifier = (specifier: string, parent: string) => {
+  if (!isCjs) {
+    try {
+      return import.meta.resolve(specifier, parent);
+    } catch {
+      // fall through to CommonJS resolution
+    }
+  }
+  const resolveFrom = createRequire(fileURLToPath(parent));
+  return resolveFrom.resolve(specifier);
+};
+
+const source = `
+export const setup = ${serialize(setupCode)};
+export const teardown = ${serialize(teardownCode)};
+export const pre = ${serialize(preCode)};
+export const run = ${serialize(runCode)};
+export const post = ${serialize(postCode)};
+`;
+
+const context = createContext({ console, Buffer });
+const imports = new Map<string, SyntheticModule>();
+const mod = new SourceTextModule(source, {
+  identifier: baseUrl,
+  context,
+  initializeImportMeta(meta) {
+    meta.url = baseUrl;
+  },
+  importModuleDynamically(specifier, referencingModule) {
+    const base = referencingModule.identifier ?? baseUrl;
+    const resolved = resolveSpecifier(specifier, base);
+    return import(resolved);
+  },
+});
+
+await mod.link(async (specifier, referencingModule) => {
+  const base = referencingModule.identifier ?? baseUrl;
+  const target = resolveSpecifier(specifier, base);
+  const cached = imports.get(target);
+  if (cached) return cached;
+
+  const importedModule = await import(target);
+  const exportNames = Object.keys(importedModule);
+  const imported = new SyntheticModule(
+    exportNames,
+    () => {
+      exportNames.forEach((key) => imported.setExport(key, importedModule[key]));
+    },
+    { identifier: target, context: referencingModule.context },
+  );
+  imports.set(target, imported);
+  return imported;
+});
+
+await mod.evaluate();
+const { setup, teardown, pre, run, post } = mod.namespace as any;
+
+if (!run) {
+  throw new Error('Benchmark run function is required');
+}
+
+process.exitCode = await benchmark({
+  baseUrl,
   setup,
   teardown,
   pre,
@@ -38,9 +102,8 @@ export const exitCode = await benchmark({
   minCycles,
   absThreshold,
   relThreshold,
+  gcObserver,

   durationsSAB,
   controlSAB,
 });
-
-process.exit(exitCode);
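The worker no longer rebuilds each step with `Function(...)`; it wraps the serialized code in a `vm.SourceTextModule`, links bare and relative imports through `SyntheticModule` wrappers around the host `import()`, and reads the steps off the evaluated module's namespace. Note that `SourceTextModule` and `SyntheticModule` are only exposed when Node.js runs with `--experimental-vm-modules`, so the worker threads presumably need that flag (how the CLI supplies it is not visible in this diff). A stripped-down sketch of the same link/evaluate flow with a hard-coded module body:

// Requires: node --experimental-vm-modules
import { SourceTextModule, SyntheticModule, createContext } from 'node:vm';

const context = createContext({ console });

const mod = new SourceTextModule(
  "import { double } from 'math'; export const run = () => double(21);",
  { context },
);

// The real worker resolves each specifier against baseUrl and imports the target;
// here one synthetic module stands in for any dependency.
await mod.link(async () => {
  const dep = new SyntheticModule(
    ['double'],
    function () {
      this.setExport('double', (n: number) => n * 2);
    },
    { context },
  );
  return dep;
});

await mod.evaluate();
const { run } = mod.namespace as { run: () => number };
console.log(run()); // 42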
package/build/queue.cjs
DELETED
@@ -1,48 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", {
-    value: true
-});
-Object.defineProperty(exports, "createQueue", {
-    enumerable: true,
-    get: function() {
-        return createQueue;
-    }
-});
-const createQueue = (worker, concurency = 1)=>{
-    const queue = new Set();
-    const processing = new Map();
-    const iterator = queue[Symbol.iterator]();
-    let next;
-    let counter = 0;
-    queueMicrotask(async ()=>{
-        while(true){
-            if (concurency > 0 && processing.size === concurency) {
-                await Promise.race(processing.values());
-            }
-            if (queue.size === 0) {
-                const { promise, resolve } = Promise.withResolvers();
-                next = resolve;
-                await promise;
-            }
-            const result = iterator.next();
-            if (result.done) {
-                break;
-            }
-            const id = counter++;
-            const task = Promise.resolve(worker(result.value)).catch(()=>{}).finally(()=>{
-                processing.delete(id);
-            });
-            processing.set(id, task);
-        }
-    });
-    return {
-        push: async (input)=>{
-            queue.add(input);
-            if (queue.size === 0) {
-                next?.();
-            }
-        }
-    };
-};
-
-//# sourceMappingURL=queue.cjs.map
package/build/queue.cjs.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["../src/queue.ts"],"sourcesContent":["export const createQueue = <T>(worker: (task: T) => Promise<void>, concurency: number = 1) => {\n const queue = new Set<T>();\n const processing = new Map<number, Promise<void>>();\n const iterator = queue[Symbol.iterator]();\n\n let next: () => void;\n let counter = 0;\n\n queueMicrotask(async () => {\n while (true) {\n if (concurency > 0 && processing.size === concurency) {\n await Promise.race(processing.values());\n }\n if (queue.size === 0) {\n const { promise, resolve } = Promise.withResolvers<void>();\n next = resolve;\n await promise;\n }\n const result = iterator.next();\n if (result.done) {\n break;\n }\n const id = counter++;\n const task = Promise.resolve(worker(result.value))\n .catch(() => {})\n .finally(() => {\n processing.delete(id);\n });\n processing.set(id, task);\n }\n });\n\n return {\n push: async (input: T) => {\n queue.add(input);\n\n if (queue.size === 0) {\n next?.();\n }\n },\n };\n};\n"],"names":["createQueue","worker","concurency","queue","Set","processing","Map","iterator","Symbol","next","counter","queueMicrotask","size","Promise","race","values","promise","resolve","withResolvers","result","done","id","task","value","catch","finally","delete","set","push","input","add"],"mappings":";;;;+BAAaA;;;eAAAA;;;AAAN,MAAMA,cAAc,CAAIC,QAAoCC,aAAqB,CAAC;IACvF,MAAMC,QAAQ,IAAIC;IAClB,MAAMC,aAAa,IAAIC;IACvB,MAAMC,WAAWJ,KAAK,CAACK,OAAOD,QAAQ,CAAC;IAEvC,IAAIE;IACJ,IAAIC,UAAU;IAEdC,eAAe;QACb,MAAO,KAAM;YACX,IAAIT,aAAa,KAAKG,WAAWO,IAAI,KAAKV,YAAY;gBACpD,MAAMW,QAAQC,IAAI,CAACT,WAAWU,MAAM;YACtC;YACA,IAAIZ,MAAMS,IAAI,KAAK,GAAG;gBACpB,MAAM,EAAEI,OAAO,EAAEC,OAAO,EAAE,GAAGJ,QAAQK,aAAa;gBAClDT,OAAOQ;gBACP,MAAMD;YACR;YACA,MAAMG,SAASZ,SAASE,IAAI;YAC5B,IAAIU,OAAOC,IAAI,EAAE;gBACf;YACF;YACA,MAAMC,KAAKX;YACX,MAAMY,OAAOT,QAAQI,OAAO,CAAChB,OAAOkB,OAAOI,KAAK,GAC7CC,KAAK,CAAC,KAAO,GACbC,OAAO,CAAC;gBACPpB,WAAWqB,MAAM,CAACL;YACpB;YACFhB,WAAWsB,GAAG,CAACN,IAAIC;QACrB;IACF;IAEA,OAAO;QACLM,MAAM,OAAOC;YACX1B,MAAM2B,GAAG,CAACD;YAEV,IAAI1B,MAAMS,IAAI,KAAK,GAAG;gBACpBH;YACF;QACF;IACF;AACF"}
package/build/queue.d.ts
DELETED
package/build/queue.js
DELETED
@@ -1,38 +0,0 @@
-export const createQueue = (worker, concurency = 1)=>{
-    const queue = new Set();
-    const processing = new Map();
-    const iterator = queue[Symbol.iterator]();
-    let next;
-    let counter = 0;
-    queueMicrotask(async ()=>{
-        while(true){
-            if (concurency > 0 && processing.size === concurency) {
-                await Promise.race(processing.values());
-            }
-            if (queue.size === 0) {
-                const { promise, resolve } = Promise.withResolvers();
-                next = resolve;
-                await promise;
-            }
-            const result = iterator.next();
-            if (result.done) {
-                break;
-            }
-            const id = counter++;
-            const task = Promise.resolve(worker(result.value)).catch(()=>{}).finally(()=>{
-                processing.delete(id);
-            });
-            processing.set(id, task);
-        }
-    });
-    return {
-        push: async (input)=>{
-            queue.add(input);
-            if (queue.size === 0) {
-                next?.();
-            }
-        }
-    };
-};
-
-//# sourceMappingURL=queue.js.map
package/build/queue.js.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["../src/queue.ts"],"sourcesContent":["export const createQueue = <T>(worker: (task: T) => Promise<void>, concurency: number = 1) => {\n const queue = new Set<T>();\n const processing = new Map<number, Promise<void>>();\n const iterator = queue[Symbol.iterator]();\n\n let next: () => void;\n let counter = 0;\n\n queueMicrotask(async () => {\n while (true) {\n if (concurency > 0 && processing.size === concurency) {\n await Promise.race(processing.values());\n }\n if (queue.size === 0) {\n const { promise, resolve } = Promise.withResolvers<void>();\n next = resolve;\n await promise;\n }\n const result = iterator.next();\n if (result.done) {\n break;\n }\n const id = counter++;\n const task = Promise.resolve(worker(result.value))\n .catch(() => {})\n .finally(() => {\n processing.delete(id);\n });\n processing.set(id, task);\n }\n });\n\n return {\n push: async (input: T) => {\n queue.add(input);\n\n if (queue.size === 0) {\n next?.();\n }\n },\n };\n};\n"],"names":["createQueue","worker","concurency","queue","Set","processing","Map","iterator","Symbol","next","counter","queueMicrotask","size","Promise","race","values","promise","resolve","withResolvers","result","done","id","task","value","catch","finally","delete","set","push","input","add"],"mappings":"AAAA,OAAO,MAAMA,cAAc,CAAIC,QAAoCC,aAAqB,CAAC;IACvF,MAAMC,QAAQ,IAAIC;IAClB,MAAMC,aAAa,IAAIC;IACvB,MAAMC,WAAWJ,KAAK,CAACK,OAAOD,QAAQ,CAAC;IAEvC,IAAIE;IACJ,IAAIC,UAAU;IAEdC,eAAe;QACb,MAAO,KAAM;YACX,IAAIT,aAAa,KAAKG,WAAWO,IAAI,KAAKV,YAAY;gBACpD,MAAMW,QAAQC,IAAI,CAACT,WAAWU,MAAM;YACtC;YACA,IAAIZ,MAAMS,IAAI,KAAK,GAAG;gBACpB,MAAM,EAAEI,OAAO,EAAEC,OAAO,EAAE,GAAGJ,QAAQK,aAAa;gBAClDT,OAAOQ;gBACP,MAAMD;YACR;YACA,MAAMG,SAASZ,SAASE,IAAI;YAC5B,IAAIU,OAAOC,IAAI,EAAE;gBACf;YACF;YACA,MAAMC,KAAKX;YACX,MAAMY,OAAOT,QAAQI,OAAO,CAAChB,OAAOkB,OAAOI,KAAK,GAC7CC,KAAK,CAAC,KAAO,GACbC,OAAO,CAAC;gBACPpB,WAAWqB,MAAM,CAACL;YACpB;YACFhB,WAAWsB,GAAG,CAACN,IAAIC;QACrB;IACF;IAEA,OAAO;QACLM,MAAM,OAAOC;YACX1B,MAAM2B,GAAG,CAACD;YAEV,IAAI1B,MAAMS,IAAI,KAAK,GAAG;gBACpBH;YACF;QACF;IACF;AACF,EAAE"}