@durable-streams/benchmarks 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.ts ADDED
@@ -0,0 +1,12 @@
+ //#region src/index.d.ts
+ interface BenchmarkOptions {
+   /** Base URL of the server to benchmark */
+   baseUrl: string;
+   /** Environment name (e.g., "production", "local") */
+   environment?: string;
+ }
+ /**
+  * Run the full benchmark suite against a server
+  */
+ declare function runBenchmarks(options: BenchmarkOptions): void; //#endregion
+ export { BenchmarkOptions, runBenchmarks };
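
For orientation, here is a minimal sketch of how a consuming project might wire this API into a Vitest benchmark file. The file name, environment variable, port, and environment label are illustrative assumptions, not part of the package:

// durable-streams.bench.ts (hypothetical file name; picked up by `vitest bench`)
import { runBenchmarks } from "@durable-streams/benchmarks"

runBenchmarks({
  // Assumed env var and default port; point this at the server under test
  baseUrl: process.env.BENCH_BASE_URL ?? "http://localhost:3000",
  environment: process.env.BENCH_ENV ?? "local",
})

Because runBenchmarks registers its describe/bench blocks when it is called, invoking it once at the top level of a bench file is all the wiring a consumer needs.
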
package/dist/index.js ADDED
@@ -0,0 +1,245 @@
+ import { writeFileSync } from "node:fs";
+ import { afterAll, bench, describe } from "vitest";
+ import { DurableStream } from "@durable-streams/client";
+ 
+ //#region src/index.ts
+ const results = new Map();
+ function recordResult(name, value, unit) {
+   if (!results.has(name)) results.set(name, {
+     values: [],
+     unit
+   });
+   results.get(name).values.push(value);
+ }
+ function calculateStats(values) {
+   const sorted = [...values].sort((a, b) => a - b);
+   const min = sorted[0];
+   const max = sorted[sorted.length - 1];
+   const mean = values.reduce((a, b) => a + b, 0) / values.length;
+   const p50 = sorted[Math.floor(sorted.length * .5)];
+   const p75 = sorted[Math.floor(sorted.length * .75)];
+   const p99 = sorted[Math.floor(sorted.length * .99)];
+   return {
+     min,
+     max,
+     mean,
+     p50,
+     p75,
+     p99
+   };
+ }
+ function printResults() {
+   console.log(`\n=== RESULTS SO FAR ===`);
+   const tableData = {};
+   for (const [name, data] of results.entries()) {
+     if (data.values.length === 0) continue;
+     const stats = calculateStats(data.values);
+     tableData[name] = {
+       Min: `${stats.min.toFixed(2)} ${data.unit}`,
+       Max: `${stats.max.toFixed(2)} ${data.unit}`,
+       Mean: `${stats.mean.toFixed(2)} ${data.unit}`,
+       P50: `${stats.p50.toFixed(2)} ${data.unit}`,
+       P75: `${stats.p75.toFixed(2)} ${data.unit}`,
+       P99: `${stats.p99.toFixed(2)} ${data.unit}`,
+       Iterations: data.values.length
+     };
+   }
+   console.table(tableData);
+ }
+ /**
+  * Run the full benchmark suite against a server
+  */
+ function runBenchmarks(options) {
+   const { baseUrl, environment = `unknown` } = options;
+   afterAll(() => {
+     const statsOutput = {};
+     for (const [name, data] of results.entries()) {
+       const stats = calculateStats(data.values);
+       statsOutput[name] = {
+         ...stats,
+         unit: data.unit,
+         iterations: data.values.length
+       };
+     }
+     const output = {
+       environment,
+       baseUrl,
+       timestamp: new Date().toISOString(),
+       results: statsOutput
+     };
+     writeFileSync(`benchmark-results.json`, JSON.stringify(output, null, 2), `utf-8`);
+     console.log(`\n\n=== BENCHMARK RESULTS ===`);
+     console.log(`Environment: ${output.environment}`);
+     console.log(`Base URL: ${output.baseUrl}`);
+     console.log(``);
+     const finalTableData = {};
+     for (const [name, stats] of Object.entries(statsOutput)) finalTableData[name] = {
+       Min: `${stats.min.toFixed(2)} ${stats.unit}`,
+       Max: `${stats.max.toFixed(2)} ${stats.unit}`,
+       Mean: `${stats.mean.toFixed(2)} ${stats.unit}`,
+       P50: `${stats.p50.toFixed(2)} ${stats.unit}`,
+       P75: `${stats.p75.toFixed(2)} ${stats.unit}`,
+       P99: `${stats.p99.toFixed(2)} ${stats.unit}`,
+       Iterations: stats.iterations
+     };
+     console.table(finalTableData);
+     console.log(`\n\nResults saved to benchmark-results.json`);
+   });
+   describe(`Latency - Round-trip Time`, () => {
+     bench(`baseline ping (round-trip network latency)`, async () => {
+       const startTime = performance.now();
+       await fetch(`${baseUrl}/health`);
+       const endTime = performance.now();
+       const pingTime = endTime - startTime;
+       recordResult(`Baseline Ping`, pingTime, `ms`);
+     }, {
+       iterations: 5,
+       time: 5e3
+     });
+     bench(`append and receive via long-poll (100 bytes)`, async () => {
+       const streamPath = `/v1/stream/latency-bench-${Date.now()}-${Math.random()}`;
+       const stream = await DurableStream.create({
+         url: `${baseUrl}${streamPath}`,
+         contentType: `application/octet-stream`
+       });
+       const message = new Uint8Array(100).fill(42);
+       let offset = (await stream.head()).offset;
+       const pingStart = performance.now();
+       await fetch(`${baseUrl}/health`);
+       const pingEnd = performance.now();
+       const pingTime = pingEnd - pingStart;
+       const warmupPromise = (async () => {
+         const res = await stream.stream({
+           offset,
+           live: `long-poll`
+         });
+         await new Promise((resolve) => {
+           const unsubscribe = res.subscribeBytes(async (chunk) => {
+             if (chunk.data.length > 0) {
+               offset = chunk.offset;
+               unsubscribe();
+               res.cancel();
+               resolve();
+             }
+           });
+         });
+       })();
+       await stream.append(message);
+       await warmupPromise;
+       const readPromise = (async () => {
+         const res = await stream.stream({
+           offset,
+           live: `long-poll`
+         });
+         await new Promise((resolve) => {
+           const unsubscribe = res.subscribeBytes(async (chunk) => {
+             if (chunk.data.length > 0) {
+               unsubscribe();
+               res.cancel();
+               resolve();
+             }
+           });
+         });
+       })();
+       const startTime = performance.now();
+       await stream.append(message);
+       await readPromise;
+       const endTime = performance.now();
+       await stream.delete();
+       const totalLatency = endTime - startTime;
+       const overhead = totalLatency - pingTime;
+       recordResult(`Latency - Total RTT`, totalLatency, `ms`);
+       recordResult(`Latency - Ping`, pingTime, `ms`);
+       recordResult(`Latency - Overhead`, overhead, `ms`);
+     }, {
+       iterations: 10,
+       time: 15e3
+     });
+     afterAll(() => {
+       printResults();
+     });
+   });
+   describe(`Message Throughput`, () => {
+     bench(`small messages (100 bytes)`, async () => {
+       const streamPath = `/v1/stream/msg-small-${Date.now()}-${Math.random()}`;
+       const stream = await DurableStream.create({
+         url: `${baseUrl}${streamPath}`,
+         contentType: `application/octet-stream`
+       });
+       const message = new Uint8Array(100).fill(42);
+       const messageCount = 1e3;
+       const concurrency = 75;
+       const startTime = performance.now();
+       for (let batch = 0; batch < messageCount / concurrency; batch++) await Promise.all(Array.from({ length: concurrency }, () => stream.append(message)));
+       const endTime = performance.now();
+       const elapsedSeconds = (endTime - startTime) / 1e3;
+       const messagesPerSecond = messageCount / elapsedSeconds;
+       await stream.delete();
+       recordResult(`Throughput - Small Messages`, messagesPerSecond, `msg/sec`);
+     }, {
+       iterations: 3,
+       time: 1e4
+     });
+     bench(`large messages (1MB)`, async () => {
+       const streamPath = `/v1/stream/msg-large-${Date.now()}-${Math.random()}`;
+       const stream = await DurableStream.create({
+         url: `${baseUrl}${streamPath}`,
+         contentType: `application/octet-stream`
+       });
+       const message = new Uint8Array(1024 * 1024).fill(42);
+       const messageCount = 50;
+       const concurrency = 15;
+       const startTime = performance.now();
+       for (let batch = 0; batch < messageCount / concurrency; batch++) await Promise.all(Array.from({ length: concurrency }, () => stream.append(message)));
+       const endTime = performance.now();
+       const elapsedSeconds = (endTime - startTime) / 1e3;
+       const messagesPerSecond = messageCount / elapsedSeconds;
+       await stream.delete();
+       recordResult(`Throughput - Large Messages`, messagesPerSecond, `msg/sec`);
+     }, {
+       iterations: 2,
+       time: 1e4
+     });
+     afterAll(() => {
+       printResults();
+     });
+   });
+   describe.skip(`Byte Throughput`, () => {
+     bench(`streaming throughput - appendStream`, async () => {
+       const streamPath = `/v1/stream/byte-stream-${Date.now()}-${Math.random()}`;
+       const stream = await DurableStream.create({
+         url: `${baseUrl}${streamPath}`,
+         contentType: `application/octet-stream`
+       });
+       const chunkSize = 64 * 1024;
+       const chunk = new Uint8Array(chunkSize).fill(42);
+       const totalChunks = 100;
+       const startTime = performance.now();
+       const appends = [];
+       for (let i = 0; i < totalChunks; i++) appends.push(stream.append(chunk));
+       await Promise.all(appends);
+       const endTime = performance.now();
+       let bytesRead = 0;
+       const readRes = await stream.stream({ live: false });
+       const reader = readRes.bodyStream().getReader();
+       let result = await reader.read();
+       while (!result.done) {
+         bytesRead += result.value.length;
+         result = await reader.read();
+       }
+       const elapsedSeconds = (endTime - startTime) / 1e3;
+       const mbPerSecond = bytesRead / (1024 * 1024) / elapsedSeconds;
+       await stream.delete();
+       recordResult(`Throughput - Streaming (appendStream)`, mbPerSecond, `MB/sec`);
+     }, {
+       iterations: 3,
+       time: 1e4
+     });
+     afterAll(() => {
+       printResults();
+     });
+   });
+ }
+ 
+ //#endregion
+ export { runBenchmarks };
package/package.json ADDED
@@ -0,0 +1,37 @@
+ {
+   "name": "@durable-streams/benchmarks",
+   "version": "0.1.0",
+   "description": "Performance benchmark suite for Durable Streams server implementations",
+   "author": "Durable Stream contributors",
+   "license": "Apache-2.0",
+   "type": "module",
+   "main": "./dist/index.js",
+   "types": "./dist/index.d.ts",
+   "exports": {
+     ".": {
+       "import": "./dist/index.js",
+       "types": "./dist/index.d.ts"
+     }
+   },
+   "scripts": {
+     "build": "tsdown",
+     "dev": "tsdown --watch",
+     "typecheck": "tsc --noEmit"
+   },
+   "dependencies": {
+     "@durable-streams/client": "workspace:*",
+     "vitest": "^3.2.4"
+   },
+   "devDependencies": {
+     "@types/node": "^22.15.21",
+     "tsdown": "^0.9.0",
+     "typescript": "^5.0.0"
+   },
+   "files": [
+     "dist",
+     "src"
+   ],
+   "engines": {
+     "node": ">=18.0.0"
+   }
+ }
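
Since the suite is declared with Vitest's describe/bench APIs and vitest is a runtime dependency, a consuming project would run it in benchmark mode (`vitest bench`). A minimal config sketch, assuming the hypothetical *.bench.ts file name used in the earlier example:

// vitest.config.ts in the consuming project (illustrative)
import { defineConfig } from "vitest/config"

export default defineConfig({
  test: {
    benchmark: {
      // Match the assumed *.bench.ts naming from the usage sketch above
      include: ["**/*.bench.ts"],
    },
  },
})
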
package/src/index.ts ADDED
@@ -0,0 +1,362 @@
+ /**
+  * Performance benchmarks for Durable Streams server implementations
+  * Tests latency, message throughput, and byte throughput
+  *
+  * Success Criteria:
+  * - Latency overhead: < 10ms round-trip
+  * - Message throughput: 100+ messages/second (small messages)
+  * - Byte throughput: 100 MB/s (large messages)
+  */
+ 
+ import { writeFileSync } from "node:fs"
+ import { afterAll, bench, describe } from "vitest"
+ import { DurableStream } from "@durable-streams/client"
+ 
+ export interface BenchmarkOptions {
+   /** Base URL of the server to benchmark */
+   baseUrl: string
+   /** Environment name (e.g., "production", "local") */
+   environment?: string
+ }
+ 
+ // Store benchmark results
+ const results: Map<string, { values: Array<number>; unit: string }> = new Map()
+ 
+ function recordResult(name: string, value: number, unit: string) {
+   if (!results.has(name)) {
+     results.set(name, { values: [], unit })
+   }
+   results.get(name)!.values.push(value)
+ }
+ 
+ function calculateStats(values: Array<number>) {
+   const sorted = [...values].sort((a, b) => a - b)
+   const min = sorted[0]!
+   const max = sorted[sorted.length - 1]!
+   const mean = values.reduce((a, b) => a + b, 0) / values.length
+   const p50 = sorted[Math.floor(sorted.length * 0.5)]!
+   const p75 = sorted[Math.floor(sorted.length * 0.75)]!
+   const p99 = sorted[Math.floor(sorted.length * 0.99)]!
+   return { min, max, mean, p50, p75, p99 }
+ }
+ 
+ function printResults() {
+   console.log(`\n=== RESULTS SO FAR ===`)
+ 
+   const tableData: Record<string, any> = {}
+ 
+   for (const [name, data] of results.entries()) {
+     if (data.values.length === 0) continue
+     const stats = calculateStats(data.values)
+ 
+     tableData[name] = {
+       Min: `${stats.min.toFixed(2)} ${data.unit}`,
+       Max: `${stats.max.toFixed(2)} ${data.unit}`,
+       Mean: `${stats.mean.toFixed(2)} ${data.unit}`,
+       P50: `${stats.p50.toFixed(2)} ${data.unit}`,
+       P75: `${stats.p75.toFixed(2)} ${data.unit}`,
+       P99: `${stats.p99.toFixed(2)} ${data.unit}`,
+       Iterations: data.values.length,
+     }
+   }
+ 
+   console.table(tableData)
+ }
+ 
+ /**
+  * Run the full benchmark suite against a server
+  */
+ export function runBenchmarks(options: BenchmarkOptions): void {
+   const { baseUrl, environment = `unknown` } = options
+ 
+   afterAll(() => {
+     // Calculate statistics and write results
+     const statsOutput: Record<string, any> = {}
+ 
+     for (const [name, data] of results.entries()) {
+       const stats = calculateStats(data.values)
+       statsOutput[name] = {
+         ...stats,
+         unit: data.unit,
+         iterations: data.values.length,
+       }
+     }
+ 
+     const output = {
+       environment,
+       baseUrl,
+       timestamp: new Date().toISOString(),
+       results: statsOutput,
+     }
+ 
+     writeFileSync(
+       `benchmark-results.json`,
+       JSON.stringify(output, null, 2),
+       `utf-8`
+     )
+ 
+     console.log(`\n\n=== BENCHMARK RESULTS ===`)
+     console.log(`Environment: ${output.environment}`)
+     console.log(`Base URL: ${output.baseUrl}`)
+     console.log(``)
+ 
+     const finalTableData: Record<string, any> = {}
+     for (const [name, stats] of Object.entries(statsOutput)) {
+       finalTableData[name] = {
+         Min: `${stats.min.toFixed(2)} ${stats.unit}`,
+         Max: `${stats.max.toFixed(2)} ${stats.unit}`,
+         Mean: `${stats.mean.toFixed(2)} ${stats.unit}`,
+         P50: `${stats.p50.toFixed(2)} ${stats.unit}`,
+         P75: `${stats.p75.toFixed(2)} ${stats.unit}`,
+         P99: `${stats.p99.toFixed(2)} ${stats.unit}`,
+         Iterations: stats.iterations,
+       }
+     }
+ 
+     console.table(finalTableData)
+ 
+     console.log(`\n\nResults saved to benchmark-results.json`)
+   })
+ 
+   // ============================================================================
+   // Latency Benchmarks
+   // ============================================================================
+ 
+   describe(`Latency - Round-trip Time`, () => {
+     bench(
+       `baseline ping (round-trip network latency)`,
+       async () => {
+         // Measure baseline network latency with a simple request to the health endpoint
+         const startTime = performance.now()
+         await fetch(`${baseUrl}/health`)
+         const endTime = performance.now()
+ 
+         const pingTime = endTime - startTime
+         recordResult(`Baseline Ping`, pingTime, `ms`)
+       },
+       { iterations: 5, time: 5000 }
+     )
+ 
+     bench(
+       `append and receive via long-poll (100 bytes)`,
+       async () => {
+         const streamPath = `/v1/stream/latency-bench-${Date.now()}-${Math.random()}`
+         const stream = await DurableStream.create({
+           url: `${baseUrl}${streamPath}`,
+           contentType: `application/octet-stream`,
+         })
+ 
+         const message = new Uint8Array(100).fill(42)
+         let offset = (await stream.head()).offset
+ 
+         // Measure baseline ping for this test
+         const pingStart = performance.now()
+         await fetch(`${baseUrl}/health`)
+         const pingEnd = performance.now()
+         const pingTime = pingEnd - pingStart
+ 
+         // Warmup: append and receive once (don't measure)
+         const warmupPromise = (async () => {
+           const res = await stream.stream({
+             offset,
+             live: `long-poll`,
+           })
+           await new Promise<void>((resolve) => {
+             const unsubscribe = res.subscribeBytes(async (chunk) => {
+               if (chunk.data.length > 0) {
+                 offset = chunk.offset
+                 unsubscribe()
+                 res.cancel()
+                 resolve()
+               }
+             })
+           })
+         })()
+ 
+         await stream.append(message)
+         await warmupPromise
+ 
+         // Actual measurement: append and receive second time
+         const readPromise = (async () => {
+           const res = await stream.stream({
+             offset,
+             live: `long-poll`,
+           })
+           await new Promise<void>((resolve) => {
+             const unsubscribe = res.subscribeBytes(async (chunk) => {
+               if (chunk.data.length > 0) {
+                 unsubscribe()
+                 res.cancel()
+                 resolve()
+               }
+             })
+           })
+         })()
+ 
+         const startTime = performance.now()
+         await stream.append(message)
+         await readPromise
+         const endTime = performance.now()
+ 
+         // Cleanup
+         await stream.delete()
+ 
+         const totalLatency = endTime - startTime
+         const overhead = totalLatency - pingTime
+ 
+         recordResult(`Latency - Total RTT`, totalLatency, `ms`)
+         recordResult(`Latency - Ping`, pingTime, `ms`)
+         recordResult(`Latency - Overhead`, overhead, `ms`)
+       },
+       { iterations: 10, time: 15000 }
+     )
+ 
+     afterAll(() => {
+       printResults()
+     })
+   })
+ 
+   // ============================================================================
+   // Message Throughput Benchmarks
+   // ============================================================================
+ 
+   describe(`Message Throughput`, () => {
+     bench(
+       `small messages (100 bytes)`,
+       async () => {
+         const streamPath = `/v1/stream/msg-small-${Date.now()}-${Math.random()}`
+         const stream = await DurableStream.create({
+           url: `${baseUrl}${streamPath}`,
+           contentType: `application/octet-stream`,
+         })
+ 
+         const message = new Uint8Array(100).fill(42)
+         const messageCount = 1000
+         const concurrency = 75
+ 
+         const startTime = performance.now()
+ 
+         // Send messages in batches with concurrency
+         for (let batch = 0; batch < messageCount / concurrency; batch++) {
+           await Promise.all(
+             Array.from({ length: concurrency }, () => stream.append(message))
+           )
+         }
+ 
+         const endTime = performance.now()
+         const elapsedSeconds = (endTime - startTime) / 1000
+         const messagesPerSecond = messageCount / elapsedSeconds
+ 
+         // Cleanup
+         await stream.delete()
+ 
+         recordResult(
+           `Throughput - Small Messages`,
+           messagesPerSecond,
+           `msg/sec`
+         )
+       },
+       { iterations: 3, time: 10000 }
+     )
+ 
+     bench(
+       `large messages (1MB)`,
+       async () => {
+         const streamPath = `/v1/stream/msg-large-${Date.now()}-${Math.random()}`
+         const stream = await DurableStream.create({
+           url: `${baseUrl}${streamPath}`,
+           contentType: `application/octet-stream`,
+         })
+ 
+         const message = new Uint8Array(1024 * 1024).fill(42) // 1MB
+         const messageCount = 50
+         const concurrency = 15
+ 
+         const startTime = performance.now()
+ 
+         // Send messages in batches with concurrency
+         for (let batch = 0; batch < messageCount / concurrency; batch++) {
+           await Promise.all(
+             Array.from({ length: concurrency }, () => stream.append(message))
+           )
+         }
+ 
+         const endTime = performance.now()
+         const elapsedSeconds = (endTime - startTime) / 1000
+         const messagesPerSecond = messageCount / elapsedSeconds
+ 
+         // Cleanup
+         await stream.delete()
+ 
+         recordResult(
+           `Throughput - Large Messages`,
+           messagesPerSecond,
+           `msg/sec`
+         )
+       },
+       { iterations: 2, time: 10000 }
+     )
+ 
+     afterAll(() => {
+       printResults()
+     })
+   })
+ 
+   // ============================================================================
+   // Byte Throughput Benchmarks
+   // ============================================================================
+ 
+   describe.skip(`Byte Throughput`, () => {
+     bench(
+       `streaming throughput - appendStream`,
+       async () => {
+         const streamPath = `/v1/stream/byte-stream-${Date.now()}-${Math.random()}`
+         const stream = await DurableStream.create({
+           url: `${baseUrl}${streamPath}`,
+           contentType: `application/octet-stream`,
+         })
+ 
+         const chunkSize = 64 * 1024 // 64KB chunks
+         const chunk = new Uint8Array(chunkSize).fill(42)
+         const totalChunks = 100 // Send 100 chunks = ~6.4MB total
+ 
+         const startTime = performance.now()
+ 
+         const appends = []
+         for (let i = 0; i < totalChunks; i++) {
+           appends.push(stream.append(chunk))
+         }
+         await Promise.all(appends)
+ 
+         const endTime = performance.now()
+ 
+         // Read back to verify
+         let bytesRead = 0
+         const readRes = await stream.stream({ live: false })
+         const reader = readRes.bodyStream().getReader()
+         let result = await reader.read()
+         while (!result.done) {
+           bytesRead += result.value.length
+           result = await reader.read()
+         }
+ 
+         const elapsedSeconds = (endTime - startTime) / 1000
+         const mbPerSecond = bytesRead / (1024 * 1024) / elapsedSeconds
+ 
+         // Cleanup
+         await stream.delete()
+ 
+         recordResult(
+           `Throughput - Streaming (appendStream)`,
+           mbPerSecond,
+           `MB/sec`
+         )
+       },
+       { iterations: 3, time: 10000 }
+     )
+ 
+     afterAll(() => {
+       printResults()
+     })
+   })
+ }
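
For reference, the shape of the benchmark-results.json file written by the afterAll hook above, expressed as a TypeScript type. This is derived from the `output` object assembled in runBenchmarks; the type itself is not exported by the package:

// Sketch of the results file structure (derived from the code above)
interface BenchmarkResultsFile {
  environment: string
  baseUrl: string
  timestamp: string // ISO 8601, from new Date().toISOString()
  results: Record<
    string, // benchmark name, e.g. "Latency - Total RTT"
    {
      min: number
      max: number
      mean: number
      p50: number
      p75: number
      p99: number
      unit: string // "ms", "msg/sec", or "MB/sec"
      iterations: number
    }
  >
}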