@nestia/benchmark 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,7 @@
1
import { IBenchmarkEvent } from "../IBenchmarkEvent";

/**
 * RPC interface a servant (worker) program exposes to the master.
 */
export interface IBenchmarkServant {
  /**
   * Execute the benchmark requests assigned to this servant.
   *
   * @param props.count Total number of requests this servant should issue
   * @param props.simultaneous Number of requests to run concurrently
   * @returns Events recorded for every executed request
   */
  execute(props: {
    count: number;
    simultaneous: number;
  }): Promise<IBenchmarkEvent[]>;
}
@@ -0,0 +1,3 @@
1
"use strict";
// Generated CommonJS stub emitted by tsc for a type-only module:
// the interface declaration is erased, leaving no runtime exports.
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=IBenchmarkServant.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"IBenchmarkServant.js","sourceRoot":"","sources":["../../src/internal/IBenchmarkServant.ts"],"names":[],"mappings":""}
package/package.json ADDED
@@ -0,0 +1,52 @@
1
+ {
2
+ "name": "@nestia/benchmark",
3
+ "version": "0.1.0",
4
+ "description": "NestJS Performance Benchmark Program",
5
+ "main": "lib/index.js",
6
+ "typings": "lib/index.d.ts",
7
+ "scripts": {
8
+ "build": "npm run build:main && npm run build:test",
9
+ "build:main": "rimraf lib && tsc",
10
+ "build:test": "rimraf bin && tsc -p test/tsconfig.json",
11
+ "dev": "npm run build:test -- --watch",
12
+ "prepare": "ts-patch install && typia patch",
13
+ "test": "node bin/test"
14
+ },
15
+ "keywords": [
16
+ "e2e",
17
+ "nestia",
18
+ "nestjs",
19
+ "Performance",
20
+ "benchmark"
21
+ ],
22
+ "author": "Jeongho Nam",
23
+ "license": "MIT",
24
+ "devDependencies": {
25
+ "@nestia/core": "^3.5.0",
26
+ "@nestia/e2e": "^0.7.0",
27
+ "@nestia/sdk": "^3.5.0",
28
+ "@nestjs/common": "^10.3.10",
29
+ "@nestjs/core": "^10.3.10",
30
+ "@nestjs/platform-express": "^10.3.10",
31
+ "@types/uuid": "^10.0.0",
32
+ "nestia": "^5.3.1",
33
+ "ts-node": "^10.9.2",
34
+ "ts-patch": "^3.2.1",
35
+ "typescript": "^5.5.3",
36
+ "typescript-transform-paths": "^3.4.7",
37
+ "typia": "^6.4.3",
38
+ "uuid": "^10.0.0"
39
+ },
40
+ "dependencies": {
41
+ "@nestia/fetcher": "^3.5.0",
42
+ "tgrid": "^1.0.2",
43
+ "tstl": "^3.0.0"
44
+ },
45
+ "files": [
46
+ "lib",
47
+ "src",
48
+ "README.md",
49
+ "LICENSE",
50
+ "package.json"
51
+ ]
52
+ }
@@ -0,0 +1,486 @@
1
+ import { IConnection } from "@nestia/fetcher";
2
+ import fs from "fs";
3
+ import os from "os";
4
+ import { Driver, WorkerConnector, WorkerServer } from "tgrid";
5
+ import { HashMap, hash } from "tstl";
6
+
7
+ import { IBenchmarkEvent } from "./IBenchmarkEvent";
8
+ import { IBenchmarkMaster } from "./internal/IBenchmarkMaster";
9
+ import { IBenchmarkServant } from "./internal/IBenchmarkServant";
10
+
11
+ /**
12
+ * Dynamic benchmark executor running prefixed functions.
13
+ *
14
+ * `DynamicBenchmarker` is composed with two programs,
15
+ * {@link DynamicBenchmarker.master} and
16
+ * {@link DynamicBenchmarker.servant servants}. The master program creates
17
+ * multiple servant programs, and the servant programs execute the prefixed
18
+ * functions in parallel. When the pre-configured count of requests are all
19
+ * completed, the master program collects the results and returns them.
20
+ *
21
+ * Therefore, when you want to benchmark the performance of a backend server,
22
+ * you have to make two programs; one for calling the
23
+ * {@link DynamicBenchmarker.master} function, and the other for calling the
24
+ * {@link DynamicBenchmarker.servant} function. Also, never forget to write
25
+ * the path of the servant program to the
26
+ * {@link DynamicBenchmarker.IMasterProps.servant} property.
27
+ *
28
+ * Also, when you complete the benchmark execution through the
29
+ * {@link DynamicBenchmarker.master} and {@link DynamicBenchmarker.servant}
30
+ * functions, you can convert the result to markdown content by using the
31
+ * {@link DynamicBenchmarker.markdown} function.
32
+ *
33
+ * Additionally, if you hope to see some utilization cases,
34
+ * see the below example tagged links.
35
+ *
36
+ * @example https://github.com/samchon/nestia-start/blob/master/test/benchmaark/index.ts
37
+ * @example https://github.com/samchon/backend/blob/master/test/benchmark/index.ts
38
+ * @author Jeongho Nam - https://github.com/samchon
39
+ */
40
+ export namespace DynamicBenchmarker {
41
+ /**
42
+ * Properties of the master program.
43
+ */
44
+ export interface IMasterProps {
45
+ /**
46
+ * Total count of the requests.
47
+ */
48
+ count: number;
49
+
50
+ /**
51
+ * Number of threads.
52
+ *
53
+ * The number of threads to be executed as parallel servant.
54
+ */
55
+ threads: number;
56
+
57
+ /**
58
+ * Number of simultaneous requests.
59
+ *
60
+ * The number of requests to be executed simultaneously.
61
+ *
62
+ * This property value would be divided by the {@link threads} in the servants.
63
+ */
64
+ simultaneous: number;
65
+
66
+ /**
67
+ * Path of the servant program.
68
+ *
69
+ * The path of the servant program executing the
70
+ * {@link DynamicBenchmarker.servant} function.
71
+ */
72
+ servant: string;
73
+
74
+ /**
75
+ * Filter function.
76
+ *
77
+ * The filter function to determine whether to execute the function in
78
+ * the servant or not.
79
+ *
80
+ * @param name Function name
81
+ * @returns Whether to execute the function or not.
82
+ */
83
+ filter?: (name: string) => boolean;
84
+
85
+ /**
86
+ * Progress callback function.
87
+ *
88
+ * @param complete The number of completed requests.
89
+ */
90
+ progress?: (complete: number) => void;
91
+
92
+ /**
93
+ * Standard I/O option.
94
+ *
95
+ * The standard I/O option for the servant programs.
96
+ */
97
+ stdio?: undefined | "overlapped" | "pipe" | "ignore" | "inherit";
98
+ }
99
+
100
+ /**
101
+ * Properties of the servant progrma.
102
+ */
103
+ export interface IServantProps<Parameters extends any[]> {
104
+ /**
105
+ * Default connection.
106
+ *
107
+ * Default connection to be used in the servant.
108
+ */
109
+ connection: IConnection;
110
+
111
+ /**
112
+ * Location of the benchmark functions.
113
+ */
114
+ location: string;
115
+
116
+ /**
117
+ * Prefix of the benchmark functions.
118
+ *
119
+ * Every prefixed function will be executed in the servant.
120
+ *
121
+ * In other words, if a function name doesn't start with the prefix,
122
+ * then it would never be executed.
123
+ */
124
+ prefix: string;
125
+
126
+ /**
127
+ * Get parameters of a function.
128
+ *
129
+ * When composing the parameters, never forget to copy the
130
+ * {@link IConnection.logger} property of default connection to the
131
+ * returning parameters.
132
+ *
133
+ * @param connection Default connection instance
134
+ * @param name Function name
135
+ */
136
+ parameters: (connection: IConnection, name: string) => Parameters;
137
+ }
138
+
139
+ /**
140
+ * Benchmark report.
141
+ */
142
+ export interface IReport {
143
+ count: number;
144
+ threads: number;
145
+ simultaneous: number;
146
+ started_at: string;
147
+ completed_at: string;
148
+ statistics: IReport.IStatistics;
149
+ endpoints: Array<IReport.IEndpoint & IReport.IStatistics>;
150
+ }
151
+ export namespace IReport {
152
+ export interface IEndpoint {
153
+ method: string;
154
+ path: string;
155
+ }
156
+ export interface IStatistics {
157
+ count: number;
158
+ success: number;
159
+ mean: number | null;
160
+ stdev: number | null;
161
+ minimum: number | null;
162
+ maximum: number | null;
163
+ }
164
+ }
165
+
166
+ /**
167
+ * Master program.
168
+ *
169
+ * Creates a master program that executing the servant programs in parallel.
170
+ *
171
+ * Note that, {@link IMasterProps.servant} property must be the path of
172
+ * the servant program executing the {@link servant} function.
173
+ *
174
+ * @param props Properties of the master program
175
+ * @returns Benchmark report
176
+ */
177
+ export const master = async (props: IMasterProps): Promise<IReport> => {
178
+ const completes: number[] = new Array(props.threads).fill(0);
179
+ const servants: WorkerConnector<
180
+ null,
181
+ IBenchmarkMaster,
182
+ IBenchmarkServant
183
+ >[] = await Promise.all(
184
+ new Array(props.threads).fill(null).map(async (_, i) => {
185
+ const connector: WorkerConnector<
186
+ null,
187
+ IBenchmarkMaster,
188
+ IBenchmarkServant
189
+ > = new WorkerConnector(
190
+ null,
191
+ {
192
+ filter: props.filter ?? (() => true),
193
+ progress: (current) => {
194
+ completes[i] = current;
195
+ if (props.progress)
196
+ props.progress(completes.reduce((a, b) => a + b, 0));
197
+ },
198
+ },
199
+ "process",
200
+ );
201
+ await connector.connect(props.servant, { stdio: props.stdio });
202
+ return connector;
203
+ }),
204
+ );
205
+ const started_at: Date = new Date();
206
+ const events: IBenchmarkEvent[] = (
207
+ await Promise.all(
208
+ servants.map((connector) =>
209
+ connector.getDriver().execute({
210
+ count: Math.ceil(props.count / props.threads),
211
+ simultaneous: Math.ceil(props.simultaneous / props.threads),
212
+ }),
213
+ ),
214
+ )
215
+ ).flat();
216
+ const completed_at: Date = new Date();
217
+ await Promise.all(servants.map((connector) => connector.close()));
218
+ if (props.progress) props.progress(props.count);
219
+
220
+ const endpoints: HashMap<IReport.IEndpoint, IBenchmarkEvent[]> =
221
+ new HashMap(
222
+ (key) => hash(key.method, key.path),
223
+ (x, y) => x.method === y.method && x.path === y.path,
224
+ );
225
+ for (const e of events)
226
+ endpoints
227
+ .take(
228
+ {
229
+ method: e.metadata.method,
230
+ path: e.metadata.template ?? e.metadata.path,
231
+ },
232
+ () => [],
233
+ )
234
+ .push(e);
235
+ return {
236
+ count: props.count,
237
+ threads: props.threads,
238
+ simultaneous: props.simultaneous,
239
+ statistics: statistics(events),
240
+ endpoints: [...endpoints].map((it) => ({
241
+ ...statistics(it.second),
242
+ ...it.first,
243
+ })),
244
+ started_at: started_at.toISOString(),
245
+ completed_at: completed_at.toISOString(),
246
+ };
247
+ };
248
+
249
+ /**
250
+ * Create a servant program.
251
+ *
252
+ * Creates a servant program executing the prefixed functions in parallel.
253
+ *
254
+ * @param props Properties of the servant program
255
+ * @returns Servant program as a worker server
256
+ */
257
+ export const servant = async <Parameters extends any[]>(
258
+ props: IServantProps<Parameters>,
259
+ ): Promise<WorkerServer<null, IBenchmarkServant, IBenchmarkMaster>> => {
260
+ const server: WorkerServer<null, IBenchmarkServant, IBenchmarkMaster> =
261
+ new WorkerServer();
262
+ await server.open({
263
+ execute: execute({
264
+ driver: server.getDriver(),
265
+ props,
266
+ }),
267
+ });
268
+ return server;
269
+ };
270
+
271
+ /**
272
+ * Convert the benchmark report to markdown content.
273
+ *
274
+ * @param report Benchmark report
275
+ * @returns Markdown content
276
+ */
277
+ export const markdown = (report: DynamicBenchmarker.IReport): string => {
278
+ const format = (value: number | null) =>
279
+ value === null ? "N/A" : (Math.floor(value * 100) / 100).toLocaleString();
280
+ const writeHead = () =>
281
+ [
282
+ "Type",
283
+ "Count",
284
+ "Success",
285
+ "Mean.",
286
+ "Stdev.",
287
+ "Minimum",
288
+ "Maximum",
289
+ ].join(" | ") +
290
+ "\n" +
291
+ new Array(7).fill("----").join("|");
292
+ const writeRow = (
293
+ title: string,
294
+ s: DynamicBenchmarker.IReport.IStatistics,
295
+ ) =>
296
+ [
297
+ title,
298
+ s.count.toLocaleString(),
299
+ s.success.toLocaleString(),
300
+ format(s.mean),
301
+ format(s.stdev),
302
+ format(s.minimum),
303
+ format(s.maximum),
304
+ ].join(" | ");
305
+ return [
306
+ `# Benchmark Report`,
307
+ "> Generated by [`@nestia/e2e`](https://github.com/samchon/nestia)",
308
+ ``,
309
+ ` - Specifications`,
310
+ ` - CPU: ${os.cpus()[0].model}`,
311
+ ` - RAM: ${Math.floor(os.totalmem() / 1024 / 1024 / 1024).toLocaleString()} GB`,
312
+ ` - NodeJS Version: ${process.version}`,
313
+ ` - Backend Server: 1 core / 1 thread`,
314
+ ` - Arguments`,
315
+ ` - Count: ${report.count.toLocaleString()}`,
316
+ ` - Threads: ${report.threads.toLocaleString()}`,
317
+ ` - Simultaneous: ${report.simultaneous.toLocaleString()}`,
318
+ ` - Time`,
319
+ ` - Start: ${report.started_at}`,
320
+ ` - Complete: ${report.completed_at}`,
321
+ ` - Elapsed: ${(new Date(report.completed_at).getTime() - new Date(report.started_at).getTime()).toLocaleString()} ms`,
322
+ ``,
323
+ writeHead(),
324
+ writeRow("Total", report.statistics),
325
+ "",
326
+ "> Unit: milliseconds",
327
+ "",
328
+ "## Endpoints",
329
+ writeHead(),
330
+ ...report.endpoints
331
+ .slice()
332
+ .sort((a, b) => (b.mean ?? 0) - (a.mean ?? 0))
333
+ .map((endpoint) =>
334
+ writeRow(`${endpoint.method} ${endpoint.path}`, endpoint),
335
+ ),
336
+ "",
337
+ "> Unit: milliseconds",
338
+ "",
339
+ "## Failures",
340
+ "Method | Path | Count | Failures",
341
+ "-------|------|-------|----------",
342
+ ...report.endpoints
343
+ .filter((e) => e.success !== e.count)
344
+ .slice()
345
+ .sort((a, b) => b.count - a.count)
346
+ .map((e) =>
347
+ [
348
+ e.method,
349
+ e.path,
350
+ e.count.toLocaleString(),
351
+ (e.count - e.success).toLocaleString(),
352
+ ].join(" | "),
353
+ ),
354
+ ].join("\n");
355
+ };
356
+
357
+ const execute =
358
+ <Parameters extends any[]>(ctx: {
359
+ driver: Driver<IBenchmarkMaster>;
360
+ props: IServantProps<Parameters>;
361
+ }) =>
362
+ async (mass: {
363
+ count: number;
364
+ simultaneous: number;
365
+ }): Promise<IBenchmarkEvent[]> => {
366
+ const functions: IFunction<Parameters>[] = [];
367
+ await iterate({
368
+ collection: functions,
369
+ driver: ctx.driver,
370
+ props: ctx.props,
371
+ })(ctx.props.location);
372
+
373
+ const entireEvents: IBenchmarkEvent[] = [];
374
+ await Promise.all(
375
+ new Array(mass.simultaneous)
376
+ .fill(null)
377
+ .map(() => 1)
378
+ .map(async () => {
379
+ while (entireEvents.length < mass.count) {
380
+ const localEvents: IBenchmarkEvent[] = [];
381
+ const func: IFunction<Parameters> =
382
+ functions[Math.floor(Math.random() * functions.length)];
383
+ const connection: IConnection = {
384
+ ...ctx.props.connection,
385
+ logger: async (fe): Promise<void> => {
386
+ const be: IBenchmarkEvent = {
387
+ metadata: fe.route,
388
+ status: fe.status,
389
+ started_at: fe.started_at.toISOString(),
390
+ repond_at: fe.respond_at?.toISOString() ?? null,
391
+ completed_at: fe.completed_at.toISOString(),
392
+ success: true,
393
+ };
394
+ localEvents.push(be);
395
+ entireEvents.push(be);
396
+ },
397
+ };
398
+ try {
399
+ await func.value(...ctx.props.parameters(connection, func.key));
400
+ } catch (exp) {
401
+ for (const e of localEvents)
402
+ e.success = e.status === 200 || e.status === 201;
403
+ }
404
+ if (localEvents.length !== 0)
405
+ ctx.driver.progress(entireEvents.length).catch(() => {});
406
+ }
407
+ }),
408
+ );
409
+ await ctx.driver.progress(entireEvents.length);
410
+ return entireEvents;
411
+ };
412
+ }
413
+
414
/**
 * A benchmark function gathered from the benchmark directory.
 */
interface IFunction<Parameters extends any[]> {
  /** Exported (prefixed) function name. */
  key: string;
  /** The function itself, invoked with servant-composed parameters. */
  value: (...args: Parameters) => Promise<void>;
}
418
+
419
+ const iterate =
420
+ <Parameters extends any[]>(ctx: {
421
+ collection: IFunction<Parameters>[];
422
+ driver: Driver<IBenchmarkMaster>;
423
+ props: DynamicBenchmarker.IServantProps<Parameters>;
424
+ }) =>
425
+ async (path: string): Promise<void> => {
426
+ const directory: string[] = await fs.promises.readdir(path);
427
+ for (const file of directory) {
428
+ const location: string = `${path}/${file}`;
429
+ const stat: fs.Stats = await fs.promises.stat(location);
430
+ if (stat.isDirectory() === true) await iterate(ctx)(location);
431
+ else if (file.endsWith(".js") === true) {
432
+ const modulo = await import(location);
433
+ for (const [key, value] of Object.entries(modulo)) {
434
+ if (typeof value !== "function") continue;
435
+ else if (key.startsWith(ctx.props.prefix) === false) continue;
436
+ else if ((await ctx.driver.filter(key)) === false) continue;
437
+ ctx.collection.push({
438
+ key,
439
+ value: value as (...args: Parameters) => Promise<any>,
440
+ });
441
+ }
442
+ }
443
+ }
444
+ };
445
+
446
+ const statistics = (
447
+ events: IBenchmarkEvent[],
448
+ ): DynamicBenchmarker.IReport.IStatistics => {
449
+ const successes: IBenchmarkEvent[] = events.filter((event) => event.success);
450
+ return {
451
+ count: events.length,
452
+ success: successes.length,
453
+ ...average(events),
454
+ };
455
+ };
456
+
457
+ const average = (
458
+ events: IBenchmarkEvent[],
459
+ ): Pick<
460
+ DynamicBenchmarker.IReport.IStatistics,
461
+ "mean" | "stdev" | "minimum" | "maximum"
462
+ > => {
463
+ if (events.length === 0)
464
+ return {
465
+ mean: null,
466
+ stdev: null,
467
+ minimum: null,
468
+ maximum: null,
469
+ };
470
+ let mean: number = 0;
471
+ let stdev: number = 0;
472
+ let minimum: number = Number.MAX_SAFE_INTEGER;
473
+ let maximum: number = Number.MIN_SAFE_INTEGER;
474
+ for (const event of events) {
475
+ const elapsed: number =
476
+ new Date(event.completed_at).getTime() -
477
+ new Date(event.started_at).getTime();
478
+ mean += elapsed;
479
+ stdev += elapsed * elapsed;
480
+ minimum = Math.min(minimum, elapsed);
481
+ maximum = Math.max(maximum, elapsed);
482
+ }
483
+ mean /= events.length;
484
+ stdev = Math.sqrt(stdev / events.length - mean * mean);
485
+ return { mean, stdev, minimum, maximum };
486
+ };
@@ -0,0 +1,10 @@
1
import { IFetchRoute } from "@nestia/fetcher";

/**
 * A single benchmark event: one HTTP request executed by a servant.
 */
export interface IBenchmarkEvent {
  /** Route metadata of the called API function. */
  metadata: IFetchRoute<any>;
  // presumably null when no response arrived — confirm against @nestia/fetcher
  status: number | null;
  /** ISO-8601 timestamp when the request started. */
  started_at: string;
  // NOTE(review): property name is a typo of "respond_at", but it is part of
  // the published interface — renaming would be a breaking change.
  repond_at: string | null;
  /** ISO-8601 timestamp when the request completed. */
  completed_at: string;
  /** Whether the call succeeded; set false on caught failures. */
  success: boolean;
}
package/src/index.ts ADDED
@@ -0,0 +1,2 @@
1
+ export * from "./DynamicBenchmarker";
2
+ export * from "./IBenchmarkEvent";
@@ -0,0 +1,4 @@
1
/**
 * RPC interface the master program exposes to its servants.
 */
export interface IBenchmarkMaster {
  /** Whether the named benchmark function should be executed. */
  filter: (name: string) => boolean;
  /** Report the number of requests completed so far by the calling servant. */
  progress: (current: number) => void;
}
@@ -0,0 +1,8 @@
1
import { IBenchmarkEvent } from "../IBenchmarkEvent";

/**
 * RPC interface a servant (worker) program exposes to the master.
 */
export interface IBenchmarkServant {
  /**
   * Execute the benchmark requests assigned to this servant.
   *
   * @param props.count Total number of requests this servant should issue
   * @param props.simultaneous Number of requests to run concurrently
   * @returns Events recorded for every executed request
   */
  execute(props: {
    count: number;
    simultaneous: number;
  }): Promise<IBenchmarkEvent[]>;
}