express-api-stress-tester 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +250 -0
- package/example-config.json +14 -0
- package/package.json +57 -0
- package/src/cli.js +60 -0
- package/src/dynamicGenerators.js +76 -0
- package/src/index.js +11 -0
- package/src/logger.js +51 -0
- package/src/metrics.js +94 -0
- package/src/payloadParser.js +42 -0
- package/src/runner.js +116 -0
- package/src/worker.js +86 -0
package/README.md
ADDED
|
@@ -0,0 +1,250 @@
|
|
|
1
|
+
# express-api-stress-tester
|
|
2
|
+
|
|
3
|
+
High-performance API stress testing and load testing tool for Node.js.
|
|
4
|
+
Simulate up to **100,000 concurrent users** sending requests to your APIs.
|
|
5
|
+
|
|
6
|
+
---
|
|
7
|
+
|
|
8
|
+
## Features
|
|
9
|
+
|
|
10
|
+
- **High concurrency** – powered by [undici](https://github.com/nodejs/undici) and `worker_threads`
|
|
11
|
+
- **Dynamic payloads** – auto-generate names, emails, UUIDs, and more
|
|
12
|
+
- **Bulk payload mode** – send different bodies across requests
|
|
13
|
+
- **Detailed reports** – requests/sec, response times, error rates, CPU & memory
|
|
14
|
+
- **CLI & programmatic API** – use from the terminal or inside your Node.js app
|
|
15
|
+
- **Streaming architecture** – minimal memory footprint at scale
|
|
16
|
+
|
|
17
|
+
---
|
|
18
|
+
|
|
19
|
+
## Installation
|
|
20
|
+
|
|
21
|
+
```bash
|
|
22
|
+
npm install express-api-stress-tester
|
|
23
|
+
```
|
|
24
|
+
|
|
25
|
+
Or run directly with `npx`:
|
|
26
|
+
|
|
27
|
+
```bash
|
|
28
|
+
npx express-api-stress-tester config.json
|
|
29
|
+
```
|
|
30
|
+
|
|
31
|
+
---
|
|
32
|
+
|
|
33
|
+
## CLI Usage
|
|
34
|
+
|
|
35
|
+
Create a `config.json` file:
|
|
36
|
+
|
|
37
|
+
```json
|
|
38
|
+
{
|
|
39
|
+
"url": "https://api.example.com/users",
|
|
40
|
+
"method": "POST",
|
|
41
|
+
"concurrency": 5000,
|
|
42
|
+
"duration": 30,
|
|
43
|
+
"headers": {
|
|
44
|
+
"Content-Type": "application/json",
|
|
45
|
+
"Authorization": "Bearer YOUR_TOKEN"
|
|
46
|
+
},
|
|
47
|
+
"payload": {
|
|
48
|
+
"username": "{name}",
|
|
49
|
+
"email": "{email}"
|
|
50
|
+
}
|
|
51
|
+
}
|
|
52
|
+
```
|
|
53
|
+
|
|
54
|
+
Run the test:
|
|
55
|
+
|
|
56
|
+
```bash
|
|
57
|
+
npx express-api-stress-tester config.json
|
|
58
|
+
```
|
|
59
|
+
|
|
60
|
+
The tool will print a summary to stdout and append it to `stress-test-report.txt`.
|
|
61
|
+
|
|
62
|
+
---
|
|
63
|
+
|
|
64
|
+
## Programmatic Usage
|
|
65
|
+
|
|
66
|
+
```js
|
|
67
|
+
import { stressTest } from "express-api-stress-tester";
|
|
68
|
+
|
|
69
|
+
const summary = await stressTest({
|
|
70
|
+
url: "https://api.example.com/users",
|
|
71
|
+
method: "POST",
|
|
72
|
+
concurrency: 5000,
|
|
73
|
+
duration: 30,
|
|
74
|
+
headers: {
|
|
75
|
+
"Content-Type": "application/json",
|
|
76
|
+
},
|
|
77
|
+
payload: {
|
|
78
|
+
name: "{name}",
|
|
79
|
+
email: "{email}",
|
|
80
|
+
},
|
|
81
|
+
});
|
|
82
|
+
|
|
83
|
+
console.log(summary);
|
|
84
|
+
// {
|
|
85
|
+
// totalRequests: 252000,
|
|
86
|
+
// requestsPerSec: 8400,
|
|
87
|
+
// avgResponseTime: 120,
|
|
88
|
+
// errorRate: 1.2,
|
|
89
|
+
// successRate: 98.8,
|
|
90
|
+
// cpuPercent: '45.0',
|
|
91
|
+
// memoryMB: '320.0',
|
|
92
|
+
// result: 'PASSED'
|
|
93
|
+
// }
|
|
94
|
+
```
|
|
95
|
+
|
|
96
|
+
---
|
|
97
|
+
|
|
98
|
+
## Configuration Options
|
|
99
|
+
|
|
100
|
+
| Option | Type | Required | Default | Description |
|
|
101
|
+
| ------------- | -------- | -------- | ------- | --------------------------------------- |
|
|
102
|
+
| `url` | string | yes | – | Target API URL |
|
|
103
|
+
| `method` | string | no | `GET` | HTTP method (GET, POST, PUT, PATCH, DELETE) |
|
|
104
|
+
| `headers` | object | no | `{}` | Request headers |
|
|
105
|
+
| `payload` | object | no | – | Single payload template (supports dynamic placeholders) |
|
|
106
|
+
| `payloads` | array | no | – | Bulk payloads – array of payload objects distributed round-robin |
|
|
107
|
+
| `concurrency` | number | no | `1` | Number of concurrent users (up to 100k) |
|
|
108
|
+
| `duration` | number | no | `10` | Test duration in seconds |
|
|
109
|
+
|
|
110
|
+
---
|
|
111
|
+
|
|
112
|
+
## Dynamic Payload Placeholders
|
|
113
|
+
|
|
114
|
+
Use placeholders in your payload values. They are replaced with fresh random data for every request.
|
|
115
|
+
|
|
116
|
+
| Placeholder | Example Output |
|
|
117
|
+
| -------------- | ---------------------------- |
|
|
118
|
+
| `{name}` | `Alice Johnson` |
|
|
119
|
+
| `{botName}` | `AlphaBot` |
|
|
120
|
+
| `{email}` | `alice4231@example.com` |
|
|
121
|
+
| `{uuid}` | `550e8400-e29b-41d4-a716-...`|
|
|
122
|
+
| `{number}` | `483291` |
|
|
123
|
+
| `{timestamp}` | `1710547200000` |
|
|
124
|
+
|
|
125
|
+
**Example:**
|
|
126
|
+
|
|
127
|
+
```json
|
|
128
|
+
{
|
|
129
|
+
"payload": {
|
|
130
|
+
"name": "{name}",
|
|
131
|
+
"botName": "{botName}",
|
|
132
|
+
"email": "{email}",
|
|
133
|
+
"uuid": "{uuid}",
|
|
134
|
+
"age": "{number}",
|
|
135
|
+
"createdAt": "{timestamp}"
|
|
136
|
+
}
|
|
137
|
+
}
|
|
138
|
+
```
|
|
139
|
+
|
|
140
|
+
Placeholders also work inside larger strings:
|
|
141
|
+
|
|
142
|
+
```json
|
|
143
|
+
{ "greeting": "Hello {name}, your ID is {uuid}" }
|
|
144
|
+
```
|
|
145
|
+
|
|
146
|
+
---
|
|
147
|
+
|
|
148
|
+
## Bulk Payload Mode
|
|
149
|
+
|
|
150
|
+
Send multiple different payloads. They are distributed across requests in round-robin order.
|
|
151
|
+
|
|
152
|
+
```json
|
|
153
|
+
{
|
|
154
|
+
"url": "https://api.example.com/users",
|
|
155
|
+
"method": "POST",
|
|
156
|
+
"concurrency": 100,
|
|
157
|
+
"duration": 10,
|
|
158
|
+
"headers": { "Content-Type": "application/json" },
|
|
159
|
+
"payloads": [
|
|
160
|
+
{ "name": "Alice" },
|
|
161
|
+
{ "name": "Bob" },
|
|
162
|
+
{ "name": "Charlie" }
|
|
163
|
+
]
|
|
164
|
+
}
|
|
165
|
+
```
|
|
166
|
+
|
|
167
|
+
---
|
|
168
|
+
|
|
169
|
+
## Report Output
|
|
170
|
+
|
|
171
|
+
After each test, a report is appended to `stress-test-report.txt`:
|
|
172
|
+
|
|
173
|
+
```
|
|
174
|
+
==================================================
|
|
175
|
+
API Stress Test Report
|
|
176
|
+
==================================================
|
|
177
|
+
API URL: https://api.example.com/users
|
|
178
|
+
Method: POST
|
|
179
|
+
Concurrent Users: 5000
|
|
180
|
+
Duration (s): 30.0
|
|
181
|
+
Total Requests: 252000
|
|
182
|
+
Requests/sec: 8400
|
|
183
|
+
Avg Response Time: 120ms
|
|
184
|
+
Error Rate: 1.2%
|
|
185
|
+
Success Rate: 98.8%
|
|
186
|
+
CPU Usage: 45.0%
|
|
187
|
+
Memory Usage: 320.0MB
|
|
188
|
+
Result: PASSED
|
|
189
|
+
==================================================
|
|
190
|
+
```
|
|
191
|
+
|
|
192
|
+
**Result rules:**
|
|
193
|
+
- `PASSED` → error rate < 5%
|
|
194
|
+
- `FAILED` → error rate ≥ 5%
|
|
195
|
+
|
|
196
|
+
---
|
|
197
|
+
|
|
198
|
+
## Project Structure
|
|
199
|
+
|
|
200
|
+
```
|
|
201
|
+
api-stress-tester/
|
|
202
|
+
├── src/
|
|
203
|
+
│ ├── index.js # Public API exports
|
|
204
|
+
│ ├── runner.js # Main orchestrator (spawns workers, collects metrics)
|
|
205
|
+
│ ├── worker.js # Worker thread – executes HTTP requests via undici
|
|
206
|
+
│ ├── payloadParser.js # Payload template resolver (single & bulk)
|
|
207
|
+
│ ├── dynamicGenerators.js # Random data generators for placeholders
|
|
208
|
+
│ ├── metrics.js # MetricsCollector – aggregates counters
|
|
209
|
+
│ ├── logger.js # Streaming logger & report writer
|
|
210
|
+
│ └── cli.js # CLI entry point
|
|
211
|
+
├── tests/
|
|
212
|
+
│ ├── payload.test.js # Payload & generator tests
|
|
213
|
+
│ └── stress.test.js # Metrics, report, and runner tests
|
|
214
|
+
├── package.json
|
|
215
|
+
├── example-config.json
|
|
216
|
+
└── README.md
|
|
217
|
+
```
|
|
218
|
+
|
|
219
|
+
---
|
|
220
|
+
|
|
221
|
+
## Running Tests
|
|
222
|
+
|
|
223
|
+
```bash
|
|
224
|
+
npm test
|
|
225
|
+
```
|
|
226
|
+
|
|
227
|
+
---
|
|
228
|
+
|
|
229
|
+
## Performance Notes
|
|
230
|
+
|
|
231
|
+
- Uses **undici** for HTTP – significantly faster than `axios` or `node-fetch`
|
|
232
|
+
- **Worker threads** distribute load across CPU cores
|
|
233
|
+
- Controlled **batch dispatching** prevents event loop starvation
|
|
234
|
+
- Metrics use **aggregate counters** (not per-request arrays) to minimise memory
|
|
235
|
+
- Report writing uses **appendFileSync** – no large in-memory buffers
|
|
236
|
+
|
|
237
|
+
---
|
|
238
|
+
|
|
239
|
+
## How to Publish
|
|
240
|
+
|
|
241
|
+
```bash
|
|
242
|
+
npm login --auth-type=web
|
|
243
|
+
npm publish
|
|
244
|
+
```
|
|
245
|
+
|
|
246
|
+
---
|
|
247
|
+
|
|
248
|
+
## License
|
|
249
|
+
|
|
250
|
+
MIT
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
{
|
|
2
|
+
"url": "https://jsonplaceholder.typicode.com/posts",
|
|
3
|
+
"method": "POST",
|
|
4
|
+
"concurrency": 10,
|
|
5
|
+
"duration": 5,
|
|
6
|
+
"headers": {
|
|
7
|
+
"Content-Type": "application/json"
|
|
8
|
+
},
|
|
9
|
+
"payload": {
|
|
10
|
+
"title": "{name}",
|
|
11
|
+
"body": "Stress test payload {uuid}",
|
|
12
|
+
"userId": "{number}"
|
|
13
|
+
}
|
|
14
|
+
}
|
package/package.json
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "express-api-stress-tester",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"description": "High-performance API stress testing and load testing tool capable of simulating up to 100k concurrent users",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"main": "src/index.js",
|
|
7
|
+
"exports": {
|
|
8
|
+
".": "./src/index.js"
|
|
9
|
+
},
|
|
10
|
+
"bin": {
|
|
11
|
+
"express-api-stress-tester": "src/cli.js"
|
|
12
|
+
},
|
|
13
|
+
"scripts": {
|
|
14
|
+
"test": "node --experimental-vm-modules node_modules/.bin/jest --forceExit",
|
|
15
|
+
"stress": "node src/cli.js"
|
|
16
|
+
},
|
|
17
|
+
"keywords": [
|
|
18
|
+
"api",
|
|
19
|
+
"stress-test",
|
|
20
|
+
"load-test",
|
|
21
|
+
"performance",
|
|
22
|
+
"benchmark",
|
|
23
|
+
"http",
|
|
24
|
+
"concurrent",
|
|
25
|
+
"undici"
|
|
26
|
+
],
|
|
27
|
+
"author": "",
|
|
28
|
+
"license": "MIT",
|
|
29
|
+
"dependencies": {
|
|
30
|
+
"undici": "^6.24.1",
|
|
31
|
+
"uuid": "^11.1.0"
|
|
32
|
+
},
|
|
33
|
+
"devDependencies": {
|
|
34
|
+
"@jest/globals": "^29.7.0",
|
|
35
|
+
"@jest/test-sequencer": "^29.7.0",
|
|
36
|
+
"jest": "^29.7.0"
|
|
37
|
+
},
|
|
38
|
+
"jest": {
|
|
39
|
+
"transform": {},
|
|
40
|
+
"testEnvironment": "node",
|
|
41
|
+
"extensionsToTreatAsEsm": [],
|
|
42
|
+
"testMatch": [
|
|
43
|
+
"**/tests/**/*.test.js"
|
|
44
|
+
]
|
|
45
|
+
},
|
|
46
|
+
"engines": {
|
|
47
|
+
"node": ">=18.0.0"
|
|
48
|
+
},
|
|
49
|
+
"publishConfig": {
|
|
50
|
+
"access": "public"
|
|
51
|
+
},
|
|
52
|
+
"files": [
|
|
53
|
+
"src/",
|
|
54
|
+
"README.md",
|
|
55
|
+
"example-config.json"
|
|
56
|
+
]
|
|
57
|
+
}
|
package/src/cli.js
ADDED
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* CLI entry point for api-stress-tester.
|
|
5
|
+
*
|
|
6
|
+
* Usage:
|
|
7
|
+
* npx express-api-stress-tester config.json
|
|
8
|
+
* node src/cli.js config.json
|
|
9
|
+
*/
|
|
10
|
+
import { readFileSync } from 'node:fs';
|
|
11
|
+
import { resolve } from 'node:path';
|
|
12
|
+
import { runStressTest } from './runner.js';
|
|
13
|
+
|
|
14
|
+
/**
 * CLI driver: parse argv, load the JSON config file, and run the stress test.
 *
 * Exit codes:
 *   0 – help was shown, or the test ran and ended with result PASSED
 *       (error rate below the threshold in metrics.js)
 *   1 – config could not be read/parsed, the run threw, or the test FAILED
 */
async function main() {
  const args = process.argv.slice(2);

  // Help path: no args, or an explicit --help/-h flag anywhere in argv.
  if (args.length === 0 || args.includes('--help') || args.includes('-h')) {
    console.log(`
express-api-stress-tester – High-performance API stress testing tool

Usage:
  express-api-stress-tester <config.json>

Options:
  --help, -h   Show this help message

Example config.json:
  {
    "url": "https://api.example.com/users",
    "method": "POST",
    "concurrency": 5000,
    "duration": 30,
    "headers": { "Content-Type": "application/json" },
    "payload": { "name": "{name}", "email": "{email}" }
  }
`);
    process.exit(0);
  }

  const configPath = resolve(args[0]);

  // Load and parse the config; any filesystem or JSON error is fatal.
  let config;
  try {
    const raw = readFileSync(configPath, 'utf-8');
    config = JSON.parse(raw);
  } catch (err) {
    console.error(`Error reading config file: ${err.message}`);
    process.exit(1);
  }

  // Run the test; the exit code mirrors the PASSED/FAILED verdict.
  try {
    const summary = await runStressTest(config);
    process.exit(summary.result === 'PASSED' ? 0 : 1);
  } catch (err) {
    console.error(`Stress test failed: ${err.message}`);
    process.exit(1);
  }
}

// Fix: main() previously returned a floating promise — a rejection escaping
// the try/catch blocks above would surface as an unhandled rejection rather
// than a clean non-zero exit. Attach a terminal catch handler.
main().catch((err) => {
  console.error(`Unexpected error: ${err.message}`);
  process.exit(1);
});
|
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
import { randomUUID } from 'node:crypto';
|
|
2
|
+
|
|
3
|
+
// Pre-computed pools for fast random selection
|
|
4
|
+
// Pre-computed pools for fast random selection
const FIRST_NAMES = [
  'Alice', 'Bob', 'Charlie', 'Diana', 'Eve', 'Frank', 'Grace', 'Hank',
  'Ivy', 'Jack', 'Karen', 'Leo', 'Mona', 'Nick', 'Olivia', 'Paul',
  'Quinn', 'Rita', 'Sam', 'Tina', 'Uma', 'Vince', 'Wendy', 'Xander',
  'Yara', 'Zane', 'Aria', 'Ben', 'Cleo', 'Derek'
];

const LAST_NAMES = [
  'Smith', 'Johnson', 'Williams', 'Brown', 'Jones', 'Garcia', 'Miller',
  'Davis', 'Rodriguez', 'Martinez', 'Hernandez', 'Lopez', 'Gonzalez',
  'Wilson', 'Anderson', 'Thomas', 'Taylor', 'Moore', 'Jackson', 'Martin'
];

const BOT_PREFIXES = [
  'Alpha', 'Beta', 'Gamma', 'Delta', 'Sigma', 'Omega', 'Nova', 'Turbo',
  'Hyper', 'Ultra', 'Mega', 'Giga', 'Nano', 'Cyber', 'Robo', 'Auto'
];

const BOT_SUFFIXES = [
  'Bot', 'Agent', 'Helper', 'Runner', 'Worker', 'Pilot', 'Guard', 'Scout'
];

const EMAIL_DOMAINS = [
  'example.com', 'test.io', 'mail.org', 'demo.net', 'sample.dev'
];

/**
 * Pick a random element from an array.
 * Uses Math.random for speed (no need for crypto-grade randomness here).
 */
const pick = (arr) => arr[Math.floor(Math.random() * arr.length)];

/**
 * Map of placeholder tokens to generator functions.
 * Each generator returns a fresh random value on every call.
 */
export const generators = {
  '{name}': () => `${pick(FIRST_NAMES)} ${pick(LAST_NAMES)}`,
  '{botName}': () => `${pick(BOT_PREFIXES)}${pick(BOT_SUFFIXES)}`,
  '{email}': () => {
    const user = `${pick(FIRST_NAMES).toLowerCase()}${Math.floor(Math.random() * 10000)}`;
    return `${user}@${pick(EMAIL_DOMAINS)}`;
  },
  '{uuid}': () => randomUUID(),
  '{number}': () => Math.floor(Math.random() * 1_000_000),
  '{timestamp}': () => Date.now(),
};

/**
 * Resolve a single placeholder string.
 *
 * If the entire value is a known token, the generator's raw output is
 * returned (so '{number}' yields a number, not a string). Otherwise every
 * embedded token occurrence is replaced with a fresh stringified value.
 * Non-string values pass through untouched.
 */
export function resolveValue(value) {
  if (typeof value !== 'string') return value;

  // Fast path: the whole value is a known token.
  // Fix: use Object.hasOwn instead of a bare `generators[value] !== undefined`
  // lookup — a plain property access also matches inherited Object.prototype
  // members, so payload values like "toString" or "constructor" were being
  // "resolved" by calling the inherited function instead of passing through.
  if (Object.hasOwn(generators, value)) {
    return generators[value]();
  }

  // Slow path: replace every embedded occurrence, each with a unique value.
  // Split/rejoin never rescans freshly generated output for the same token,
  // so a generator that happened to emit a token string cannot cause the
  // infinite loop the previous while/replace approach risked.
  let result = value;
  for (const [token, gen] of Object.entries(generators)) {
    if (!result.includes(token)) continue;
    result = result
      .split(token)
      .reduce((acc, part, i) => (i === 0 ? part : acc + String(gen()) + part));
  }
  return result;
}
|
package/src/index.js
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
/**
 * express-api-stress-tester – public API
 *
 * Usage:
 *   import { stressTest } from 'express-api-stress-tester';
 *   const result = await stressTest({ url: '...', concurrency: 5000, duration: 30 });
 */
// Main entry point: runs a full stress test and resolves with the summary
// object produced by MetricsCollector.getSummary().
export { runStressTest as stressTest } from './runner.js';
// Payload helpers: recursive template resolution, plus single / bulk
// (round-robin by request index) payload selection.
export { parsePayload, getPayload } from './payloadParser.js';
// Placeholder generator table ({name}, {email}, {uuid}, ...) and the string
// resolver that applies it.
export { generators, resolveValue } from './dynamicGenerators.js';
// Aggregate-counter metrics collector used by the runner; exported so
// embedders can build custom reporting.
export { MetricsCollector } from './metrics.js';
package/src/logger.js
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
import { appendFileSync } from 'node:fs';
|
|
2
|
+
|
|
3
|
+
// Default destination for appended test reports.
const REPORT_FILE = 'stress-test-report.txt';

/**
 * Emit a timestamped log line on stdout.
 * @param {string} message - text to log
 */
export function log(message) {
  process.stdout.write(`[${new Date().toISOString()}] ${message}\n`);
}

/**
 * Render the final test report, append it to the report file, and echo it
 * to stdout.
 *
 * @param {object} config - test configuration (url, method, concurrency)
 * @param {object} summary - summary from MetricsCollector.getSummary()
 * @param {string} [reportPath] - destination file; defaults to REPORT_FILE
 * @returns {string} the rendered report text
 */
export function writeReport(config, summary, reportPath) {
  const divider = '='.repeat(50);

  const report = [
    divider,
    ` API Stress Test Report`,
    divider,
    `API URL: ${config.url}`,
    `Method: ${(config.method || 'GET').toUpperCase()}`,
    `Concurrent Users: ${config.concurrency || 1}`,
    `Duration (s): ${summary.elapsedSeconds}`,
    `Total Requests: ${summary.totalRequests}`,
    `Requests/sec: ${summary.requestsPerSec}`,
    `Avg Response Time: ${summary.avgResponseTime}ms`,
    `Error Rate: ${summary.errorRate}%`,
    `Success Rate: ${summary.successRate}%`,
    `CPU Usage: ${summary.cpuPercent}%`,
    `Memory Usage: ${summary.memoryMB}MB`,
    `Result: ${summary.result}`,
    divider,
    '',
  ].join('\n');

  const target = reportPath || REPORT_FILE;

  // Append (not overwrite) so successive runs accumulate in one file; a
  // write failure must not abort the test run, so report it and continue.
  try {
    appendFileSync(target, `${report}\n`);
  } catch (err) {
    process.stderr.write(`Failed to write report: ${err.message}\n`);
  }

  // Echo the same report for interactive runs.
  process.stdout.write(`${report}\n`);

  return report;
}
|
package/src/metrics.js
ADDED
|
@@ -0,0 +1,94 @@
|
|
|
1
|
+
import { cpus } from 'node:os';
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* Lightweight metrics collector.
|
|
5
|
+
* Stores only aggregate counters to avoid memory bloat at high concurrency.
|
|
6
|
+
*/
|
|
7
|
+
/**
 * Lightweight metrics collector.
 *
 * Keeps nothing but running totals (request/success/error counts plus a
 * response-time sum), so memory stays flat regardless of how many requests
 * the test fires.
 */
export class MetricsCollector {
  constructor() {
    this.totalRequests = 0;
    this.successCount = 0;
    this.errorCount = 0;
    this.totalResponseTime = 0; // accumulated milliseconds across all requests
    this.startTime = 0;
    this.endTime = 0;
  }

  /** Mark the beginning of the measurement window. */
  start() {
    this.startTime = Date.now();
  }

  /** Mark the end of the measurement window. */
  stop() {
    this.endTime = Date.now();
  }

  /**
   * Record one finished request.
   * @param {number} responseTimeMs - wall-clock duration of the request
   * @param {boolean} isError - whether the request counts as failed
   */
  record(responseTimeMs, isError) {
    this.totalRequests += 1;
    this.totalResponseTime += responseTimeMs;
    if (isError) {
      this.errorCount += 1;
    } else {
      this.successCount += 1;
    }
  }

  /**
   * Fold a worker thread's partial counters into this collector.
   * Missing counter fields contribute zero.
   */
  merge(partial) {
    this.totalRequests += partial.totalRequests || 0;
    this.successCount += partial.successCount || 0;
    this.errorCount += partial.errorCount || 0;
    this.totalResponseTime += partial.totalResponseTime || 0;
  }

  /**
   * Snapshot process resource usage.
   * NOTE: cpuPercent is derived from os.cpus() cumulative tick counters,
   * i.e. average utilisation since boot — not a sample over the test window.
   */
  static getResourceUsage() {
    const { heapUsed } = process.memoryUsage();
    const totals = cpus().reduce(
      (acc, core) => {
        const { user, nice, sys, idle, irq } = core.times;
        acc.tick += user + nice + sys + idle + irq;
        acc.idle += idle;
        return acc;
      },
      { tick: 0, idle: 0 }
    );
    return {
      cpuPercent: ((1 - totals.idle / totals.tick) * 100).toFixed(1),
      memoryMB: (heapUsed / 1024 / 1024).toFixed(1),
    };
  }

  /**
   * Build the final summary object: throughput, latency and rate figures,
   * resource usage, and the PASSED/FAILED verdict (error rate < 5% passes).
   */
  getSummary() {
    // Guard against a zero-length window (stop() before the clock ticked).
    const elapsed = (this.endTime - this.startTime) / 1000 || 1;
    const total = this.totalRequests;

    // Percentage of `count` over all requests, rounded to one decimal.
    const ratePct = (count) =>
      total > 0 ? parseFloat(((count / total) * 100).toFixed(1)) : 0;

    const errorRate = ratePct(this.errorCount);
    const successRate = ratePct(this.successCount);
    const { cpuPercent, memoryMB } = MetricsCollector.getResourceUsage();

    return {
      totalRequests: total,
      requestsPerSec: Number((total / elapsed).toFixed(0)),
      avgResponseTime:
        total > 0 ? Number((this.totalResponseTime / total).toFixed(0)) : 0,
      errorRate,
      successRate,
      cpuPercent,
      memoryMB,
      result: errorRate < 5 ? 'PASSED' : 'FAILED',
      elapsedSeconds: elapsed.toFixed(1),
    };
  }
}
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
import { resolveValue } from './dynamicGenerators.js';
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* Deep-clone a plain object/array and resolve every dynamic placeholder.
|
|
5
|
+
* Works recursively so nested payloads are supported.
|
|
6
|
+
*/
|
|
7
|
+
/**
 * Recursively clone a payload template, resolving every dynamic placeholder
 * string (e.g. "{name}", "{uuid}") via resolveValue. Arrays and nested
 * objects are supported; non-string leaves pass through untouched.
 */
export function parsePayload(template) {
  if (template == null) return template;

  if (Array.isArray(template)) {
    return template.map((item) => parsePayload(item));
  }

  if (typeof template === 'object') {
    return Object.fromEntries(
      Object.entries(template).map(([key, value]) => [key, parsePayload(value)])
    );
  }

  return resolveValue(template);
}

/**
 * Produce the resolved payload for one request.
 *
 * Bulk mode (config.payloads – a non-empty array) picks a template
 * round-robin by request index; otherwise config.payload is used; otherwise
 * undefined is returned and no body is sent.
 */
export function getPayload(config, requestIndex) {
  const { payloads, payload } = config;

  if (Array.isArray(payloads) && payloads.length > 0) {
    return parsePayload(payloads[requestIndex % payloads.length]);
  }

  return payload ? parsePayload(payload) : undefined;
}
|
package/src/runner.js
ADDED
|
@@ -0,0 +1,116 @@
|
|
|
1
|
+
import { Worker } from 'node:worker_threads';
|
|
2
|
+
import { cpus } from 'node:os';
|
|
3
|
+
import { fileURLToPath } from 'node:url';
|
|
4
|
+
import { dirname, join } from 'node:path';
|
|
5
|
+
import { MetricsCollector } from './metrics.js';
|
|
6
|
+
import { log, writeReport } from './logger.js';
|
|
7
|
+
|
|
8
|
+
// Resolve this module's directory so the worker script can be located by an
// absolute path, independent of the process's working directory.
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const WORKER_PATH = join(__dirname, 'worker.js');

// Max workers = number of CPU cores (minus 1 reserved for the main thread, min 1)
const DEFAULT_NUM_WORKERS = Math.max(1, cpus().length - 1);
// Upper bound on tasks dispatched per batch per worker, so a single round
// never floods a worker's event loop with too many in-flight requests.
const BATCH_SIZE = 200;

/**
 * Main test runner.
 *
 * Spawns worker threads and distributes request tasks across them
 * for the configured duration. Collects metrics and writes a report.
 *
 * Flow:
 *   1. validate config, spawn up to (cores - 1) workers (never more than
 *      the requested concurrency)
 *   2. repeatedly post a batch of task indices to every worker and wait
 *      for each worker's 'result' reply, until the duration expires
 *   3. stop the workers, aggregate their counters, write the report
 *
 * @param {object} config - stress test configuration; url is required,
 *   method/headers/payload/payloads/concurrency/duration are optional
 * @param {object} [options] - optional overrides
 * @param {string} [options.reportPath] - custom report file path
 * @returns {Promise<object>} summary metrics (see MetricsCollector.getSummary)
 * @throws {Error} when config.url is missing
 */
export async function runStressTest(config, options = {}) {
  // Validate the only hard requirement up front.
  if (!config || !config.url) {
    throw new Error('config.url is required');
  }
  const concurrency = config.concurrency || 1;
  const duration = config.duration || 10; // seconds

  log(`Starting stress test → ${config.url}`);
  log(`Concurrency: ${concurrency} | Duration: ${duration}s`);

  // Never spawn more workers than requested concurrent users.
  const numWorkers = Math.min(DEFAULT_NUM_WORKERS, concurrency);
  const metrics = new MetricsCollector();

  // Spawn the pool; each worker receives the full config as workerData.
  const workers = [];
  for (let i = 0; i < numWorkers; i++) {
    const w = new Worker(WORKER_PATH, { workerData: config });
    workers.push(w);
  }

  // Persistent listeners: every 'result' message carries a worker's partial
  // counters for the batch it just finished; fold them into the aggregate.
  // (The dispatch loop below attaches a second, temporary 'message' listener
  // per batch purely to detect batch completion — both run per message.)
  for (const w of workers) {
    w.on('message', (msg) => {
      if (msg.type === 'result') {
        metrics.merge(msg.metrics);
      }
    });
    w.on('error', (err) => {
      log(`Worker error: ${err.message}`);
    });
  }

  // Dispatch loop: send batches of task indices to all workers in lock-step
  // until the duration expires. taskIndex grows monotonically so bulk
  // payload mode can select payloads round-robin by index (see getPayload).
  // NOTE: the expiry check runs between rounds, so the test may overshoot
  // the configured duration by up to one batch round-trip.
  metrics.start();
  const endAt = Date.now() + duration * 1000;
  let taskIndex = 0;

  while (Date.now() < endAt) {
    const batchPromises = [];

    for (const w of workers) {
      const tasks = [];
      // Each worker's share of the concurrency target, capped at BATCH_SIZE.
      const batchLimit = Math.min(
        BATCH_SIZE,
        Math.ceil(concurrency / numWorkers)
      );
      for (let j = 0; j < batchLimit; j++) {
        tasks.push(taskIndex++);
      }
      // Temporary per-batch listener: resolves once this worker posts the
      // batch's 'result', then detaches itself to avoid listener buildup.
      const p = new Promise((resolve) => {
        const handler = (msg) => {
          if (msg.type === 'result') {
            w.removeListener('message', handler);
            resolve();
          }
        };
        w.on('message', handler);
        w.postMessage({ type: 'batch', tasks });
      });
      batchPromises.push(p);
    }

    // Wait for all workers to finish this round before dispatching the next.
    await Promise.all(batchPromises);
  }

  metrics.stop();

  // Teardown: ask each worker to exit (worker calls process.exit(0) on
  // 'stop') and wait for its 'exit' event before reporting.
  const termPromises = workers.map(
    (w) =>
      new Promise((resolve) => {
        w.on('exit', resolve);
        w.postMessage({ type: 'stop' });
      })
  );
  await Promise.all(termPromises);

  // Build the summary and persist/print the human-readable report.
  const summary = metrics.getSummary();
  writeReport(config, summary, options.reportPath);

  return summary;
}
|
package/src/worker.js
ADDED
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Worker thread entry point.
|
|
3
|
+
*
|
|
4
|
+
* Receives task batches from the main thread and executes HTTP requests
|
|
5
|
+
* using undici's high-performance Pool.
|
|
6
|
+
*
|
|
7
|
+
* Communication is via parentPort messages:
|
|
8
|
+
* main → worker : { type: 'batch', tasks: [...] }
|
|
9
|
+
* main → worker : { type: 'stop' }
|
|
10
|
+
* worker → main : { type: 'result', metrics: {...} }
|
|
11
|
+
*/
|
|
12
|
+
import { parentPort, workerData } from 'node:worker_threads';
|
|
13
|
+
import { request } from 'undici';
|
|
14
|
+
import { getPayload } from './payloadParser.js';
|
|
15
|
+
|
|
16
|
+
// Per-thread stress-test configuration, handed over by the runner when it
// constructs this Worker (see runner.js).
const config = workerData;

// Aggregate counters for this worker. They accumulate over one batch and are
// reset by the message handler after each 'result' post back to the runner.
let totalRequests = 0;
let successCount = 0;
let errorCount = 0;
let totalResponseTime = 0;

/**
 * Fire one HTTP request and fold the outcome into the worker counters.
 *
 * A request counts as an error when the call throws or the response status
 * is 400 or above. Timing covers the full round trip, including draining
 * the response body.
 *
 * @param {number} taskIndex - global request index; selects the payload in
 *   bulk (round-robin) mode via getPayload
 */
async function executeRequest(taskIndex) {
  const startedAt = Date.now();
  let failed = false;

  try {
    const method = (config.method || 'GET').toUpperCase();
    const requestOptions = {
      method,
      headers: config.headers || {},
    };

    const payload = getPayload(config, taskIndex);
    if (payload !== undefined && method !== 'GET') {
      requestOptions.body = JSON.stringify(payload);
    }

    const response = await request(config.url, requestOptions);

    // Drain the body so undici can release/reuse the connection.
    await response.body.text();

    failed = response.statusCode >= 400;
  } catch {
    failed = true;
  }

  totalResponseTime += Date.now() - startedAt;
  totalRequests++;
  if (failed) {
    errorCount++;
  } else {
    successCount++;
  }
}
|
|
63
|
+
|
|
64
|
+
// Message loop: the runner drives this worker entirely through messages.
//   { type: 'batch', tasks } → run all task indices concurrently, reply
//                              with partial counters, then reset them
//   { type: 'stop' }         → terminate the worker process
parentPort.on('message', async (msg) => {
  if (msg.type === 'stop') {
    process.exit(0);
  }
  if (msg.type !== 'batch') {
    return;
  }

  // Fire every request in the batch concurrently and wait for all of them.
  await Promise.all(msg.tasks.map((taskIndex) => executeRequest(taskIndex)));

  // Ship this batch's partial counters back to the main thread.
  parentPort.postMessage({
    type: 'result',
    metrics: { totalRequests, successCount, errorCount, totalResponseTime },
  });

  // Zero the counters so the next batch reports only its own work.
  totalRequests = 0;
  successCount = 0;
  errorCount = 0;
  totalResponseTime = 0;
});
|