@depup/artillery 2.0.30-depup.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +63 -0
- package/bin/run +29 -0
- package/bin/run.cmd +3 -0
- package/changes.json +138 -0
- package/console-reporter.js +1 -0
- package/lib/artillery-global.js +33 -0
- package/lib/cli/banner.js +8 -0
- package/lib/cli/common-flags.js +80 -0
- package/lib/cli/hooks/version.js +20 -0
- package/lib/cmds/dino.js +109 -0
- package/lib/cmds/quick.js +122 -0
- package/lib/cmds/report.js +34 -0
- package/lib/cmds/run-aci.js +91 -0
- package/lib/cmds/run-fargate.js +192 -0
- package/lib/cmds/run-lambda.js +96 -0
- package/lib/cmds/run.js +671 -0
- package/lib/console-capture.js +92 -0
- package/lib/console-reporter.js +438 -0
- package/lib/create-bom/built-in-plugins.js +12 -0
- package/lib/create-bom/create-bom.js +301 -0
- package/lib/dispatcher.js +9 -0
- package/lib/dist.js +222 -0
- package/lib/index.js +5 -0
- package/lib/launch-platform.js +439 -0
- package/lib/load-plugins.js +113 -0
- package/lib/platform/aws/aws-cloudwatch.js +106 -0
- package/lib/platform/aws/aws-create-sqs-queue.js +58 -0
- package/lib/platform/aws/aws-ensure-s3-bucket-exists.js +78 -0
- package/lib/platform/aws/aws-get-account-id.js +26 -0
- package/lib/platform/aws/aws-get-bucket-region.js +18 -0
- package/lib/platform/aws/aws-get-credentials.js +28 -0
- package/lib/platform/aws/aws-get-default-region.js +26 -0
- package/lib/platform/aws/aws-whoami.js +15 -0
- package/lib/platform/aws/constants.js +7 -0
- package/lib/platform/aws/iam-cf-templates/aws-iam-fargate-cf-template.yml +219 -0
- package/lib/platform/aws/iam-cf-templates/aws-iam-lambda-cf-template.yml +125 -0
- package/lib/platform/aws/iam-cf-templates/gh-oidc-fargate.yml +241 -0
- package/lib/platform/aws/iam-cf-templates/gh-oidc-lambda.yml +153 -0
- package/lib/platform/aws-ecs/ecs.js +247 -0
- package/lib/platform/aws-ecs/legacy/aws-util.js +134 -0
- package/lib/platform/aws-ecs/legacy/bom.js +528 -0
- package/lib/platform/aws-ecs/legacy/constants.js +27 -0
- package/lib/platform/aws-ecs/legacy/create-s3-client.js +24 -0
- package/lib/platform/aws-ecs/legacy/create-test.js +247 -0
- package/lib/platform/aws-ecs/legacy/errors.js +34 -0
- package/lib/platform/aws-ecs/legacy/find-public-subnets.js +149 -0
- package/lib/platform/aws-ecs/legacy/plugins/artillery-plugin-inspect-script/index.js +27 -0
- package/lib/platform/aws-ecs/legacy/plugins/artillery-plugin-sqs-reporter/azure-aqs.js +80 -0
- package/lib/platform/aws-ecs/legacy/plugins/artillery-plugin-sqs-reporter/index.js +202 -0
- package/lib/platform/aws-ecs/legacy/plugins.js +16 -0
- package/lib/platform/aws-ecs/legacy/run-cluster.js +1994 -0
- package/lib/platform/aws-ecs/legacy/sqs-reporter.js +401 -0
- package/lib/platform/aws-ecs/legacy/tags.js +22 -0
- package/lib/platform/aws-ecs/legacy/test-run-status.js +9 -0
- package/lib/platform/aws-ecs/legacy/time.js +67 -0
- package/lib/platform/aws-ecs/legacy/util.js +97 -0
- package/lib/platform/aws-ecs/worker/Dockerfile +64 -0
- package/lib/platform/aws-ecs/worker/helpers.sh +80 -0
- package/lib/platform/aws-ecs/worker/loadgen-worker +656 -0
- package/lib/platform/aws-lambda/dependencies.js +130 -0
- package/lib/platform/aws-lambda/index.js +734 -0
- package/lib/platform/aws-lambda/lambda-handler/a9-handler-dependencies.js +73 -0
- package/lib/platform/aws-lambda/lambda-handler/a9-handler-helpers.js +43 -0
- package/lib/platform/aws-lambda/lambda-handler/a9-handler-index.js +235 -0
- package/lib/platform/aws-lambda/lambda-handler/package.json +15 -0
- package/lib/platform/aws-lambda/prices.js +29 -0
- package/lib/platform/az/aci.js +694 -0
- package/lib/platform/az/aqs-queue-consumer.js +88 -0
- package/lib/platform/az/regions.js +52 -0
- package/lib/platform/cloud/api.js +72 -0
- package/lib/platform/cloud/cloud.js +448 -0
- package/lib/platform/cloud/http-client.js +19 -0
- package/lib/platform/local/artillery-worker-local.js +154 -0
- package/lib/platform/local/index.js +174 -0
- package/lib/platform/local/worker.js +261 -0
- package/lib/platform/worker-states.js +13 -0
- package/lib/queue-consumer/index.js +56 -0
- package/lib/stash.js +41 -0
- package/lib/telemetry.js +78 -0
- package/lib/util/await-on-ee.js +24 -0
- package/lib/util/generate-id.js +9 -0
- package/lib/util/parse-tag-string.js +21 -0
- package/lib/util/prepare-test-execution-plan.js +216 -0
- package/lib/util/sleep.js +7 -0
- package/lib/util/validate-script.js +132 -0
- package/lib/util.js +294 -0
- package/lib/utils-config.js +31 -0
- package/package.json +323 -0
- package/types.d.ts +317 -0
- package/util.js +1 -0
|
@@ -0,0 +1,401 @@
|
|
|
1
|
+
const EventEmitter = require('node:events');
|
|
2
|
+
|
|
3
|
+
const { Consumer } = require('sqs-consumer');
|
|
4
|
+
const { S3Client, GetObjectCommand } = require('@aws-sdk/client-s3');
|
|
5
|
+
const driftless = require('driftless');
|
|
6
|
+
const debug = require('debug')('sqs-reporter');
|
|
7
|
+
const debugV = require('debug')('sqs-reporter:v');
|
|
8
|
+
|
|
9
|
+
const _ = require('lodash');
|
|
10
|
+
|
|
11
|
+
/**
 * Aggregates metrics and lifecycle events that distributed load-generating
 * workers publish to an SQS queue, and re-emits them locally as events:
 * 'stats' (merged intermediate metrics for one period), 'done' (final packed
 * metrics), 'workerDone' / 'workerError', 'phaseStarted' / 'phaseCompleted',
 * 'workerMessage', 'workersDone' and 'error'.
 *
 * A pool of sqs-consumer instances polls the queue; raw messages are parsed,
 * filtered by testId, and dispatched through an internal EventEmitter
 * (`this.ee`) before being merged and re-emitted on `this`.
 */
class SqsReporter extends EventEmitter {
  /**
   * @param {object} opts
   * @param {string} opts.sqsQueueUrl - queue the workers report to
   * @param {string} opts.region - AWS region for SQS/S3 clients
   * @param {string} opts.testId - only messages tagged with this testId are processed
   * @param {number} opts.count - number of workers expected to report
   */
  constructor(opts) {
    super();

    this.sqsQueueUrl = opts.sqsQueueUrl;
    this.region = opts.region;
    this.testId = opts.testId;
    this.count = opts.count;

    // Periods (timestamp keys) for which a 'stats' event has already been emitted.
    this.periodsReportedFor = [];

    // Internal bus: raw SQS messages are funneled here by the consumers.
    this.ee = new EventEmitter();

    // workerId -> 'workerDone' | 'workerError'; used to detect completion.
    this.workerState = {};
    this.lastIntermediateReportAt = 0;
    this.taskWatcher = null;

    this.metricsByPeriod = {}; // individual intermediates by worker
    this.mergedPeriodMetrics = []; // merged intermediates for a period

    //TODO: this code is repeated from `launch-platform.js` - refactor later
    this.phaseStartedEventsSeen = {};
    this.phaseCompletedEventsSeen = {};

    // Debug info:
    this.messagesProcessed = {};
    this.metricsMessagesFromWorkers = {};

    // Number of parallel SQS consumers. Defaults to max(ceil(count/10), 75);
    // overridable via SQS_CONSUMER_POOL_SIZE.
    this.poolSize =
      typeof process.env.SQS_CONSUMER_POOL_SIZE !== 'undefined'
        ? parseInt(process.env.SQS_CONSUMER_POOL_SIZE, 10)
        : Math.max(Math.ceil(this.count / 10), 75);

    // S3 client is only created when an overflow bucket is configured;
    // oversized payloads are then fetched via `_overflowRef` keys.
    this.s3 = null;
    this.s3Bucket = process.env.ARTILLERY_S3_BUCKET || null;
    if (this.s3Bucket) {
      this.s3 = new S3Client({ region: opts.region });
    }
  }

  // True once every expected worker has sent a workerDone/workerError event.
  _allWorkersDone() {
    return Object.keys(this.workerState).length === this.count;
  }

  /**
   * Fetch and JSON-parse an overflow payload that a worker stored in S3
   * because it was too large for an SQS message.
   * @param {string} s3Key - object key referenced by the SQS message
   * @returns {Promise<object>} parsed payload
   */
  async _fetchFromS3(s3Key) {
    const response = await this.s3.send(
      new GetObjectCommand({
        Bucket: this.s3Bucket,
        Key: s3Key
      })
    );
    return JSON.parse(await response.Body.transformToString());
  }

  // Stop all SQS consumers. NOTE(review): assumes start() has run, otherwise
  // this.sqsConsumers is undefined - confirm callers never stop() before start().
  stop() {
    debug('stopping');
    for (const sqsConsumer of this.sqsConsumers) {
      sqsConsumer.stop();
    }
  }

  /**
   * Start polling: sets up three driftless intervals (debug counter dump,
   * intermediate-report merger, all-workers-done watcher), the internal
   * message dispatcher, and the SQS consumer pool.
   */
  start() {
    debug('starting');

    // Every 10s: dump per-consumer processed-message counts (debug only).
    this.sqsDebugInterval = driftless.setDriftlessInterval(() => {
      debug(this.messagesProcessed);
      let total = 0;
      for (const [_k, v] of Object.entries(this.messagesProcessed)) {
        total += v;
      }
      debug('total:', total);
    }, 10 * 1000);

    // Every 5s: if a complete (or timed-out) period of worker metrics is
    // available, merge it and emit a 'stats' event.
    this.intermediateReporterInterval = driftless.setDriftlessInterval(() => {
      if (Object.keys(this.metricsByPeriod).length === 0) {
        return; // nothing received yet
      }

      // We always look at the earliest period available so that reports come in chronological order
      const earliestPeriodAvailable = Object.keys(this.metricsByPeriod)
        .filter((x) => this.periodsReportedFor.indexOf(x) === -1)
        .sort()[0];

      // TODO: better name. One above is earliestNotAlreadyReported
      const earliest = Object.keys(this.metricsByPeriod).sort()[0];
      if (this.periodsReportedFor.indexOf(earliest) > -1) {
        // A period we already reported on received late metrics - warn and drop.
        global.artillery.log(
          'Warning: multiple batches of metrics for period',
          earliest,
          new Date(Number(earliest))
        );
        delete this.metricsByPeriod[earliest]; // FIXME: need to merge them in for the final report
      }

      // We can process SQS messages in batches of 10 at a time, so
      // when there are more workers, we need to wait longer:
      const MAX_WAIT_FOR_PERIOD_MS =
        (Math.ceil(this.count / 10) * 2 + 20) * 1000;

      if (
        typeof earliestPeriodAvailable !== 'undefined' &&
        (this.metricsByPeriod[earliestPeriodAvailable].length === this.count ||
          Date.now() - Number(earliestPeriodAvailable) > MAX_WAIT_FOR_PERIOD_MS)
      ) {
        // TODO: autoscaling. Handle workers that drop off as the first case - self.count needs to be updated dynamically
        debug(
          'have metrics from all workers for period or MAX_WAIT_FOR_PERIOD reached',
          earliestPeriodAvailable
        );

        debug(
          'Report @',
          new Date(Number(earliestPeriodAvailable)),
          'made up of items:',
          this.metricsByPeriod[String(earliestPeriodAvailable)].length
        );

        // TODO: Track how many workers provided metrics in the metrics report
        const stats = global.artillery.__SSMS.mergeBuckets(
          this.metricsByPeriod[String(earliestPeriodAvailable)]
        )[String(earliestPeriodAvailable)];
        this.mergedPeriodMetrics.push(stats);
        // summarize histograms for console reporter
        stats.summaries = {};
        for (const [name, value] of Object.entries(stats.histograms || {})) {
          const summary = global.artillery.__SSMS.summarizeHistogram(value);
          stats.summaries[name] = summary;
          // NOTE(review): this delete sits inside the histogram loop; it is
          // idempotent after the first iteration, and if stats.histograms is
          // empty the period is never deleted - confirm whether intentional.
          delete this.metricsByPeriod[String(earliestPeriodAvailable)];
        }

        this.periodsReportedFor.push(earliestPeriodAvailable);

        debug('Emitting stats event');
        this.emit('stats', stats);
      } else {
        debug('Waiting for more workerStats before emitting stats event');
      }
    }, 5 * 1000);

    // Every 5s: once all workers finished AND no pending intermediate metrics
    // remain, pack everything, emit 'done', and tear the reporter down.
    this.workersDoneWatcher = driftless.setDriftlessInterval(() => {
      if (!this._allWorkersDone()) {
        return;
      }

      // Have we received and processed all intermediate metrics?
      if (Object.keys(this.metricsByPeriod).length > 0) {
        debug(
          'All workers done but still waiting on some intermediate reports'
        );
        return;
      }

      debug('ready to emit done event');
      debug('mergedPeriodMetrics');
      debug(this.mergedPeriodMetrics);

      // Merge by period, then compress and emit
      const stats = global.artillery.__SSMS.pack(this.mergedPeriodMetrics);
      stats.summaries = {};
      for (const [name, value] of Object.entries(stats.histograms || {})) {
        const summary = global.artillery.__SSMS.summarizeHistogram(value);
        stats.summaries[name] = summary;
      }

      if (process.env.DEBUG === 'sqs-reporter:v') {
        for (const [workerId, metrics] of Object.entries(
          this.metricsMessagesFromWorkers
        )) {
          debugV('worker', workerId, '->', metrics.length, 'items');
        }
        // fs.writeFileSync('worker-metrics-dump.json', JSON.stringify(self.metricsMessagesFromWorkers));
      }

      this.emit('done', stats);

      driftless.clearDriftless(this.intermediateReporterInterval);
      driftless.clearDriftless(this.workersDoneWatcher);
      driftless.clearDriftless(this.sqsDebugInterval);

      for (const sqsConsumer of this.sqsConsumers) {
        sqsConsumer.stop();
      }

      this.emit('workersDone', this.workerState);
    }, 5 * 1000);

    // Dispatcher for parsed, testId-validated messages from the consumer pool.
    this.ee.on('message', (body, attrs) => {
      const workerId = attrs.workerId?.StringValue;

      if (!workerId) {
        debug('Got message with no workerId');
        debug(body);
        return;
      }
      if (body.event === 'workerDone' || body.event === 'workerError') {
        this.workerState[workerId] = body.event;
        this.emit(body.event, body, attrs);

        debug(workerId, body.event);
        return;
      }

      //TODO: this code is repeated from `launch-platform.js` - refactor later
      if (body.event === 'phaseStarted') {
        // Only the first worker to report a phase triggers the event.
        if (
          typeof this.phaseStartedEventsSeen[body.phase.index] === 'undefined'
        ) {
          this.phaseStartedEventsSeen[body.phase.index] = Date.now();
          this.emit(body.event, body.phase);
        }

        return;
      }

      //TODO: this code is repeated from `launch-platform.js` - refactor later
      if (body.event === 'phaseCompleted') {
        if (
          typeof this.phaseCompletedEventsSeen[body.phase.index] === 'undefined'
        ) {
          this.phaseCompletedEventsSeen[body.phase.index] = Date.now();
          this.emit(body.event, body.phase);
        }

        return;
      }

      // 'done' event is from SQS Plugin - unused for now
      if (body.event === 'done') {
        return;
      }

      if (body.msg) {
        this.emit('workerMessage', body, attrs);
        return;
      }

      if (body.event === 'workerStats') {
        // v2 SSMS stats
        const workerStats = global.artillery.__SSMS.deserializeMetrics(
          body.stats
        );
        const period = workerStats.period;

        debug(
          'processing workerStats event, worker:',
          workerId,
          'period',
          period
        );

        debugV(workerStats);
        if (typeof this.metricsByPeriod[period] === 'undefined') {
          this.metricsByPeriod[period] = [];
        }
        this.metricsByPeriod[period].push(workerStats);

        // Verbose-debug bookkeeping only; not used for merging.
        if (process.env.DEBUG === 'sqs-reporter:v') {
          if (
            typeof this.metricsMessagesFromWorkers[workerId] === 'undefined'
          ) {
            this.metricsMessagesFromWorkers[workerId] = [];
          }
          this.metricsMessagesFromWorkers[workerId].push(workerStats);
        }

        debugV('metricsByPeriod:');
        debugV(this.metricsByPeriod);
        debug('number of periods processed');
        debug(Object.keys(this.metricsByPeriod));
        debug('number of metrics collections for period:', period, ':');
        debug(this.metricsByPeriod[period].length, 'expecting:', this.count);
      }
    });

    this.ee.on('messageReceiveTimeout', () => {
      // TODO: 10 polls with no results, e.g. if all workers crashed
    });

    // Build one sqs-consumer per pool slot. Throwing from handleMessage makes
    // sqs-consumer leave the message on the queue (it is not deleted) -
    // NOTE(review): that appears to be the intended way to skip foreign or
    // malformed messages here; confirm against sqs-consumer semantics.
    const createConsumer = (i) => Consumer.create({
      queueUrl: process.env.SQS_QUEUE_URL || this.sqsQueueUrl,
      region: this.region,
      waitTimeSeconds: 10,
      messageAttributeNames: ['testId', 'workerId'],
      visibilityTimeout: 60,
      batchSize: 10,
      handleMessage: async (message) => {
        let body = null;
        try {
          body = JSON.parse(message.Body);
        } catch (err) {
          console.error(err);
          console.log(message.Body);
        }

        //
        // Ignore any messages that are invalid or not tagged properly.
        //

        if (process.env.LOG_SQS_MESSAGES) {
          console.log(message);
        }

        if (!body) {
          throw new Error();
        }

        // Handle overflow messages stored in S3
        if (body._overflowRef && this.s3 && this.s3Bucket) {
          try {
            debug('Fetching overflow payload from S3: %s', body._overflowRef);
            body = await this._fetchFromS3(body._overflowRef);
          } catch (s3Err) {
            console.error('Failed to fetch overflow message from S3:', s3Err);
            throw new Error(
              `Failed to fetch overflow message: ${body._overflowRef}`
            );
          }
        }

        const attrs = message.MessageAttributes;
        if (!attrs || !attrs.testId) {
          throw new Error();
        }

        if (this.testId !== attrs.testId.StringValue) {
          throw new Error();
        }

        // Per-consumer processed counter (debug only).
        if (!this.messagesProcessed[i]) {
          this.messagesProcessed[i] = 0;
        }
        this.messagesProcessed[i] += 1;

        // Defer dispatch so handleMessage resolves (and the message is
        // deleted) before listeners run.
        process.nextTick(() => {
          this.ee.emit('message', body, attrs);
        });
      }
    });

    this.sqsConsumers = [];
    for (let i = 0; i < this.poolSize; i++) {
      const sqsConsumer = createConsumer(i);

      sqsConsumer.on('error', (err) => {
        // TODO: Ignore "SQSError: SQS delete message failed:" errors
        // Expired receipt handles are expected under load - just log them.
        if (err.message?.match(/ReceiptHandle.+expired/i)) {
          debug(err.name, err.message);
        } else {
          artillery.log(err);
          sqsConsumer.stop();
          this.emit('error', err);
        }
      });

      // NOTE(review): `empty` is per-consumer and never reset; after 10 empty
      // polls this consumer emits messageReceiveTimeout on every further empty
      // poll - confirm this is the intended behavior.
      let empty = 0;
      sqsConsumer.on('empty', () => {
        empty++;
        if (empty > 10) {
          this.ee.emit('messageReceiveTimeout'); // TODO:
        }
      });
      sqsConsumer.start();

      this.sqsConsumers.push(sqsConsumer);
    }
  }

  // Given a (combined) stats object, what's the difference between the
  // time of earliest and latest requests made?
  // Returns the spread in seconds, rounded to one decimal place.
  calculateSpread(stats) {
    const period = _.reduce(
      stats._requestTimestamps,
      (acc, ts) => {
        acc.min = Math.min(acc.min, ts);
        acc.max = Math.max(acc.max, ts);
        return acc;
      },
      { min: Infinity, max: 0 }
    );

    const spread = round((period.max - period.min) / 1000, 1);
    return spread;
  }
}
|
|
395
|
+
|
|
396
|
+
// Round `number` to `decimals` decimal places.
function round(number, decimals) {
  const factor = Math.pow(10, decimals);
  const scaled = Math.round(number * factor);
  return scaled / factor;
}
|
|
400
|
+
|
|
401
|
+
// Named export so consumers destructure: const { SqsReporter } = require(...)
module.exports = { SqsReporter };
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
|
|
2
|
+
|
|
3
|
+
/**
 * Parse a comma-separated list of `name:value` tag pairs.
 * Entries that do not contain exactly one colon are skipped with a
 * console warning. Whitespace around entries, names and values is trimmed.
 *
 * @param {string|undefined} input - e.g. "env:prod, team:qa"
 * @returns {Array<{name: string, value: string}>} parsed tags (empty if input is falsy)
 */
function parseTags(input) {
  const tags = [];
  if (input) {
    const entries = input.split(',').map((piece) => piece.trim());
    for (const entry of entries) {
      const parts = entry.split(':');
      if (parts.length !== 2) {
        console.error(`Invalid tag, skipping: ${entry}`);
        continue;
      }
      const [name, value] = parts;
      tags.push({ name: name.trim(), value: value.trim() });
    }
  }

  return tags;
}
|
|
19
|
+
|
|
20
|
+
// Public API of this module.
module.exports = {
  parseTags
};
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
|
|
2
|
+
|
|
3
|
+
const EventEmitter = require('node:events');
|
|
4
|
+
const driftless = require('driftless');
|
|
5
|
+
|
|
6
|
+
/**
 * Resolve after approximately `ms` milliseconds.
 * @param {number} ms - delay in milliseconds
 * @returns {Promise<void>}
 */
async function sleep(ms) {
  return new Promise((done) => {
    setTimeout(done, ms);
  });
}
|
|
9
|
+
|
|
10
|
+
/**
 * One-shot timer that emits a 'timeout' event after `duration` milliseconds.
 * Backed by driftless timers to avoid accumulated clock drift.
 */
class Timeout extends EventEmitter {
  /**
   * @param {number} duration - timeout length in milliseconds
   */
  constructor(duration) {
    super();
    this._duration = duration;
    this._startedAt = null;
  }

  /** Arm the timer. Returns `this` for chaining. */
  start() {
    this._startedAt = Date.now();
    this._timeout = driftless.setDriftlessTimeout(
      () => this.emit('timeout'),
      this._duration
    );
    return this;
  }

  /** Cancel a pending timer. Returns `this` for chaining. */
  stop() {
    driftless.clearDriftless(this._timeout);
    return this;
  }

  /** True once more than `duration` ms have elapsed since start(). */
  timedout() {
    const elapsed = Date.now() - this._startedAt;
    return elapsed > this._duration;
  }
}
|
|
34
|
+
|
|
35
|
+
// Turn a string like 2m into number of milliseconds
|
|
36
|
+
// Supported units: ms, s, m, h
|
|
37
|
+
// Turn a string like 2m into number of milliseconds
// Supported units: ms, s, m, h
//
// Fixes over the previous version:
// - the unit was detected with endsWith(), so e.g. '5hrs' silently parsed as
//   5 *seconds* and '5 s' was accepted; the unit is now matched strictly.
// - the numeric part is taken from the regex capture group instead of relying
//   on parseInt() stopping at the first non-digit of the whole match.
//
// @param {string} timeStr - e.g. '500ms', '10s', '2m', '1h'
// @returns {number} duration in milliseconds
// @throws {Error} when the string is malformed or the unit is not supported
function timeStringToMs(timeStr) {
  // One or more digits followed by an alphabetic unit suffix.
  const rx = /^([0-9]+)([a-zA-Z]+)$/;
  const match = timeStr.match(rx);

  if (!match) {
    throw new Error(`Invalid time string: ${timeStr}`);
  }

  const MULTIPLIERS = {
    ms: 1,
    s: 1000,
    m: 60 * 1000,
    h: 60 * 60 * 1000
  };

  const multiplier = MULTIPLIERS[match[2]];
  if (multiplier === undefined) {
    throw new Error(
      `Unknown unit suffix in ${timeStr}. Supported units: ms, s, m, h`
    );
  }

  const n = parseInt(match[1], 10);
  return n * multiplier;
}
|
|
62
|
+
|
|
63
|
+
// Public API of this module.
module.exports = {
  Timeout,
  sleep,
  timeStringToMs
};
|
|
@@ -0,0 +1,97 @@
|
|
|
1
|
+
|
|
2
|
+
|
|
3
|
+
const _debug = require('debug')('artillery:util');
|
|
4
|
+
|
|
5
|
+
const chalk = require('chalk');
|
|
6
|
+
|
|
7
|
+
const _ = require('lodash');
|
|
8
|
+
|
|
9
|
+
const _A = require('async');
|
|
10
|
+
|
|
11
|
+
const createS3Client = require('./create-s3-client');
|
|
12
|
+
|
|
13
|
+
// AWS regions this platform can launch workers in.
// NOTE(review): presumably this must track the regions where the worker
// container image / supporting resources are published - confirm before
// adding new entries.
const supportedRegions = [
  'us-east-1',
  'us-east-2',
  'us-west-1',
  'us-west-2',
  'us-gov-east-1',
  'us-gov-west-1',
  'ca-central-1',
  'eu-west-1',
  'eu-west-2',
  'eu-west-3',
  'eu-central-1',
  'eu-north-1',
  'ap-south-1',
  'ap-east-1',
  'ap-northeast-2',
  'ap-southeast-1',
  'ap-southeast-2',
  'ap-northeast-1',
  'me-south-1',
  'il-central-1',
  'sa-east-1',
  'cn-north-1',
  'cn-northwest-1'
];
|
|
38
|
+
|
|
39
|
+
const getAccountId = require('../../aws/aws-get-account-id');
|
|
40
|
+
|
|
41
|
+
const { S3_BUCKET_NAME_PREFIX } = require('./constants');
|
|
42
|
+
const { paginateListObjectsV2 } = require('@aws-sdk/client-s3');
|
|
43
|
+
|
|
44
|
+
/**
 * Decode a base64 string to ASCII text (Node replacement for browser atob).
 * @param {string} data - base64-encoded input
 * @returns {string} decoded ASCII string
 */
function atob(data) {
  const decoded = Buffer.from(data, 'base64');
  return decoded.toString('ascii');
}
|
|
47
|
+
/**
 * Encode a string as base64 (Node replacement for browser btoa).
 * @param {string} data - input text
 * @returns {string} base64-encoded string
 */
function btoa(data) {
  const raw = Buffer.from(data);
  return raw.toString('base64');
}
|
|
50
|
+
|
|
51
|
+
/**
 * Resolve the S3 bucket name used for test artifacts.
 * An ARTILLERY_S3_BUCKET env var overrides the default
 * `<prefix>-<accountId>` naming scheme.
 * @returns {Promise<string>} bucket name
 */
async function getBucketName() {
  const override = process.env.ARTILLERY_S3_BUCKET;
  if (override) {
    return override;
  }

  const accountId = await getAccountId();
  return `${S3_BUCKET_NAME_PREFIX}-${accountId}`;
}
|
|
61
|
+
|
|
62
|
+
/**
 * Format an error for console output: red "Error" label, message,
 * and the error code in parentheses when present.
 * @param {Error & {code?: string}} err
 * @returns {string} formatted one-line error string
 */
function formatError(err) {
  const label = chalk.red('Error');
  const codeSuffix = err.code ? ` (${err.code})` : '';
  return `${label}: ${err.message}${codeSuffix}`;
}
|
|
67
|
+
|
|
68
|
+
/**
 * List every object under `prefix` in `bucketName`, following
 * ListObjectsV2 pagination to the end.
 * @param {string} bucketName
 * @param {string} prefix
 * @returns {Promise<Array<object>>} all S3 object entries (may be empty)
 */
async function listAllObjectsWithPrefix(bucketName, prefix) {
  const client = createS3Client();
  const results = [];

  const pages = paginateListObjectsV2(
    { client },
    {
      Bucket: bucketName,
      Prefix: prefix,
      MaxKeys: 1000
    }
  );

  for await (const page of pages) {
    // Contents is absent on empty pages.
    results.push(...(page.Contents ?? []));
  }

  return results;
}
|
|
88
|
+
|
|
89
|
+
// Public API of this module.
module.exports = {
  supportedRegions,
  getAccountId,
  atob,
  btoa,
  formatError,
  listAllObjectsWithPrefix,
  getBucketName
};
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
# ********************************
# NOTE: Version we use here needs to be kept consistent with that in
# artillery-engine-playwright.
# ********************************
FROM mcr.microsoft.com/playwright:v1.58.1

# Populated automatically by buildx (e.g. amd64 / arm64).
ARG TARGETARCH

ENV DEBIAN_FRONTEND=noninteractive

# Install aws-lambda-ric build dependencies
RUN apt-get update && apt-get install -y \
  g++ \
  make \
  cmake \
  unzip \
  libcurl4-openssl-dev \
  autoconf \
  libtool \
  python3-pip && pip3 install awscli --break-system-packages && pip3 install azure-cli==2.76.0 --break-system-packages

# Force curl to IPv4 for all subsequent RUN steps.
RUN <<EOT
echo 'ipv4' >> ~/.curlrc
if [ "$TARGETARCH" = "arm64" ]; then
  # Temporal fix for SSL_ERROR_SYSCALL error on arm64
  # see: https://github.com/curl/curl/issues/14154
  echo 'insecure' >> ~/.curlrc
fi
EOT

# Build-time version tag, also exposed to the worker at runtime.
ARG WORKER_VERSION
ENV WORKER_VERSION=$WORKER_VERSION

# Additional dependencies for Fargate
RUN apt-get install -y bash jq pwgen curl git zip tree

# Define custom function directory
ARG FUNCTION_DIR="/artillery"
RUN mkdir -p ${FUNCTION_DIR}
WORKDIR ${FUNCTION_DIR}

# Monorepo sources plus the Lambda handler entrypoint files.
COPY packages packages
COPY packages/artillery/lib/platform/aws-lambda/lambda-handler/ .
COPY package.json package.json

## Copy Fargate worker files
COPY ./packages/artillery/lib/platform/aws-ecs/worker/loadgen-worker /artillery/loadgen-worker
COPY ./packages/artillery/lib/platform/aws-ecs/worker/helpers.sh /artillery/helpers.sh

# Install dependencies
RUN npm install -w artillery --ignore-scripts --omit=dev
RUN npm install aws-lambda-ric

# Trim image size: drop npm cache, unused browsers, and expose the CLI on PATH.
RUN npm cache clean --force \
  && rm ./package.json \
  && rm -rf /root/.cache \
  && ln -s /artillery/node_modules/.bin/artillery /usr/local/bin/artillery \
  && rm -rf /ms-playwright/firefox* \
  && rm -rf /ms-playwright/webkit* \
  && echo "ok"

RUN chmod +x /artillery/loadgen-worker

ENTRYPOINT ["/artillery/packages/artillery/bin/run"]
|