@govuk-pay/cli 0.0.16 → 0.0.18

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (28)
  1. package/package.json +4 -1
  2. package/resources/legacy-ruby-cli/README.md +10 -3
  3. package/resources/legacy-ruby-cli/config/secrets.yml +35 -64
  4. package/resources/legacy-ruby-cli/config/service_secrets.yml +4 -7
  5. package/resources/legacy-ruby-cli/lib/pay_cli/commands/aws.rb +0 -22
  6. package/resources/legacy-ruby-cli/lib/pay_cli/commands/doctor.rb +0 -1
  7. package/resources/legacy-ruby-cli/lib/pay_cli/commands/local/config.rb +4 -0
  8. package/resources/legacy-ruby-cli/lib/pay_cli/commands/local/config.yaml +2 -0
  9. package/resources/legacy-ruby-cli/lib/pay_cli/commands/local/docker.rb +33 -3
  10. package/resources/legacy-ruby-cli/lib/pay_cli/commands/local/files/all.yaml +120 -54
  11. package/resources/legacy-ruby-cli/lib/pay_cli/commands/local/files/card.yaml +25 -42
  12. package/resources/legacy-ruby-cli/lib/pay_cli/commands/local/files/docker-compose.erb +110 -5
  13. package/resources/legacy-ruby-cli/lib/pay_cli/commands/local/files/services/egress/squid.conf +47 -0
  14. package/resources/legacy-ruby-cli/lib/pay_cli/commands/local.rb +40 -9
  15. package/resources/legacy-ruby-cli/lib/pay_cli/commands/ssm.rb +7 -60
  16. package/resources/legacy-ruby-cli/lib/pay_cli/entry_point.rb +0 -11
  17. package/resources/legacy-ruby-cli/lib/pay_cli/environment.rb +1 -1
  18. package/resources/legacy-ruby-cli/lib/pay_cli/secrets.rb +1 -1
  19. package/resources/usageDetails.txt +1 -0
  20. package/src/commands/tunnel.js +370 -0
  21. package/src/core/commandRouter.js +4 -0
  22. package/src/core/constants.js +19 -1
  23. package/resources/legacy-ruby-cli/lib/pay_cli/aws/document.rb +0 -23
  24. package/resources/legacy-ruby-cli/lib/pay_cli/ec2.rb +0 -38
  25. package/resources/legacy-ruby-cli/vulnerability_scan/.nvmrc +0 -1
  26. package/resources/legacy-ruby-cli/vulnerability_scan/generate_vulnerability_report.js +0 -88
  27. package/resources/legacy-ruby-cli/vulnerability_scan/package.json +0 -15
  28. package/resources/legacy-ruby-cli/vulnerability_scan/scan.sh +0 -88
@@ -0,0 +1,370 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.logTunnelCommands = void 0;
7
+ const standardContent_js_1 = require("../core/standardContent.js");
8
+ const client_ec2_1 = require("@aws-sdk/client-ec2");
9
+ const client_ecs_1 = require("@aws-sdk/client-ecs");
10
+ const client_rds_1 = require("@aws-sdk/client-rds");
11
+ const readline_1 = __importDefault(require("readline"));
12
+ const child_process_1 = require("child_process");
13
+ const constants_js_1 = require("../core/constants.js");
14
+ let ec2;
15
+ let ecs;
16
+ const FORMAT = {
17
+ red: '\x1b[31m',
18
+ green: '\x1b[32m',
19
+ yellow: '\x1b[33m',
20
+ reset: '\x1b[0m',
21
+ ul: '\x1b[4m',
22
+ ulstop: '\x1b[24m'
23
+ };
24
+ function logTunnelCommands() {
25
+ console.log(`Commands:
26
+ pay tunnel <ENVIRONMENT> <APP-NAME> # Open tunnel to application <APP-NAME> database in specified environment <ENVIRONMENT>
27
+ pay tunnel help # Describe tunnel command`);
28
+ }
29
+ exports.logTunnelCommands = logTunnelCommands;
30
+ async function tunnelHandler(options) {
31
+ await (0, standardContent_js_1.showHeader)();
32
+ const { environment, application } = parseArguments(options);
33
+ console.log(`Opening a database tunnel to ${environment} ${application}`);
34
+ ec2 = new client_ec2_1.EC2Client();
35
+ ecs = new client_ecs_1.ECSClient();
36
+ let bastionTask = null;
37
+ let tunnel = null;
38
+ try {
39
+ printWarningToUser();
40
+ const database = await getDatabaseDetails(environment, application);
41
+ bastionTask = await startBastion(environment);
42
+ tunnel = openTunnel(bastionTask, database, environment);
43
+ printHowToTunnelText(application, environment, database.EngineVersion);
44
+ await waitForExit();
45
+ await shutdown(environment, tunnel, bastionTask);
46
+ }
47
+ catch (error) {
48
+ if (typeof error === 'string') {
49
+ printError(error);
50
+ }
51
+ else if (error instanceof Error) {
52
+ printError(error.message);
53
+ }
54
+ await shutdown(environment, tunnel, bastionTask);
55
+ process.exit(2);
56
+ }
57
+ }
58
+ exports.default = tunnelHandler;
59
+ function parseArguments(options) {
60
+ if (options.arguments.length !== 2 || options.arguments[0] === 'help') {
61
+ logTunnelCommands();
62
+ process.exit(0);
63
+ }
64
+ const environment = options.arguments[0];
65
+ const application = options.arguments[1];
66
+ if (!constants_js_1.APPLICATIONS.includes(application)) {
67
+ printError(`Invalid application: "${application}". Must be one of ${constants_js_1.APPLICATIONS.join(', ')}`);
68
+ process.exit(2);
69
+ }
70
+ if (!constants_js_1.ENVIRONMENTS.includes(environment)) {
71
+ printError(`Invalid environment: "${environment}". Must be one of ${constants_js_1.ENVIRONMENTS.join(', ')}`);
72
+ process.exit(2);
73
+ }
74
+ return {
75
+ environment, application
76
+ };
77
+ }
78
+ async function waitForExit() {
79
+ const prompt = "\nTo shutdown bastion task and close tunnel type 'exit' or 'Ctrl-C'\n";
80
+ const rl = readline_1.default.createInterface({
81
+ input: process.stdin,
82
+ output: process.stdout
83
+ });
84
+ printGreen(prompt);
85
+ return await new Promise(resolve => {
86
+ rl.on('line', input => {
87
+ if (input === 'exit') {
88
+ rl.close();
89
+ resolve();
90
+ }
91
+ else {
92
+ console.log(`Unknown command ${input}`);
93
+ printGreen(prompt);
94
+ }
95
+ });
96
+ rl.on('SIGINT', () => {
97
+ console.log('Received: Ctrl-C');
98
+ rl.close();
99
+ resolve();
100
+ });
101
+ });
102
+ }
103
+ async function startBastion(environment) {
104
+ const subnetIds = await getBastionSubNets(environment);
105
+ const securityGroupIds = await getBastionSecurityGroups(environment);
106
+ let bastionTask = await launchBastionTask(environment, subnetIds, securityGroupIds);
107
+ bastionTask = await waitForBastionToBeReady(bastionTask, environment);
108
+ return bastionTask;
109
+ }
110
+ async function getBastionSubNets(environment) {
111
+ const subnetCommand = new client_ec2_1.DescribeSubnetsCommand({
112
+ Filters: [{
113
+ Name: 'tag:Name',
114
+ Values: [`${environment}-bastion`]
115
+ }]
116
+ });
117
+ const subnetCommandResponse = await ec2.send(subnetCommand);
118
+ const subnets = subnetCommandResponse.Subnets;
119
+ if (subnets == null || subnets.length < 1) {
120
+ throw new Error(`Failed to find subnets for the bastion in ${environment}`);
121
+ }
122
+ return subnets
123
+ .filter(s => s.SubnetId !== undefined)
124
+ .map((subnet) => subnet.SubnetId);
125
+ }
126
+ async function getBastionSecurityGroups(environment) {
127
+ const securityGroupCommand = new client_ec2_1.DescribeSecurityGroupsCommand({
128
+ Filters: [{
129
+ Name: 'group-name',
130
+ Values: [`${environment}-bastion`]
131
+ }]
132
+ });
133
+ const securityGroupCommandResponse = await ec2.send(securityGroupCommand);
134
+ const securityGroups = securityGroupCommandResponse.SecurityGroups;
135
+ if (securityGroups == null || securityGroups.length < 1) {
136
+ throw new Error(`Failed to find security groups for the bastion in ${environment}`);
137
+ }
138
+ return securityGroups
139
+ .filter(s => s.GroupId !== undefined)
140
+ .map((securityGroup) => securityGroup.GroupId);
141
+ }
142
+ async function launchBastionTask(environment, subnetIds, securityGroupIds) {
143
+ const runTaskCommand = new client_ecs_1.RunTaskCommand({
144
+ cluster: `${environment}-fargate`,
145
+ taskDefinition: `${environment}-bastion`,
146
+ launchType: 'FARGATE',
147
+ enableExecuteCommand: true,
148
+ networkConfiguration: {
149
+ awsvpcConfiguration: {
150
+ subnets: subnetIds,
151
+ securityGroups: securityGroupIds,
152
+ assignPublicIp: 'DISABLED'
153
+ }
154
+ },
155
+ // TODO: Add the developer's name as a Tag or suffix to startedBy.
156
+ startedBy: 'PAY-CLI'
157
+ });
158
+ const runTaskCommandResponse = await ecs.send(runTaskCommand);
159
+ if ((runTaskCommandResponse.failures != null) && runTaskCommandResponse.failures.length > 0) {
160
+ const failureReasons = runTaskCommandResponse.failures
161
+ .map((failure) => failure.reason)
162
+ .join(', ');
163
+ throw new Error(`Failed to run bastion task: ${failureReasons}`);
164
+ }
165
+ else if (runTaskCommandResponse.tasks?.length !== 1) {
166
+ throw new Error(`Expected 1 bastion task but got: ${String(runTaskCommandResponse.tasks?.length)}`);
167
+ }
168
+ else {
169
+ return runTaskCommandResponse.tasks[0];
170
+ }
171
+ }
172
+ async function refreshTask(taskArn, environment) {
173
+ const describeTaskCommand = new client_ecs_1.DescribeTasksCommand({
174
+ cluster: `${environment}-fargate`,
175
+ tasks: [taskArn]
176
+ });
177
+ const describeTaskCommandResponse = await ecs.send(describeTaskCommand);
178
+ if ((describeTaskCommandResponse.failures != null) && describeTaskCommandResponse.failures.length > 0) {
179
+ const failureReasons = describeTaskCommandResponse.failures
180
+ .map((failure) => failure.reason)
181
+ .join(', ');
182
+ throw new Error(`Failed to get task: ${failureReasons}`);
183
+ }
184
+ else if (describeTaskCommandResponse.tasks?.length !== 1) {
185
+ throw new Error(`Expected 1 bastion task but got: ${String(describeTaskCommandResponse.tasks?.length)}`);
186
+ }
187
+ else {
188
+ return describeTaskCommandResponse.tasks[0];
189
+ }
190
+ }
191
+ async function sleep(ms) {
192
+ return await new Promise(resolve => setTimeout(resolve, ms));
193
+ }
194
+ async function waitForBastionToBeReady(task, environment) {
195
+ console.log('Waiting for the bastion task to start');
196
+ let previousStatus;
197
+ while (!isRunningAndConnected(task)) {
198
+ if (task.lastStatus !== previousStatus) {
199
+ previousStatus = task.lastStatus;
200
+ process.stdout.write(`\n\tCurrent status: ${task.lastStatus} `);
201
+ if (task.lastStatus === 'RUNNING') {
202
+ process.stdout.write('\n\tWaiting for the bastion to connect to the network ');
203
+ }
204
+ }
205
+ else {
206
+ process.stdout.write('.');
207
+ }
208
+ await sleep(1000);
209
+ task = await refreshTask(task.taskArn, environment);
210
+ if (task.desiredStatus !== 'RUNNING') {
211
+ console.error(`\tBastion desired state is unexpectedly: ${task.desiredStatus}`);
212
+ throw Error('Bastion task failed to start');
213
+ }
214
+ }
215
+ console.log('\n\tBastion started successfully');
216
+ return task;
217
+ }
218
+ function isRunningAndConnected(task) {
219
+ // RUNNING doesn't mean that the task is ready to accept network connections.
220
+ // The 'ExecuteCommandAgent' must be running within the container before ssm
221
+ // can connect. Note: task.connectivity === 'CONNECTED' is not sufficient.
222
+ const connectionStatus = task.attachments
223
+ ?.filter(a => a.type === 'ElasticNetworkInterface')
224
+ .map(a => a.status)[0];
225
+ const agentStatus = task.containers
226
+ ?.flatMap(c => c.managedAgents)
227
+ .filter(agent => agent?.name === 'ExecuteCommandAgent')
228
+ .map(agent => agent?.lastStatus)[0];
229
+ return task.lastStatus === 'RUNNING' && connectionStatus === 'ATTACHED' && agentStatus === 'RUNNING';
230
+ }
231
+ async function stopBastion(task, environment) {
232
+ const stopTaskCommand = new client_ecs_1.StopTaskCommand({
233
+ task: task.taskArn,
234
+ cluster: `${environment}-fargate`,
235
+ reason: 'Stopping bastion'
236
+ });
237
+ const stopTaskCommandResponse = await ecs.send(stopTaskCommand);
238
+ if (stopTaskCommandResponse.task == null) {
239
+ throw new Error(`Failed to stop bastion task: ${task.taskArn}`);
240
+ }
241
+ console.log('Stopping Bastion');
242
+ }
243
+ async function getDatabaseDetails(environment, application) {
244
+ const describeDbCommand = new client_rds_1.DescribeDBInstancesCommand({});
245
+ const rds = new client_rds_1.RDSClient();
246
+ const describeDbCommandResponse = await rds.send(describeDbCommand);
247
+ if (describeDbCommandResponse.DBInstances == null || describeDbCommandResponse.DBInstances.length < 1) {
248
+ throw new Error(`Failed to find the database for ${application} in ${environment}`);
249
+ }
250
+ const appDatabases = describeDbCommandResponse.DBInstances
251
+ .filter(s => s.DBInstanceIdentifier?.startsWith(`${environment}-${application}`));
252
+ if (appDatabases.length === 0) {
253
+ throw new Error(`Failed to find the database for ${application} in ${environment}`);
254
+ }
255
+ else if (appDatabases.length > 1) {
256
+ // TODO: Allow an argument to specify the exact database name. Print out how
257
+ // to re-run the describeDbCommand specifying the required database.
258
+ const databaseNames = appDatabases.map(d => d.DBInstanceIdentifier).join(':');
259
+ throw new Error(`There are multiple matching databases: ${databaseNames}`);
260
+ }
261
+ else {
262
+ return appDatabases[0];
263
+ }
264
+ }
265
+ function openTunnel(task, db, environment) {
266
+ const cluster = `${environment}-fargate`;
267
+ const ecsTaskId = task.taskArn?.split('/').at(-1);
268
+ const ecsContainerRunTimeId = task.containers?.map(c => c.runtimeId)[0];
269
+ const rdsEndpoint = db.Endpoint?.Address;
270
+ const target = `ecs:${cluster}_${ecsTaskId}_${ecsContainerRunTimeId}`;
271
+ const parameters = `{"host":["${rdsEndpoint}"],"portNumber":["5432"],"localPortNumber":["65432"]}`;
272
+ const commandArgs = [
273
+ 'ssm',
274
+ 'start-session',
275
+ '--target', target,
276
+ '--document-name', 'AWS-StartPortForwardingSessionToRemoteHost',
277
+ '--parameters', parameters
278
+ ];
279
+ const tunnelProc = (0, child_process_1.spawn)('aws', commandArgs, { detached: true });
280
+ // TODO: useful for testing when the spawned process exits itself. Consider
281
+ // removing before final launch of the command.
282
+ // const tunnelProc = spawn('sleep', ['5'], { detached: true })
283
+ tunnelProc.stdout.on('data', (data) => {
284
+ console.log(`tunnel command: ${data}`);
285
+ });
286
+ tunnelProc.stderr.on('data', (data) => {
287
+ console.error(`tunnel command error: ${data}`);
288
+ });
289
+ tunnelProc.on('close', (code, _signal) => {
290
+ if (code !== null) {
291
+ console.error(`\nTunnel unexpectedly closed with exit code: ${code}`);
292
+ console.group();
293
+ console.error('Type `exit` or `Ctrl-C` to shutdown the bastion task then try again.');
294
+ const tunnelCommandForConsole = commandArgs
295
+ .join(' ')
296
+ .replaceAll('--', '\\\n--')
297
+ .replace('{', "'{")
298
+ .replace('}', "}'");
299
+ console.error(`The command to manually create the tunnel is: \naws ${tunnelCommandForConsole}`);
300
+ console.groupEnd();
301
+ }
302
+ else {
303
+ console.log('Tunnel closed');
304
+ }
305
+ });
306
+ return tunnelProc;
307
+ }
308
+ async function shutdown(environment, tunnel, bastionTask) {
309
+ console.log('Shutting down');
310
+ if (tunnel !== undefined && tunnel !== null) {
311
+ if (tunnel.exitCode === null && tunnel.pid !== undefined) {
312
+ process.kill(-(tunnel.pid));
313
+ }
314
+ else {
315
+ console.log(`Tunnel already closed, exit code: ${tunnel?.exitCode ?? 'no exit code'}`);
316
+ }
317
+ }
318
+ if (bastionTask !== null) {
319
+ bastionTask = await refreshTask(bastionTask.taskArn, environment);
320
+ if (bastionTask.desiredStatus !== 'STOPPED') {
321
+ await stopBastion(bastionTask, environment);
322
+ }
323
+ }
324
+ printGreen('Shutdown complete.');
325
+ }
326
+ function printHowToTunnelText(application, environment, dbEngineVersion) {
327
+ const dbUser = getDbUser(application);
328
+ const payLowPassDbSecretName = getPayLowPassDbSecretname(environment, dbUser);
329
+ const paySecretsPasswordName = getPaySecretsPasswordName();
330
+ printGreen(`\nConnected tunnel to ${application} RDS database in ${environment} on port 65432\n`);
331
+ printGreen('Copy DB credentials to clipboard (in another window) using pay-low-pass:');
332
+ printGreen(` pay-low-pass ${payLowPassDbSecretName} | pbcopy`);
333
+ printGreen('Alternatively, fetch credentials from pay secrets:');
334
+ printGreen(` pay secrets fetch ${environment} ${application} ${paySecretsPasswordName} | pbcopy`);
335
+ printGreen('Open psql with:');
336
+ printGreen(` psql -h localhost -p 65432 -U ${dbUser} -d ${application}`);
337
+ printGreen('Alternatively connect using docker instead of needing psql installed locally and set the password automatically using pay-low-pass:');
338
+ printGreen(` docker run --rm -ti postgres:${dbEngineVersion}-alpine psql --host docker.for.mac.localhost --port 65432 --user ${dbUser} --dbname ${application}`);
339
+ printGreen('Or even more conveniently connect using a docker container and set the password automatically using pay-low-pass:');
340
+ printGreen(` docker run -e "PGPASSWORD=$(pay-low-pass ${payLowPassDbSecretName})" --rm -ti postgres:${dbEngineVersion}-alpine psql --host docker.for.mac.localhost --port 65432 --user ${dbUser} --dbname ${application}\n`);
341
+ }
342
+ // TODO: add a write flag. Default to readonly.
343
+ function getDbUser(application) {
344
+ return `${application}_support_readonly`;
345
+ }
346
+ function getPayLowPassDbSecretname(environment, user) {
347
+ return `aws/rds/support_readonly_users/${environment.split('-')[0]}/${user}`;
348
+ }
349
+ function getPaySecretsPasswordName() {
350
+ return 'DB_SUPPORT_PASSWORD_READONLY';
351
+ }
352
+ function printWarningToUser() {
353
+ console.log(FORMAT.yellow, '⚠️ WARNING: When using SSM, any and all activity you perform may be getting logged for security auditing purposes (think PCI).', FORMAT.reset);
354
+ console.log(FORMAT.yellow, `Avoid sending or accessing ${FORMAT.ul}anything${FORMAT.ulstop} that could cause a security breach, such as:`, FORMAT.reset);
355
+ console.log(FORMAT.yellow, FORMAT.reset);
356
+ console.log(FORMAT.yellow, '• Secret API Keys or Tokens', FORMAT.reset);
357
+ console.log(FORMAT.yellow, '• Credentials or Passwords', FORMAT.reset);
358
+ console.log(FORMAT.yellow, '• Cardholder Data or Personally-Identifiable Information (PII)', FORMAT.reset);
359
+ console.log(FORMAT.yellow, '• Anything else that may be protected by GDPR or PCI-DSS', FORMAT.reset);
360
+ console.log(FORMAT.yellow, "• Anything classified as GSC 'Secret' or above", FORMAT.reset);
361
+ console.log(FORMAT.yellow, FORMAT.reset);
362
+ console.log(FORMAT.yellow, `If you have a problem with this or aren't sure, use Ctrl-C ${FORMAT.ul}right now${FORMAT.ulstop} and discontinue your SSM session.`, FORMAT.reset);
363
+ console.log(FORMAT.yellow, FORMAT.reset);
364
+ }
365
+ function printError(error) {
366
+ console.error(`${FORMAT.red}${error}${FORMAT.reset}`);
367
+ }
368
+ function printGreen(message) {
369
+ console.error(`${FORMAT.green}${message}${FORMAT.reset}`);
370
+ }
@@ -8,10 +8,14 @@ const browse_js_1 = __importDefault(require("../commands/browse.js"));
8
8
  const demo_js_1 = __importDefault(require("../commands/demo.js"));
9
9
  const legacy_1 = __importDefault(require("../commands/legacy"));
10
10
  const help_1 = __importDefault(require("../commands/help"));
11
+ const tunnel_js_1 = __importDefault(require("../commands/tunnel.js"));
11
12
  const handlers = new Map();
12
13
  handlers.set('browse', {
13
14
  handler: browse_js_1.default
14
15
  });
16
+ handlers.set('tunnel', {
17
+ handler: tunnel_js_1.default
18
+ });
15
19
  handlers.set('legacy', {
16
20
  handler: legacy_1.default
17
21
  });
@@ -23,11 +23,29 @@ var __importStar = (this && this.__importStar) || function (mod) {
23
23
  return result;
24
24
  };
25
25
  Object.defineProperty(exports, "__esModule", { value: true });
26
- exports.distDirForBuildTasks = exports.rootDirForBuildTasks = exports.rootDir = void 0;
26
+ exports.APPLICATIONS = exports.ENVIRONMENTS = exports.distDirForBuildTasks = exports.rootDirForBuildTasks = exports.rootDir = void 0;
27
27
  const path = __importStar(require("path"));
28
28
  exports.rootDir = path.resolve(__dirname, '..', '..');
29
29
  exports.rootDirForBuildTasks = exports.rootDir.endsWith('dist') ? path.resolve(exports.rootDir, '..') : exports.rootDir;
30
30
  exports.distDirForBuildTasks = path.join(exports.rootDirForBuildTasks, 'dist');
31
+ exports.ENVIRONMENTS = ['test-12', 'test-perf-1', 'staging-2', 'deploy-tooling', 'production-2'];
32
+ exports.APPLICATIONS = [
33
+ 'adminusers',
34
+ 'cardid',
35
+ 'connector',
36
+ 'egress',
37
+ 'frontend',
38
+ 'ledger',
39
+ 'notifications',
40
+ 'pact-broker',
41
+ 'products',
42
+ 'products',
43
+ 'publicapi',
44
+ 'publicauth',
45
+ 'selfservice',
46
+ 'toolbox',
47
+ 'webhooks'
48
+ ];
31
49
  exports.default = {
32
50
  rootDir: exports.rootDir,
33
51
  distDirForBuildTasks: exports.distDirForBuildTasks,
@@ -1,23 +0,0 @@
1
- require 'date'
2
- require 'English'
3
- require 'aws-sdk-core'
4
-
5
- module PayCLI::Aws
6
- class Document
7
- def self.security_group_rules!(env)
8
- PayCLI::Environment.setup! env
9
- ec2 = Aws::EC2::Client.new()
10
- vpc_options = {
11
- :filters => [{ name: "tag:Name" , values: ["#{env}-vpc"] } ]
12
- }
13
- vpcs = ec2.describe_vpcs(vpc_options)
14
- vpc_id = vpcs[:vpcs][0][:vpc_id]
15
- STDERR.puts "Got vpc id #{vpc_id} for #{env}"
16
- STDERR.puts "Querying aws for security groups and using aws_security_viz to write diagram to security-group-visualisation-latest.svg "
17
- pid = system "bash", "-c" ,"aws_security_viz -o <(aws ec2 describe-security-groups --filters Name=vpc-id,Values=#{vpc_id}) -f security-group-visualisation-latest.svg --color"
18
- STDERR.puts "Copy security-group-visualisation-latest.svg in the current directory to https://github.com/alphagov/pay-team-manual/blob/master/images/security-group-visualisation-latest.svg and review"
19
- exit $CHILD_STATUS.exitstatus
20
- end
21
-
22
- end
23
- end
@@ -1,38 +0,0 @@
1
- require 'aws-sdk-ec2'
2
-
3
- class PayCLI::Ec2
4
-
5
- attr_reader :ec2, :logger
6
-
7
- def initialize(region: default_region, logger: Logger.new(STDERR))
8
- @ec2 = ::Aws::EC2::Client.new(region: region)
9
- @logger = logger
10
- end
11
-
12
- def all_instances
13
- @instances ||= fetch_all_instances
14
- end
15
-
16
- private
17
- def default_region
18
- ENV['AWS_REGION'] || ENV['AWS_DEFAULT_REGION'] || 'eu-west-1'
19
- end
20
-
21
- def fetch_all_instances
22
- next_token = nil
23
- instances = []
24
-
25
- begin
26
- response = ec2.describe_instances(next_token: next_token)
27
-
28
- if response
29
- instances += response.reservations.flat_map { |r| r.instances }
30
- next_token = response.next_token
31
- else
32
- break
33
- end
34
- end while next_token
35
-
36
- return instances
37
- end
38
- end
@@ -1,88 +0,0 @@
1
- const fs = require('fs')
2
- const path = require('path')
3
- const stringify = require('csv-stringify/sync').stringify
4
-
5
- const INPUT_PATH = `${path.resolve(path.dirname(__filename))}/reports`
6
- const OUTPUT_PATH = `${path.resolve(path.dirname(__filename))}/reports`
7
-
8
- const HEADERS = ["App name", "Release", "Vulnerability", "Severity", "Status", "Package", "Fixed version"]
9
- const NODE_MAJOR_VERSION = process.versions.node.split('.')[0];
10
-
11
- if (NODE_MAJOR_VERSION < 16) {
12
- console.warn('⛔️ requires >= Node 16')
13
- process.exit(1)
14
- } else {
15
- generate()
16
- }
17
-
18
- function generate() {
19
- const date = new Date().toJSON().slice(0, 10);
20
- const jsonFiles = fs.readdirSync(INPUT_PATH).filter(file => path.extname(file) === '.json')
21
- const reportFilePath = `${OUTPUT_PATH}/vulnerability_scan_report-${date}.csv`
22
- const violatedRules = new Map()
23
- let parseCount = 0
24
- let rowCount = 0
25
- try {
26
- jsonFiles.forEach(file => {
27
- const appInfo = extractAppAndReleaseFromFilename(file)
28
- const filePath = path.join(INPUT_PATH, file)
29
- const data = fs.readFileSync(filePath, 'utf8')
30
- const parsedData = JSON.parse(data)
31
-
32
- const appViolations = [];
33
- violatedRules.set(`${appInfo.name}:${appInfo.release}`, appViolations)
34
-
35
- parsedData["runs"][0]["tool"]["driver"]["rules"].forEach(
36
- (violatedRule) => appViolations.push({
37
- "App name": appInfo.name,
38
- "Release": appInfo.release,
39
- "Vulnerability": violatedRule.id,
40
- "Severity": violatedRule.properties.cvssV3_severity,
41
- "Status": "",
42
- "Package": violatedRule.properties.purls.join("\n"),
43
- "Fixed version": violatedRule.properties.fixed_version,
44
- })
45
- )
46
-
47
- parseCount++
48
- })
49
- } catch (err) {
50
- console.error(`Error: Failed parsing source json after successfully parsing ${parseCount} of ${jsonFiles.length} source files: `, err)
51
- process.exit(1)
52
- }
53
-
54
- const csv_rows = [HEADERS];
55
- try {
56
- violatedRules.forEach((violations) => {
57
- violations.forEach((violation) => {
58
- const row = HEADERS.map(key => violation[key])
59
- csv_rows.push(row)
60
- rowCount++
61
- })
62
- })
63
- } catch (err) {
64
- console.error("Error collating results for writing to CSV: ", err)
65
- process.exit(1)
66
- }
67
-
68
- try {
69
- fs.writeFileSync(reportFilePath, stringify(csv_rows))
70
- } catch (err) {
71
- console.error("Error writing CSV file: ", err.message)
72
- process.exit(1)
73
- }
74
-
75
- console.log(`Parsed ${parseCount} of ${jsonFiles.length} JSON files, wrote ${rowCount} row(s) to ${reportFilePath}`)
76
- }
77
-
78
- function extractAppAndReleaseFromFilename(fileName) {
79
- const regex = /^.*?(?=-\d+)/ // matches any characters at the beginning of the string that are followed by a dash and one or more digits
80
- const match = fileName.match(regex)
81
- if (match && match[0]) {
82
- const release = fileName.replace(`${match[0]}-`, "").replace(".json", "")
83
- return { name: match[0], release }
84
- } else {
85
- const fallback = fileName.replace(".json", "")
86
- return { name: fallback, release: fallback}
87
- }
88
- }
@@ -1,15 +0,0 @@
1
- {
2
- "name": "vulnerability_scan",
3
- "version": "1.0.0",
4
- "description": "",
5
- "main": "generate_vulnerability_report.js",
6
- "scripts": {
7
- "test": "echo \"Error: no test specified\" && exit 1"
8
- },
9
- "keywords": [],
10
- "author": "",
11
- "license": "MIT",
12
- "dependencies": {
13
- "csv-stringify": "^6"
14
- }
15
- }
@@ -1,88 +0,0 @@
1
- #!/bin/bash
2
-
3
- # Script for scanning ECR docker vulnerabilities with "docker scout cves"
4
- # https://docs.docker.com/engine/reference/commandline/scout_cves/
5
-
6
- # **** Run this from the root of pay-infra to store reports in this folder. ****
7
-
8
- # This will take about 5 minutes unless you already have the images cached locally.
9
- # You will be prompted for your aws-vault auth/MFA code.
10
- # The report will be stored in reports/vulnerability_scan_report-YYYY-MM-DD.csv
11
-
12
- set -euo pipefail
13
-
14
- ACCOUNT="staging"
15
- ACCOUNT_ID="888564216586"
16
- SOURCE_DIR=$(realpath "$(dirname "${BASH_SOURCE[0]}")")
17
- REPORTS_FOLDER="$SOURCE_DIR/reports"
18
- ARCHITECTURE_TO_SCAN="linux/amd64"
19
-
20
- echo "🔍 checking for dependencies..."
21
-
22
- declare -a commands=("aws" "aws-vault" "docker" "docker scout")
23
-
24
- for cmd in "${commands[@]}"; do
25
- # shellcheck disable=SC2086
26
- if ! command -v $cmd &> /dev/null
27
- then
28
- echo "❌ $cmd"
29
- exit 1
30
- else
31
- echo "✅ $cmd"
32
- fi
33
- done
34
-
35
- # Login to ECR
36
- echo "Logging into $ACCOUNT ECR"
37
- aws-vault exec "$ACCOUNT" -- aws ecr get-login-password --region eu-west-1 | docker login --username AWS --password-stdin "${ACCOUNT_ID}.dkr.ecr.eu-west-1.amazonaws.com"
38
-
39
- IMAGES=""
40
-
41
- # Get a all ECS clusters
42
- echo "Getting list of ECS clusters"
43
- CLUSTERS=$(aws-vault exec "$ACCOUNT" -- aws ecs list-clusters --query clusterArns --output text)
44
-
45
- for CLUSTER in $CLUSTERS; do
46
- echo "Checking services in cluster $CLUSTER"
47
- SERVICES=$(aws-vault exec "$ACCOUNT" -- aws ecs list-services --cluster "$CLUSTER" --query 'serviceArns' --output text | xargs -n1 | sort)
48
-
49
- for SERVICE in $SERVICES; do
50
- echo "Checking for container images in service $SERVICE"
51
- TASK_DEFINITION=$(\
52
- aws-vault exec "$ACCOUNT" -- \
53
- aws ecs describe-services --cluster "$CLUSTER" --service "$SERVICE" --query 'services[].taskDefinition' --output text
54
- )
55
- CONTAINER_IMAGES=$(aws-vault exec "$ACCOUNT" -- aws ecs describe-task-definition --task-definition "$TASK_DEFINITION" --query 'taskDefinition.containerDefinitions[].image' --output text)
56
- for CONTAINER_IMAGE in $CONTAINER_IMAGES; do
57
- IMAGES="$IMAGES $CONTAINER_IMAGE"
58
- done
59
- done
60
- done
61
-
62
- for IMAGE in $(xargs -n1 <<<"$IMAGES" | sort | uniq); do
63
- SHORT_REPO_AND_TAG=$(cut -d'/' -f 3 <<<"$IMAGE")
64
- SHORT_REPO_NAME=$(cut -f 1 -d ":" <<<"$SHORT_REPO_AND_TAG")
65
- IMAGE_TAG=$(cut -f 2 -d ":" <<<"$SHORT_REPO_AND_TAG")
66
-
67
- echo "Scanning image $IMAGE"
68
- docker scout cves --format sarif --platform "$ARCHITECTURE_TO_SCAN" --output "${REPORTS_FOLDER}/${SHORT_REPO_NAME}-${IMAGE_TAG}.json" "$IMAGE"
69
- done
70
-
71
- pushd "$SOURCE_DIR" >>/dev/null 2>&1
72
-
73
- echo "Installing node dependencies"
74
- npm install
75
-
76
- echo
77
- echo "|============================================================================================"
78
- echo "| Generating vulnerability report"
79
- echo "|============================================================================================"
80
- node "${SOURCE_DIR}/generate_vulnerability_report.js"
81
- echo "|============================================================================================"
82
- echo
83
-
84
- popd >>/dev/null 2>&1
85
-
86
- # Clean up report JSON files once done
87
- echo "Removing JSON report files..."
88
- rm $REPORTS_FOLDER/*.json