eip-s3-deploy 2.2.0 → 2.2.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/deploy.js CHANGED
@@ -1,186 +1,350 @@
  #!/usr/bin/env node
 
- /**
- * --------------
- * DATE: 2020-06-18 @ 11:54
- * AUTHOR: ollie
- * ORIGINAL NAME: /./deploy
- * --------------
- * Created for the olivers-tools
- */
-
- const aws = require ( 'aws-sdk' );
  const path = require ( 'path' );
- const _ = require ( 'lodash' );
+ const { s3 } = require ( 'eip-cloud-services' );
  const fs = require ( 'fs' ).promises;
- const s3 = new aws.S3 ( { region: 'eu-west-1' } );
- const fetch = require ( 'node-fetch' );
+ const crypto = require ( 'crypto' );
  const recursive = require ( 'recursive-readdir' );
- const md5File = require ( 'md5-file' );
  const handlebars = require ( 'handlebars' );
  const mime = require ( 'mime' );
  const progress = require ( 'cli-progress' );
- const args = require ( 'minimist' ) ( process.argv.slice ( 2 ) );
+
+ const COMMAND_OPTIONS = [ '-f', '--force', '-ff', '--folderFilter', '-e', '--env', '-v', '--verbose', '-log' ];
+ const logOutputRequested = process.argv.includes ( '-log' ) || process.argv.includes ( '--log' );
+ const forceRedeploy = process.argv.includes ( '-f' ) || process.argv.includes ( '--force' );
+ const verboseOutput = process.argv.includes ( '-v' ) || process.argv.includes ( '--verbose' );
+
+ const getCommandOptions = ( options ) => {
+ const optionIndex = process.argv.findIndex ( arg => options.includes ( arg ) );
+ if ( optionIndex === -1 ) {
+ return null;
+ }
+
+ const option = process.argv[ optionIndex ];
+ const value = process.argv[ optionIndex + 1 ];
+
+ if ( !value || COMMAND_OPTIONS.includes ( value ) ) {
+ usage ( `Option "${option}" requires a value` );
+ }
+
+ return value;
+ };
+
+ const folderFilter = getCommandOptions ( [ '-ff', '--folderFilter' ] );
+ const environment = getCommandOptions ( [ '-e', '--env' ] ) || process.argv[ 2 ];
 
  const loadDeploymentLock = async () => fs.access ( process.cwd () + '/deployment.lock' )
  .then ( () => fs.readFile ( path.join ( process.cwd (), 'deployment.lock' ), 'utf8' ) )
- .then ( json => JSON.parse ( json )[ args._[ 0 ] ] || ( {} ) )
+ .then ( json => JSON.parse ( json )[ environment ] || ( {} ) )
  .catch ( () => ( {} ) );
 
  const writeDeploymentLock = async ( deploymentHash ) => fs.access ( process.cwd () + '/deployment.lock' )
  .then ( () => fs.readFile ( path.join ( process.cwd (), 'deployment.lock' ), 'utf8' ) )
  .then ( json => {
  const previousHash = JSON.parse ( json );
- previousHash[ args._[ 0 ] ] = previousHash[ args._[ 0 ] ] ? _.merge ( previousHash[ args._[ 0 ] ], deploymentHash ) : deploymentHash;
+ previousHash[ environment ] = previousHash[ environment ] ? { ...previousHash[ environment ], ...deploymentHash } : deploymentHash;
 
  return fs.writeFile ( path.join ( process.cwd (), 'deployment.lock' ), JSON.stringify ( previousHash, null, 2 ) );
  } )
  .catch ( () => {
  const newHash = {};
- newHash[ args._[ 0 ] ] = deploymentHash;
+ newHash[ environment ] = deploymentHash;
 
  return fs.writeFile ( path.join ( process.cwd (), 'deployment.lock' ), JSON.stringify ( newHash, null, 2 ) );
  } );
 
- const deploy = async () => {
- const files = await fs.readdir ( process.cwd () );
- let deployConfName;
+ const calculateMD5 = async ( filePath ) => {
+ const content = await fs.readFile ( filePath );
+
+ return crypto.createHash ( 'md5' ).update ( content ).digest ( 'hex' );
+ };
 
- if ( files.find ( f => f === 'deployConf.cjs' ) )
- deployConfName = 'deployConf.cjs';
- else
- deployConfName = 'deployConf.js';
+ const readline = require ( 'readline' );
 
- const parentConfig = require ( path.join ( process.cwd (), deployConfName ) );
- const config = require ( path.join ( process.cwd (), deployConfName ) )[ args._[ 0 ] ];
- const promiseChain = {};
+ const getUserConfirmation = async ( env, destinations ) => {
+ const projectName = await getProjectName (); // returns a promise now
 
- if ( !config ) {
- process.stdout.write ( '\x1b[38;5;196mThe environment specified cannot be found in the deployConfig.js file.\x1b[0m\n' );
- process.exit ( 0 );
- }
- if ( !parentConfig.version || parentConfig.version !== 2 ) {
- process.stdout.write ( '\x1b[38;5;196mThere is a deployConf.js file however it\'s not using the latest version please update your config to version 2.\x1b[0m\n' );
- process.stdout.write ( '\x1b[38;5;214mHere is an example deployConf.js to assist:\x1b[0m - https://cf.eip.telegraph.co.uk/testFolder/example.html\n' );
- process.exit ( 0 );
- }
- if ( ![ 'bucket', 'folder', 'cache' ].every ( key => Object.keys ( config ).includes ( key ) ) ){
- process.stdout.write ( '\x1b[38;5;214mThere is a deployConf.js file however it\'s not structured correctly, please make sure you\'ve got the correct attributes.\x1b[0m\n' );
- process.stdout.write ( '\x1b[38;5;190mHere is an example deployConf.js to assist:\x1b[0m - https://cf.eip.telegraph.co.uk/testFolder/example.html\n' );
- process.exit ( 0 );
+ const rl = readline.createInterface ( {
+ input: process.stdin,
+ output: process.stdout
+ } );
+
+ const intro = projectName
+ ? `🔒 You are about to deploy \x1b[38;5;190m${projectName}\x1b[0m to the \x1b[38;5;190m${env}\x1b[0m environment.\n`
+ : `🔒 You are about to deploy to the \x1b[38;5;190m${env}\x1b[0m environment.\n`;
+
+ const targets =
+ `\nThe project will be deployed to:\n\x1b[38;5;214m→ ${destinations.join ( ',\n→ ' )}\x1b[0m\n\n`;
+
+ return new Promise ( ( resolve ) => {
+ rl.question (
+ `\n${intro}${targets}` +
+ 'Press ENTER to proceed or type anything else then ENTER to abort (or Ctrl+C): ',
+ ( answer ) => {
+ rl.close ();
+ resolve ( answer.trim () === '' );
+ }
+ );
+ } );
+ };
+
+ const getProjectName = () => {
+ const packageJsonPath = path.join ( process.cwd (), 'package.json' );
+
+ return fs.access ( packageJsonPath )
+ .then ( () => fs.readFile ( packageJsonPath, 'utf8' ) )
+ .then ( ( data ) => {
+ const packageJson = JSON.parse ( data );
+
+ return packageJson.name || 'Unknown Project';
+ } )
+ .catch ( () => null );
+ };
+
+ const deploy = async () => {
+
+ if ( process.argv.length <= 2 || process.argv.includes ( '--help' ) || process.argv.includes ( '-h' ) || process.argv.includes ( 'help' ) ) {
+ showHelp ();
+
+ return;
  }
 
- process.stdout.write ( '-------------------\n' );
- process.stdout.write ( '\x1b[38;5;214mINITIATING DEPLOYMENT TO:\x1b[0m \x1b[38;5;190m' + args._[ 0 ] + '\n' );
- process.stdout.write ( '\x1b[38;5;214m📁 Copying contents of:\x1b[0m \x1b[38;5;190m' + ( config.targetFolder ? process.cwd () + '/' + config.targetFolder : process.cwd () ) + ( args._[ 1 ] ? '/' + args._[ 1 ] : '' ) + '\x1b[0m\n' );
- process.stdout.write ( '\x1b[38;5;214m🗑 to S3 Bucket/Folder:\x1b[0m \x1b[38;5;190m' + config.bucket + '/' + ( config.folder || '' ) + ( args._[ 1 ] ? args._[ 1 ] + '/' : '' ) + '\x1b[0m\n' );
- process.stdout.write ( '\x1b[38;5;214m☁️ CloudFront CDN Invalidation:\x1b[0m ' + ( config.cdnId ? '\x1b[38;5;190mYES (' + config.cdnId + ')' : '\x1b[38;5;9mNO' ) + '\x1b[0m\n' );
- if ( args._[ 1 ] ) process.stdout.write ( '\x1b[38;5;214m📂 Custom Deploy Folder Selected:\x1b[0m \x1b[38;5;190m' + args._[ 1 ] + '\x1b[0m\n' );
- if ( args.f ) {
+ try {
+ const files = await fs.readdir ( process.cwd () );
+ let deployConfName;
+
+ if ( files.find ( f => f === 'deployConf.cjs' ) )
+ deployConfName = 'deployConf.cjs';
+ else
+ deployConfName = 'deployConf.js';
+
+ const parentConfig = require ( path.join ( process.cwd (), deployConfName ) );
+ const config = require ( path.join ( process.cwd (), deployConfName ) )[ environment ];
+ if ( typeof config.bucket === 'string' ) config.bucket = [ config.bucket ];
+ if ( typeof config.folder === 'string' ) config.folder = [ config.folder ];
+ const promiseChain = {};
+
+ if ( !config ) {
+ process.stdout.write ( '\x1b[38;5;196mThe environment specified cannot be found in the deployConfig.js file.\x1b[0m\n' );
+ process.exit ( 0 );
+ }
+ if ( !parentConfig.version || parentConfig.version !== 2 ) {
+ process.stdout.write ( '\x1b[38;5;196mThere is a deployConf.js file however it\'s not using the latest version please update your config to version 2.\x1b[0m\n' );
+ process.stdout.write ( '\x1b[38;5;214mHere is an example deployConf.js to assist:\x1b[0m - https://cf.eip.telegraph.co.uk/testFolder/example.html\n' );
+ process.exit ( 0 );
+ }
+ if ( ![ 'bucket', 'folder', 'cache' ].every ( key => Object.keys ( config ).includes ( key ) ) ){
+ process.stdout.write ( '\x1b[38;5;214mThere is a deployConf.js file however it\'s not structured correctly, please make sure you\'ve got the correct attributes.\x1b[0m\n' );
+ process.stdout.write ( '\x1b[38;5;190mHere is an example deployConf.js to assist:\x1b[0m - https://cf.eip.telegraph.co.uk/testFolder/example.html\n' );
+ process.exit ( 0 );
+ }
+
+ process.stdout.write ( '-------------------\n' );
+ process.stdout.write ( '\x1b[38;5;214mINITIATING FILE DEPLOYMENT TO:\x1b[0m \x1b[38;5;190m' + environment + '\n' );
+ process.stdout.write ( '\x1b[38;5;214m📁 Copying contents of:\x1b[0m \x1b[38;5;190m' + ( config.targetFolder ? process.cwd () + '/' + config.targetFolder : process.cwd () ) + ( folderFilter ? '/' + folderFilter : '' ) + '\x1b[0m\n' );
+ config.bucket.forEach ( ( bucket, i ) => {
+ process.stdout.write ( '\x1b[38;5;214m🗑 to S3 Bucket/Folder:\x1b[0m \x1b[38;5;190m' + bucket + '/' + ( config.folder[ i ] || '' ) + ( folderFilter ? '/' + folderFilter + '/' : '' ) + '\x1b[0m\n' );
+ } );
+ process.stdout.write ( '\x1b[38;5;214m☁️ CloudFront CDN Invalidation:\x1b[0m ' + ( config.cdnId ? '\x1b[38;5;190mYES (' + config.cdnId + ')' : '\x1b[38;5;9mNO' ) + '\x1b[0m\n' );
+ if ( folderFilter ) {
+ process.stdout.write ( '-------------------\n' );
+ process.stdout.write ( '\x1b[38;5;214m📂 Folder Filter Applied:\x1b[0m \x1b[38;5;190m' + folderFilter + '\x1b[0m\n' );
+ }
+ if ( forceRedeploy ) {
+ process.stdout.write ( '-------------------\n' );
+ process.stdout.write ( '\x1b[38;5;196m🔥 FORCING DEPLOYMENT - deployment.lock will be ignored. 🔥\x1b[0m\n' );
+ }
  process.stdout.write ( '-------------------\n' );
- process.stdout.write ( '\x1b[38;5;196m🔥 FORCING DEPLOYMENT - Deployment.lock will be ignored. 🔥\x1b[0m\n' );
- }
- process.stdout.write ( '-------------------\n' );
 
- let scanDirectory = path.resolve ( config.targetFolder && config.targetFolder !== '' ? `${ process.cwd () }/${ config.targetFolder }` : process.cwd () );
- if ( args._[ 1 ] ) scanDirectory = path.join ( scanDirectory, args._[ 1 ] );
+ const scanDirectory = path.resolve ( config.targetFolder && config.targetFolder !== '' ? `${ process.cwd () }/${ config.targetFolder }` : process.cwd () );
+
+ let scannedFiles = await recursive ( scanDirectory, [ ...( config.omit || [] ), 'deployment.lock' ] );
+ const deploymentLock = await loadDeploymentLock ();
 
- const scannedFiles = await recursive ( scanDirectory, _.concat ( config.omit || [], [ 'deployment.lock' ] ) );
- const deploymentLock = await loadDeploymentLock ();
+ if ( folderFilter ) scannedFiles = scannedFiles.filter ( filepath => filepath.includes ( `${ scanDirectory }/${ folderFilter }` ) );
 
- promiseChain.changedFiles = promiseChain.newFiles = 0;
- promiseChain.deploymentHash = {};
+ promiseChain.changedFiles = promiseChain.newFiles = 0;
+ promiseChain.deploymentHash = {};
 
- const filesToUpload = _.compact ( _.map ( scannedFiles, filename => {
- promiseChain.deploymentHash[ _.replace ( filename, process.cwd (), '' ) ] = md5File.sync ( filename );
- if ( _.has ( deploymentLock, _.replace ( filename, process.cwd (), '' ) ) ) {
- if ( deploymentLock[ _.replace ( filename, process.cwd (), '' ) ] === promiseChain.deploymentHash[ _.replace ( filename, process.cwd (), '' ) ] ) {
- return null;
- }
- promiseChain.changedFiles++;
+ const filesToUpload = ( await Promise.all ( scannedFiles.map ( async filename => {
+ const md5Hash = await calculateMD5 ( filename );
+ promiseChain.deploymentHash[ filename.replace ( process.cwd (), '' ) ] = md5Hash;
+ if ( Object.prototype.hasOwnProperty.call ( deploymentLock, filename.replace ( process.cwd (), '' ) ) ) {
+ if ( deploymentLock[ filename.replace ( process.cwd (), '' ) ] === md5Hash ) {
+ if ( verboseOutput ) process.stdout.write ( '\x1b[38;5;245mNo Change: ' + filename + '\x1b[0m\n' );
+
+ return null;
+ }
+ if ( verboseOutput ) process.stdout.write ( '\x1b[38;5;172mChange: ' + filename + '\x1b[0m\n' );
+ promiseChain.changedFiles++;
 
+ return filename;
+ }
+ if ( verboseOutput ) process.stdout.write ( '\x1b[38;5;64mChange: ' + filename + '\x1b[0m\n' );
+ promiseChain.newFiles++;
+
  return filename;
- }
- promiseChain.newFiles++;
+ } ) ) ).filter ( Boolean );
+ if ( verboseOutput ) process.stdout.write ( '-------------------\n' );
+
+ process.stdout.write ( `📗 ${ forceRedeploy ? scannedFiles.length : promiseChain.newFiles } new files | 📙 ${ forceRedeploy ? 0 : promiseChain.changedFiles } updated files | 📕 ${ forceRedeploy ? 0 : scannedFiles.length - filesToUpload.length } unchanged files\n` );
+ process.stdout.write ( '-------------------\n' );
+
+ if ( ( forceRedeploy ? scannedFiles.length : filesToUpload.length ) === 0 ) {
+ process.stdout.write ( '\x1b[38;5;190mNothing to deploy\x1b[0m (you can force redeploy using the \'-f\' option)\n' );
+ process.stdout.write ( '-------------------\n' );
 
- return filename;
- } ) );
+ return;
+ }
 
- process.stdout.write ( `📗 ${ args.f ? scannedFiles.length : promiseChain.newFiles } new files | 📙 ${ args.f ? 0 : promiseChain.changedFiles } updated files | 📕 ${ args.f ? 0 : scannedFiles.length - filesToUpload.length } unchanged files\n` );
- process.stdout.write ( '-------------------\n' );
+ if ( logOutputRequested ) {
+ const list = ( forceRedeploy ? scannedFiles : filesToUpload );
+ process.stdout.write ( '\x1b[38;5;245m\nFiles to be deployed:\x1b[0m\n' );
+ list.forEach ( f => process.stdout.write ( `${f.replace ( process.cwd (), '' )}\n` ) );
+ process.stdout.write ( '-------------------\n' );
+ }
 
- if ( ( args.f ? scannedFiles.length : filesToUpload.length ) === 0 ) {
- process.stdout.write ( '\x1b[38;5;190mNothing to deploy\x1b[0m (you can force redeploy using the \'-f\' option)\n' );
- process.stdout.write ( '-------------------\n' );
+ // ------------------------------------------------------------------
+ // Guard-rail: require explicit user confirmation
+ const okToProceed = await getUserConfirmation (
+ environment,
+ config.bucket.map ( ( b, i ) => `${b}/${config.folder[ i ] || ''}` )
+ );
+
+ if ( !okToProceed ) {
+ process.stdout.write ( '\x1b[38;5;196mDeployment cancelled by user.\x1b[0m\n' );
+
+ return;
+ }
+ // ------------------------------------------------------------------
+
+ process.stdout.write ( '\n' );
+ const progressBar = new progress.SingleBar ( {
+ format: '\x1b[38;5;214m🚀 Deploying [\x1b[38;5;190m{bar}\x1b[0m\x1b[38;5;214m] {percentage}% | ETA: {eta}s | {value}/{total}',
+ barCompleteChar: '#',
+ barIncompleteChar: '-',
+ barGlue: '\x1b[38;5;214m',
+ etaBuffer: 10000,
+ etaAsynchronousUpdate: true
+ } );
+ if ( !verboseOutput ) progressBar.start ( ( forceRedeploy ? scannedFiles.length : filesToUpload.length ) * config.bucket.length );
+
+ await Promise.all ( config.bucket.map ( async ( bucket, i ) => {
+ await Promise.all ( ( forceRedeploy ? scannedFiles : filesToUpload ).map ( filename => fs.readFile ( filename )
+ .then ( file => {
+ let body = file;
+
+ try {
+ body = config.data ? handlebars.compile ( file.toString ( 'utf8' ) ) ( config.data ) : file;
+ }
+ catch ( e ) {
+ // do nothing
+ }
+
+ let key = ( config.folder[ i ] ? `${config.folder[ i ]}` : '' ) +
+ ( config.targetFolder && config.targetFolder !== '' ? filename.replace ( process.cwd (), '' ).replace ( `${config.targetFolder}/`, '' ) : filename.replace ( process.cwd (), '' ) );
+ key = key.replace ( /\\/g, '/' );
+ if ( key.startsWith ( '/' ) ) {
+ key = key.substring ( 1 );
+ }
+ if ( key.endsWith ( '/' ) ) {
+ key = key.slice ( 0, -1 );
+ }
+
+ if ( verboseOutput ) process.stdout.write ( '\x1b[38;5;172mUploading to S3: ' + filename + '\x1b[0m\n' );
+
+ return s3.set ( key, body, {
+ bucket: bucket,
+ contentType: mime.getType ( filename ),
+ cacheControl: Object.keys ( config.cache )
+ .map ( cacheKey => typeof config.cache[ cacheKey ] === 'boolean' ?
+ config.cache[ cacheKey ] ? cacheKey : null :
+ config.cache[ cacheKey ] ? `${cacheKey}=${config.cache[ cacheKey ]}` : null )
+ .filter ( n => n )
+ .join ( ', ' )
+ } ).then ( () => !verboseOutput && progressBar.increment () );
+ } )
+ ) );
+ } ) );
 
- return;
- }
+ if ( !verboseOutput ) progressBar.stop ();
 
- const progressBar = new progress.SingleBar ( {
- format: '\x1b[38;5;214m🚀 Deploying [\x1b[38;5;190m{bar}\x1b[0m\x1b[38;5;214m] {percentage}% | ETA: {eta}s | {value}/{total}',
- barCompleteChar: '#',
- barIncompleteChar: '-',
- barGlue: '\x1b[38;5;214m',
- etaBuffer: 10000,
- etaAsynchronousUpdate: true
- } );
- progressBar.start ( args.f ? scannedFiles.length : filesToUpload.length );
+ if ( config.cdnId ) {
+ if ( verboseOutput ) process.stdout.write ( '\x1b[38;5;172mRequesting invalidation: ' + [ config.folder ? '/' + config.folder + '/*' : '/*' ] + '\x1b[0m\n' );
+ await fetch ( 'https://tools.eip.telegraph.co.uk/v1/invalidate', {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json'
+ },
+ body: JSON.stringify ( {
+ cdn: config.cdnId,
+ key: config.folder.map ( folder => folder ? '/' + folder + '/*' : '/*' ),
+ environment: config?.environment || undefined
+ } )
+ } );
+ process.stdout.write ( '-------------------\n' );
+ config.folder.forEach ( folder => {
+ process.stdout.write ( '📤 \x1b[38;5;190mCloudFront Cache Cleared\x1b[0m | Invalidation: ' + ( folder ? '/' + folder + '/*' : '/*' ) + '\n' );
+ } );
+ }
 
- await Promise.all ( _.map ( args.f ? scannedFiles : filesToUpload, filename => fs.readFile ( filename )
- .then ( file => {
- let body = file;
+ // This should be done after everything is complete.
+ await writeDeploymentLock ( promiseChain.deploymentHash );
 
- try {
- body = config.data ? handlebars.compile ( file.toString ( 'utf8' ) ) ( config.data ) : file;
- }
- catch ( e ) {
- //do nothing
- }
+ const user = process.env.PAPI_USER || 'Unknown';
+ const machine = process.env.PAPI_MACHINE || 'Unknown';
+ const deploymentReport = [ {
+ project: config.folder,
+ context: config.bucket,
+ user: `${user} - ${machine}`,
+ environment: environment,
+ timestamp: Date.now ()
+ } ];
+ if ( deploymentReport.length > 0 && config?.deploymentReportEnabled !== false ){
+ await fetch ( 'https://tools.eip.telegraph.co.uk/v1/healthcheck/deployment', {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json'
+ },
+ body: JSON.stringify ( { deployments: deploymentReport } )
+ } );
 
- let key = ( config.folder ? `${ config.folder }` : '' ) + ( config.targetFolder && config.targetFolder !== '' ? _.replace ( _.replace ( filename, process.cwd (), '' ), `${ config.targetFolder }/`, '' ) : _.replace ( filename, process.cwd (), '' ) );
- key = key.replace ( /\\/g, '/' );
- if ( key.indexOf ( '/' ) === 0 ){
- key = key.substr ( 1 );
- }
- if ( _.lastIndexOf ( key, '/' ) === key.length - 1 ){
- key = key.substr ( 0, key.length - 1 );
- }
+ process.stdout.write ( '-------------------\n' );
+ process.stdout.write ( '✅ \x1b[38;5;190mDeployment report has been sent.\x1b[0m\n' );
+ }
 
- return s3.putObject ( {
- Bucket: config.bucket,
- Key: key,
- ACL: 'public-read',
- Body: body,
- CacheControl: Object.keys ( config.cache ).map ( cacheKey => typeof config.cache[ cacheKey ] === 'boolean' ? config.cache[ cacheKey ] ? cacheKey : null : config.cache[ cacheKey ] ? `${cacheKey}=${config.cache[ cacheKey ]}` : null ).filter ( n => n ).join ( ', ' ),
- ContentType: mime.getType ( filename )
- } ).promise ().then ( () => progressBar.increment () );
-
- } ) ) );
-
- progressBar.stop ();
-
- if ( config.cdnId ) {
- await fetch ( 'https://tools.eip.telegraph.co.uk/v1/invalidate', {
- method: 'POST',
- headers: {
- 'Content-Type': 'application/json'
- },
- body: JSON.stringify ( {
- cdn: config.cdnId,
- key: [ config.folder ? '/' + config.folder + '/*' : '/*' ],
- environment: config?.environment || undefined
- } )
- } );
  process.stdout.write ( '-------------------\n' );
- process.stdout.write ( '📤 \x1b[38;5;190mCloudFront Cache Cleared\x1b[0m | Invalidation: ' + ( config.folder ? '/' + config.folder + '/*' : '/*' ) + '\n' );
+ process.stdout.write ( ' \x1b[38;5;190mDeployment completed.\x1b[0m\n' );
+ process.stdout.write ( '-------------------\n' );
  }
+ catch ( error ){
+ console.log ( error );
+ showHelp ();
+
+ return;
+ }
+ };
 
- // This should be done after everything is complete.
- await writeDeploymentLock ( promiseChain.deploymentHash );
-
- process.stdout.write ( '-------------------\n' );
- process.stdout.write ( '✅ \x1b[38;5;190mDeployment completed.\x1b[0m\n' );
- process.stdout.write ( '-------------------\n' );
+ const showHelp = () => {
+ console.log ( 'S3 Deploy Script' );
+ console.log ( 'Usage:' );
+ console.log ( ' s3-deploy <environment_name> [-f|--force] [-ff|--folderFilter <folder_filter>] [-e|--env <environment_name>] [-log]' );
+ console.log ( 'Options:' );
+ console.log ( ' -f, --force Force redeployment of files' );
+ console.log ( ' -ff, --folderFilter Specify a single folder to deploy' );
+ console.log ( ' -e, --env Specify the environment for deployment' );
+ console.log ( ' -v, --verbose Print more output' );
+ console.log ( ' -log List every file that will be uploaded before confirmation' );
+ console.log ( 'Examples:' );
+ console.log ( ' s3-deploy test' );
+ console.log ( ' s3-deploy production --force' );
+ console.log ( ' s3-deploy test -ff myFolder' );
+ console.log ( ' s3-deploy -ff myFolder -e prod -f' );
+ console.log ( ' s3-deploy --folderFilter myFolder -e test -f' );
+ process.exit ( 0 );
  };
 
- deploy ();
+ deploy ().catch ( ( error ) => {
+ console.error ( `ERROR: ${error}` );
+ console.log ( error );
+ } );
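
Note on the change-detection flow above: 2.2.2 replaces md5-file with Node's built-in crypto module, hashing each file and comparing the digest against the per-environment entry in deployment.lock. A minimal sketch of that flow, assuming a lock file shaped like the one shipped in this package and a hypothetical environment name of 'test' (md5Of and hasChanged are illustrative helpers, not functions exported by deploy.js):

    const crypto = require ( 'crypto' );
    const fs = require ( 'fs' ).promises;
    const path = require ( 'path' );

    // Hash a file the same way calculateMD5 does above.
    const md5Of = async ( filePath ) => {
      const content = await fs.readFile ( filePath );

      return crypto.createHash ( 'md5' ).update ( content ).digest ( 'hex' );
    };

    // True when a file is new or its content differs from the lock entry for this environment.
    const hasChanged = async ( filePath, environment = 'test' ) => {
      const lock = JSON.parse ( await fs.readFile ( path.join ( process.cwd (), 'deployment.lock' ), 'utf8' ) );
      const key = filePath.replace ( process.cwd (), '' );
      const digest = await md5Of ( filePath );

      return ( lock[ environment ] || {} )[ key ] !== digest;
    };
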
package/deployConf.js CHANGED
@@ -25,10 +25,10 @@ module.exports = {
  }
  },
  test: {
- bucket: 's3-test.eip.telegraph.co.uk', // The location you want to deploy to.
+ bucket: [ 's3-test.eip.telegraph.co.uk', 's3-test.eip.telegraph.co.uk' ], // The location you want to deploy to.
  cdnId: 'cf', // The prefix or identifier for the CDN. e.g. "cf", "cf-particle-html", etc
  environment: 'test', // The variant of environment.
- folder: 'testFolder', // The remote folder you which to upload to.
+ folder: [ 'testFolder', 'testFolder2' ], // The remote folder you which to upload to.
  targetFolder: 'test', // The local folder to use as a root for uploading. Remove this to use root folder.
  cache: {
  'max-age': 3600, // Specifies the maximum age of the resource in seconds on a user's device.
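
For reference, a complete version-2 deployConf.js entry using the new array form of bucket and folder might look like the sketch below. The bucket names, folders and targetFolder are illustrative placeholders, not values from this package; the required keys (version at the top level, plus bucket, folder and cache per environment) follow the checks in deploy.js:

    module.exports = {
      version: 2, // deploy.js exits unless version is exactly 2
      test: {
        bucket: [ 'example-bucket-a', 'example-bucket-b' ], // one upload pass per bucket
        folder: [ 'siteA', 'siteB' ],     // remote folder matched to each bucket by index
        cdnId: 'cf',                      // optional: triggers the CloudFront invalidation request
        environment: 'test',
        targetFolder: 'dist',             // local root to upload from; omit to use the project root
        cache: {
          'max-age': 3600,                // numeric values become "max-age=3600"
          'public': true                  // boolean true emits the bare directive "public"
        }
      }
    };
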
package/deployment.lock CHANGED
@@ -8,10 +8,11 @@
  "test/folder2/file1.txt": "d41d8cd98f00b204e9800998ecf8427e",
  "/test/file1.txt": "f5b5ed0c72f4e9984c3e7a8b18c08c2b",
  "/test/file2.txt": "d41d8cd98f00b204e9800998ecf8427e",
- "/test/folder1/file1.txt": "d41d8cd98f00b204e9800998ecf8427e",
+ "/test/folder1/file1.txt": "5d41402abc4b2a76b9719d911017c592",
  "/test/folder1/file2.txt": "d41d8cd98f00b204e9800998ecf8427e",
  "/test/folder2/file1.txt": "d41d8cd98f00b204e9800998ecf8427e",
- "/test/folder2/file2.txt": "d41d8cd98f00b204e9800998ecf8427e"
+ "/test/folder2/file2.txt": "d41d8cd98f00b204e9800998ecf8427e",
+ "/test/example.html": "47caf57861b1dd625f93f21871503582"
  },
  "prod": {
  "/test/file1.txt": "f5b5ed0c72f4e9984c3e7a8b18c08c2b",
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "eip-s3-deploy",
- "version": "2.2.0",
+ "version": "2.2.2",
  "description": "Deploy static websites to S3 - all files will be public",
  "main": "deploy.js",
  "bin": {
@@ -8,7 +8,7 @@
  },
  "preferGlobal": true,
  "scripts": {
- "test": "cd ./test && node ../deploy.js test"
+ "test": "node ./deploy.js test -f"
  },
  "repository": {
  "type": "git",
@@ -24,14 +24,10 @@
  "author": "Oliver Edgington <oliver.edgington@telegraph.co.uk>",
  "license": "ISC",
  "dependencies": {
- "aws-sdk": "^2.700.0",
  "cli-progress": "^3.8.2",
+ "eip-cloud-services": "^1.0.16",
  "handlebars": "^4.7.6",
- "lodash": "^4.17.15",
- "md5-file": "^5.0.0",
  "mime": "^2.4.6",
- "minimist": "^1.2.5",
- "node-fetch": "2.6.7",
  "recursive-readdir": "^2.2.2"
  }
  }
package/test/example.html CHANGED
@@ -26,7 +26,7 @@
  border-radius: 4px;
  }
 
- /* PrismJS 1.29.0
+ /* PrismJS 1.29
  https://prismjs.com/download.html#themes=prism-tomorrow&languages=markup+css+clike+javascript */
  code[class*=language-],pre[class*=language-]{color:#ccc;background:0 0;font-family:Consolas,Monaco,'Andale Mono','Ubuntu Mono',monospace;font-size:1em;text-align:left;white-space:pre;word-spacing:normal;word-break:normal;word-wrap:normal;line-height:1.5;-moz-tab-size:4;-o-tab-size:4;tab-size:4;-webkit-hyphens:none;-moz-hyphens:none;-ms-hyphens:none;hyphens:none}pre[class*=language-]{padding:1em;margin:.5em 0;overflow:auto}:not(pre)>code[class*=language-],pre[class*=language-]{background:#2d2d2d}:not(pre)>code[class*=language-]{padding:.1em;border-radius:.3em;white-space:normal}.token.block-comment,.token.cdata,.token.comment,.token.doctype,.token.prolog{color:#999}.token.punctuation{color:#ccc}.token.attr-name,.token.deleted,.token.namespace,.token.tag{color:#e2777a}.token.function-name{color:#6196cc}.token.boolean,.token.function,.token.number{color:#f08d49}.token.class-name,.token.constant,.token.property,.token.symbol{color:#f8c555}.token.atrule,.token.builtin,.token.important,.token.keyword,.token.selector{color:#cc99cd}.token.attr-value,.token.char,.token.regex,.token.string,.token.variable{color:#7ec699}.token.entity,.token.operator,.token.url{color:#67cdcc}.token.bold,.token.important{font-weight:700}.token.italic{font-style:italic}.token.entity{cursor:help}.token.inserted{color:green}
 
@@ -0,0 +1 @@
+ hello