eip-s3-deploy 2.2.0 → 2.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/deploy.js CHANGED
@@ -1,186 +1,281 @@
1
1
  #!/usr/bin/env node
2
2
 
3
- /**
4
- * --------------
5
- * DATE: 2020-06-18 @ 11:54
6
- * AUTHOR: ollie
7
- * ORIGINAL NAME: /./deploy
8
- * --------------
9
- * Created for the olivers-tools
10
- */
11
-
12
- const aws = require ( 'aws-sdk' );
13
3
  const path = require ( 'path' );
14
- const _ = require ( 'lodash' );
4
+ const { s3 } = require ( 'eip-cloud-services' );
15
5
  const fs = require ( 'fs' ).promises;
16
- const s3 = new aws.S3 ( { region: 'eu-west-1' } );
17
- const fetch = require ( 'node-fetch' );
6
+ const crypto = require ( 'crypto' );
18
7
  const recursive = require ( 'recursive-readdir' );
19
- const md5File = require ( 'md5-file' );
20
8
  const handlebars = require ( 'handlebars' );
21
9
  const mime = require ( 'mime' );
22
10
  const progress = require ( 'cli-progress' );
23
- const args = require ( 'minimist' ) ( process.argv.slice ( 2 ) );
11
+
12
+ const COMMAND_OPTIONS = [ '-f', '--force', '-ff', '--folderFilter', '-e', '--env', '-v', '--verbose' ];
13
+ const forceRedeploy = process.argv.includes ( '-f' ) || process.argv.includes ( '--force' );
14
+ const verboseOutput = process.argv.includes ( '-v' ) || process.argv.includes ( '--verbose' );
15
+
16
+ const getCommandOptions = ( options ) => {
17
+ const optionIndex = process.argv.findIndex ( arg => options.includes ( arg ) );
18
+ if ( optionIndex === -1 ) {
19
+ return null;
20
+ }
21
+
22
+ const option = process.argv[ optionIndex ];
23
+ const value = process.argv[ optionIndex + 1 ];
24
+
25
+ if ( !value || COMMAND_OPTIONS.includes ( value ) ) {
26
+ usage ( `Option "${option}" requires a value` );
27
+ }
28
+
29
+ return value;
30
+ };
31
+
32
+ const folderFilter = getCommandOptions ( [ '-ff', '--folderFilter' ] );
33
+ const environment = getCommandOptions ( [ '-e', '--env' ] ) || process.argv[ 2 ];
24
34
 
25
35
  const loadDeploymentLock = async () => fs.access ( process.cwd () + '/deployment.lock' )
26
36
  .then ( () => fs.readFile ( path.join ( process.cwd (), 'deployment.lock' ), 'utf8' ) )
27
- .then ( json => JSON.parse ( json )[ args._[ 0 ] ] || ( {} ) )
37
+ .then ( json => JSON.parse ( json )[ environment ] || ( {} ) )
28
38
  .catch ( () => ( {} ) );
29
39
 
30
40
  const writeDeploymentLock = async ( deploymentHash ) => fs.access ( process.cwd () + '/deployment.lock' )
31
41
  .then ( () => fs.readFile ( path.join ( process.cwd (), 'deployment.lock' ), 'utf8' ) )
32
42
  .then ( json => {
33
43
  const previousHash = JSON.parse ( json );
34
- previousHash[ args._[ 0 ] ] = previousHash[ args._[ 0 ] ] ? _.merge ( previousHash[ args._[ 0 ] ], deploymentHash ) : deploymentHash;
44
+ previousHash[ environment ] = previousHash[ environment ] ? { ...previousHash[ environment ], ...deploymentHash } : deploymentHash;
35
45
 
36
46
  return fs.writeFile ( path.join ( process.cwd (), 'deployment.lock' ), JSON.stringify ( previousHash, null, 2 ) );
37
47
  } )
38
48
  .catch ( () => {
39
49
  const newHash = {};
40
- newHash[ args._[ 0 ] ] = deploymentHash;
50
+ newHash[ environment ] = deploymentHash;
41
51
 
42
52
  return fs.writeFile ( path.join ( process.cwd (), 'deployment.lock' ), JSON.stringify ( newHash, null, 2 ) );
43
53
  } );
44
54
 
55
+ const calculateMD5 = async ( filePath ) => {
56
+ const content = await fs.readFile ( filePath );
57
+
58
+ return crypto.createHash ( 'md5' ).update ( content ).digest ( 'hex' );
59
+ };
60
+
45
61
  const deploy = async () => {
46
- const files = await fs.readdir ( process.cwd () );
47
- let deployConfName;
48
62
 
49
- if ( files.find ( f => f === 'deployConf.cjs' ) )
50
- deployConfName = 'deployConf.cjs';
51
- else
52
- deployConfName = 'deployConf.js';
63
+ if ( process.argv.length <= 2 || process.argv.includes ( '--help' ) || process.argv.includes ( '-h' ) || process.argv.includes ( 'help' ) ) {
64
+ showHelp ();
65
+
66
+ return;
67
+ }
68
+
69
+ try {
70
+ const files = await fs.readdir ( process.cwd () );
71
+ let deployConfName;
53
72
 
54
- const parentConfig = require ( path.join ( process.cwd (), deployConfName ) );
55
- const config = require ( path.join ( process.cwd (), deployConfName ) )[ args._[ 0 ] ];
56
- const promiseChain = {};
73
+ if ( files.find ( f => f === 'deployConf.cjs' ) )
74
+ deployConfName = 'deployConf.cjs';
75
+ else
76
+ deployConfName = 'deployConf.js';
57
77
 
58
- if ( !config ) {
59
- process.stdout.write ( '\x1b[38;5;196mThe environment specified cannot be found in the deployConfig.js file.\x1b[0m\n' );
60
- process.exit ( 0 );
61
- }
62
- if ( !parentConfig.version || parentConfig.version !== 2 ) {
63
- process.stdout.write ( '\x1b[38;5;196mThere is a deployConf.js file however it\'s not using the latest version please update your config to version 2.\x1b[0m\n' );
64
- process.stdout.write ( '\x1b[38;5;214mHere is an example deployConf.js to assist:\x1b[0m - https://cf.eip.telegraph.co.uk/testFolder/example.html\n' );
65
- process.exit ( 0 );
66
- }
67
- if ( ![ 'bucket', 'folder', 'cache' ].every ( key => Object.keys ( config ).includes ( key ) ) ){
68
- process.stdout.write ( '\x1b[38;5;214mThere is a deployConf.js file however it\'s not structured correctly, please make sure you\'ve got the correct attributes.\x1b[0m\n' );
69
- process.stdout.write ( '\x1b[38;5;190mHere is an example deployConf.js to assist:\x1b[0m - https://cf.eip.telegraph.co.uk/testFolder/example.html\n' );
70
- process.exit ( 0 );
71
- }
78
+ const parentConfig = require ( path.join ( process.cwd (), deployConfName ) );
79
+ const config = require ( path.join ( process.cwd (), deployConfName ) )[ environment ];
80
+ if ( typeof config.bucket === 'string' ) config.bucket = [ config.bucket ];
81
+ if ( typeof config.folder === 'string' ) config.folder = [ config.folder ];
82
+ const promiseChain = {};
83
+
84
+ if ( !config ) {
85
+ process.stdout.write ( '\x1b[38;5;196mThe environment specified cannot be found in the deployConfig.js file.\x1b[0m\n' );
86
+ process.exit ( 0 );
87
+ }
88
+ if ( !parentConfig.version || parentConfig.version !== 2 ) {
89
+ process.stdout.write ( '\x1b[38;5;196mThere is a deployConf.js file however it\'s not using the latest version please update your config to version 2.\x1b[0m\n' );
90
+ process.stdout.write ( '\x1b[38;5;214mHere is an example deployConf.js to assist:\x1b[0m - https://cf.eip.telegraph.co.uk/testFolder/example.html\n' );
91
+ process.exit ( 0 );
92
+ }
93
+ if ( ![ 'bucket', 'folder', 'cache' ].every ( key => Object.keys ( config ).includes ( key ) ) ){
94
+ process.stdout.write ( '\x1b[38;5;214mThere is a deployConf.js file however it\'s not structured correctly, please make sure you\'ve got the correct attributes.\x1b[0m\n' );
95
+ process.stdout.write ( '\x1b[38;5;190mHere is an example deployConf.js to assist:\x1b[0m - https://cf.eip.telegraph.co.uk/testFolder/example.html\n' );
96
+ process.exit ( 0 );
97
+ }
72
98
 
73
- process.stdout.write ( '-------------------\n' );
74
- process.stdout.write ( '\x1b[38;5;214mINITIATING DEPLOYMENT TO:\x1b[0m \x1b[38;5;190m' + args._[ 0 ] + '\n' );
75
- process.stdout.write ( '\x1b[38;5;214m📁 Copying contents of:\x1b[0m \x1b[38;5;190m' + ( config.targetFolder ? process.cwd () + '/' + config.targetFolder : process.cwd () ) + ( args._[ 1 ] ? '/' + args._[ 1 ] : '' ) + '\x1b[0m\n' );
76
- process.stdout.write ( '\x1b[38;5;214m🗑 to S3 Bucket/Folder:\x1b[0m \x1b[38;5;190m' + config.bucket + '/' + ( config.folder || '' ) + ( args._[ 1 ] ? args._[ 1 ] + '/' : '' ) + '\x1b[0m\n' );
77
- process.stdout.write ( '\x1b[38;5;214m☁️ CloudFront CDN Invalidation:\x1b[0m ' + ( config.cdnId ? '\x1b[38;5;190mYES (' + config.cdnId + ')' : '\x1b[38;5;9mNO' ) + '\x1b[0m\n' );
78
- if ( args._[ 1 ] ) process.stdout.write ( '\x1b[38;5;214m📂 Custom Deploy Folder Selected:\x1b[0m \x1b[38;5;190m' + args._[ 1 ] + '\x1b[0m\n' );
79
- if ( args.f ) {
80
99
  process.stdout.write ( '-------------------\n' );
81
- process.stdout.write ( '\x1b[38;5;196m🔥 FORCING DEPLOYMENT - Deployment.lock will be ignored. 🔥\x1b[0m\n' );
82
- }
83
- process.stdout.write ( '-------------------\n' );
100
+ process.stdout.write ( '\x1b[38;5;214mINITIATING FILE DEPLOYMENT TO:\x1b[0m \x1b[38;5;190m' + environment + '\n' );
101
+ process.stdout.write ( '\x1b[38;5;214m📁 Copying contents of:\x1b[0m \x1b[38;5;190m' + ( config.targetFolder ? process.cwd () + '/' + config.targetFolder : process.cwd () ) + ( folderFilter ? '/' + folderFilter : '' ) + '\x1b[0m\n' );
102
+ config.bucket.forEach ( ( bucket, i ) => {
103
+ process.stdout.write ( '\x1b[38;5;214m🗑 to S3 Bucket/Folder:\x1b[0m \x1b[38;5;190m' + bucket + '/' + ( config.folder[ i ] || '' ) + ( folderFilter ? '/' + folderFilter + '/' : '' ) + '\x1b[0m\n' );
104
+ } );
105
+ process.stdout.write ( '\x1b[38;5;214m☁️ CloudFront CDN Invalidation:\x1b[0m ' + ( config.cdnId ? '\x1b[38;5;190mYES (' + config.cdnId + ')' : '\x1b[38;5;9mNO' ) + '\x1b[0m\n' );
106
+ if ( folderFilter ) {
107
+ process.stdout.write ( '-------------------\n' );
108
+ process.stdout.write ( '\x1b[38;5;214m📂 Folder Filter Applied:\x1b[0m \x1b[38;5;190m' + folderFilter + '\x1b[0m\n' );
109
+ }
110
+ if ( forceRedeploy ) {
111
+ process.stdout.write ( '-------------------\n' );
112
+ process.stdout.write ( '\x1b[38;5;196m🔥 FORCING DEPLOYMENT - deployment.lock will be ignored. 🔥\x1b[0m\n' );
113
+ }
114
+ process.stdout.write ( '-------------------\n' );
84
115
 
85
- let scanDirectory = path.resolve ( config.targetFolder && config.targetFolder !== '' ? `${ process.cwd () }/${ config.targetFolder }` : process.cwd () );
86
- if ( args._[ 1 ] ) scanDirectory = path.join ( scanDirectory, args._[ 1 ] );
116
+ const scanDirectory = path.resolve ( config.targetFolder && config.targetFolder !== '' ? `${ process.cwd () }/${ config.targetFolder }` : process.cwd () );
117
+
118
+ let scannedFiles = await recursive ( scanDirectory, [ ...( config.omit || [] ), 'deployment.lock' ] );
119
+ const deploymentLock = await loadDeploymentLock ();
87
120
 
88
- const scannedFiles = await recursive ( scanDirectory, _.concat ( config.omit || [], [ 'deployment.lock' ] ) );
89
- const deploymentLock = await loadDeploymentLock ();
121
+ if ( folderFilter ) scannedFiles = scannedFiles.filter ( filepath => filepath.includes ( `${ scanDirectory }/${ folderFilter }` ) );
90
122
 
91
- promiseChain.changedFiles = promiseChain.newFiles = 0;
92
- promiseChain.deploymentHash = {};
123
+ promiseChain.changedFiles = promiseChain.newFiles = 0;
124
+ promiseChain.deploymentHash = {};
93
125
 
94
- const filesToUpload = _.compact ( _.map ( scannedFiles, filename => {
95
- promiseChain.deploymentHash[ _.replace ( filename, process.cwd (), '' ) ] = md5File.sync ( filename );
96
- if ( _.has ( deploymentLock, _.replace ( filename, process.cwd (), '' ) ) ) {
97
- if ( deploymentLock[ _.replace ( filename, process.cwd (), '' ) ] === promiseChain.deploymentHash[ _.replace ( filename, process.cwd (), '' ) ] ) {
98
- return null;
99
- }
100
- promiseChain.changedFiles++;
126
+ const filesToUpload = ( await Promise.all ( scannedFiles.map ( async filename => {
127
+ const md5Hash = await calculateMD5 ( filename );
128
+ promiseChain.deploymentHash[ filename.replace ( process.cwd (), '' ) ] = md5Hash;
129
+ if ( Object.prototype.hasOwnProperty.call ( deploymentLock, filename.replace ( process.cwd (), '' ) ) ) {
130
+ if ( deploymentLock[ filename.replace ( process.cwd (), '' ) ] === md5Hash ) {
131
+ if ( verboseOutput ) process.stdout.write ( '\x1b[38;5;245mNo Change: ' + filename + '\x1b[0m\n' );
132
+
133
+ return null;
134
+ }
135
+ if ( verboseOutput ) process.stdout.write ( '\x1b[38;5;172mChange: ' + filename + '\x1b[0m\n' );
136
+ promiseChain.changedFiles++;
101
137
 
102
- return filename;
103
- }
104
- promiseChain.newFiles++;
138
+ return filename;
139
+ }
140
+ if ( verboseOutput ) process.stdout.write ( '\x1b[38;5;64mChange: ' + filename + '\x1b[0m\n' );
141
+ promiseChain.newFiles++;
105
142
 
106
- return filename;
107
- } ) );
108
-
109
- process.stdout.write ( `📗 ${ args.f ? scannedFiles.length : promiseChain.newFiles } new files | 📙 ${ args.f ? 0 : promiseChain.changedFiles } updated files | 📕 ${ args.f ? 0 : scannedFiles.length - filesToUpload.length } unchanged files\n` );
110
- process.stdout.write ( '-------------------\n' );
143
+ return filename;
144
+ } ) ) ).filter ( Boolean );
145
+ if ( verboseOutput ) process.stdout.write ( '-------------------\n' );
111
146
 
112
- if ( ( args.f ? scannedFiles.length : filesToUpload.length ) === 0 ) {
113
- process.stdout.write ( '\x1b[38;5;190mNothing to deploy\x1b[0m (you can force redeploy using the \'-f\' option)\n' );
147
+ process.stdout.write ( `📗 ${ forceRedeploy ? scannedFiles.length : promiseChain.newFiles } new files | 📙 ${ forceRedeploy ? 0 : promiseChain.changedFiles } updated files | 📕 ${ forceRedeploy ? 0 : scannedFiles.length - filesToUpload.length } unchanged files\n` );
114
148
  process.stdout.write ( '-------------------\n' );
149
+
150
+ if ( ( forceRedeploy ? scannedFiles.length : filesToUpload.length ) === 0 ) {
151
+ process.stdout.write ( '\x1b[38;5;190mNothing to deploy\x1b[0m (you can force redeploy using the \'-f\' option)\n' );
152
+ process.stdout.write ( '-------------------\n' );
115
153
 
116
- return;
117
- }
154
+ return;
155
+ }
118
156
 
119
- const progressBar = new progress.SingleBar ( {
120
- format: '\x1b[38;5;214m🚀 Deploying [\x1b[38;5;190m{bar}\x1b[0m\x1b[38;5;214m] {percentage}% | ETA: {eta}s | {value}/{total}',
121
- barCompleteChar: '#',
122
- barIncompleteChar: '-',
123
- barGlue: '\x1b[38;5;214m',
124
- etaBuffer: 10000,
125
- etaAsynchronousUpdate: true
126
- } );
127
- progressBar.start ( args.f ? scannedFiles.length : filesToUpload.length );
157
+ const progressBar = new progress.SingleBar ( {
158
+ format: '\x1b[38;5;214m🚀 Deploying [\x1b[38;5;190m{bar}\x1b[0m\x1b[38;5;214m] {percentage}% | ETA: {eta}s | {value}/{total}',
159
+ barCompleteChar: '#',
160
+ barIncompleteChar: '-',
161
+ barGlue: '\x1b[38;5;214m',
162
+ etaBuffer: 10000,
163
+ etaAsynchronousUpdate: true
164
+ } );
165
+ if ( !verboseOutput ) progressBar.start ( ( forceRedeploy ? scannedFiles.length : filesToUpload.length ) * config.bucket.length );
128
166
 
129
- await Promise.all ( _.map ( args.f ? scannedFiles : filesToUpload, filename => fs.readFile ( filename )
130
- .then ( file => {
131
- let body = file;
167
+ await Promise.all ( config.bucket.map ( async ( bucket, i ) => {
168
+ await Promise.all ( ( forceRedeploy ? scannedFiles : filesToUpload ).map ( filename => fs.readFile ( filename )
169
+ .then ( file => {
170
+ let body = file;
132
171
 
133
- try {
134
- body = config.data ? handlebars.compile ( file.toString ( 'utf8' ) ) ( config.data ) : file;
135
- }
136
- catch ( e ) {
137
- //do nothing
138
- }
172
+ try {
173
+ body = config.data ? handlebars.compile ( file.toString ( 'utf8' ) ) ( config.data ) : file;
174
+ }
175
+ catch ( e ) {
176
+ // do nothing
177
+ }
139
178
 
140
- let key = ( config.folder ? `${ config.folder }` : '' ) + ( config.targetFolder && config.targetFolder !== '' ? _.replace ( _.replace ( filename, process.cwd (), '' ), `${ config.targetFolder }/`, '' ) : _.replace ( filename, process.cwd (), '' ) );
141
- key = key.replace ( /\\/g, '/' );
142
- if ( key.indexOf ( '/' ) === 0 ){
143
- key = key.substr ( 1 );
144
- }
145
- if ( _.lastIndexOf ( key, '/' ) === key.length - 1 ){
146
- key = key.substr ( 0, key.length - 1 );
147
- }
179
+ let key = ( config.folder[ i ] ? `${config.folder[ i ]}` : '' ) +
180
+ ( config.targetFolder && config.targetFolder !== '' ? filename.replace ( process.cwd (), '' ).replace ( `${config.targetFolder}/`, '' ) : filename.replace ( process.cwd (), '' ) );
181
+ key = key.replace ( /\\/g, '/' );
182
+ if ( key.startsWith ( '/' ) ) {
183
+ key = key.substring ( 1 );
184
+ }
185
+ if ( key.endsWith ( '/' ) ) {
186
+ key = key.slice ( 0, -1 );
187
+ }
188
+
189
+ if ( verboseOutput ) process.stdout.write ( '\x1b[38;5;172mUploading to S3: ' + filename + '\x1b[0m\n' );
190
+
191
+ return s3.set ( key, body, {
192
+ bucket: bucket,
193
+ contentType: mime.getType ( filename ),
194
+ cacheControl: Object.keys ( config.cache )
195
+ .map ( cacheKey => typeof config.cache[ cacheKey ] === 'boolean' ?
196
+ config.cache[ cacheKey ] ? cacheKey : null :
197
+ config.cache[ cacheKey ] ? `${cacheKey}=${config.cache[ cacheKey ]}` : null )
198
+ .filter ( n => n )
199
+ .join ( ', ' )
200
+ } ).then ( () => !verboseOutput && progressBar.increment () );
201
+ } )
202
+ ) );
203
+ } ) );
204
+
205
+ if ( !verboseOutput ) progressBar.stop ();
206
+
207
+ if ( config.cdnId ) {
208
+ if ( verboseOutput ) process.stdout.write ( '\x1b[38;5;172mRequesting invalidation: ' + [ config.folder ? '/' + config.folder + '/*' : '/*' ] + '\x1b[0m\n' );
209
+ await fetch ( 'https://tools.eip.telegraph.co.uk/v1/invalidate', {
210
+ method: 'POST',
211
+ headers: {
212
+ 'Content-Type': 'application/json'
213
+ },
214
+ body: JSON.stringify ( {
215
+ cdn: config.cdnId,
216
+ key: config.folder.map ( folder => folder ? '/' + folder + '/*' : '/*' ),
217
+ environment: config?.environment || undefined
218
+ } )
219
+ } );
220
+ process.stdout.write ( '-------------------\n' );
221
+ config.folder.forEach ( folder => {
222
+ process.stdout.write ( '📤 \x1b[38;5;190mCloudFront Cache Cleared\x1b[0m | Invalidation: ' + ( folder ? '/' + folder + '/*' : '/*' ) + '\n' );
223
+ } );
224
+ }
225
+
226
+ // This should be done after everything is complete.
227
+ await writeDeploymentLock ( promiseChain.deploymentHash );
228
+
229
+ const user = process.env.PAPI_USER || 'Unknown';
230
+ const machine = process.env.PAPI_MACHINE || 'Unknown';
231
+ const deploymentReport = [ {
232
+ project: config.folder,
233
+ context: config.bucket,
234
+ user: `${user} - ${machine}`,
235
+ environment: environment,
236
+ timestamp: Date.now ()
237
+ } ];
238
+ if ( deploymentReport.length > 0 ){
239
+ await fetch ( 'https://tools.eip.telegraph.co.uk/v1/healthcheck/deployment', {
240
+ method: 'POST',
241
+ headers: {
242
+ 'Content-Type': 'application/json'
243
+ },
244
+ body: JSON.stringify ( { deployments: deploymentReport } )
245
+ } );
246
+ }
148
247
 
149
- return s3.putObject ( {
150
- Bucket: config.bucket,
151
- Key: key,
152
- ACL: 'public-read',
153
- Body: body,
154
- CacheControl: Object.keys ( config.cache ).map ( cacheKey => typeof config.cache[ cacheKey ] === 'boolean' ? config.cache[ cacheKey ] ? cacheKey : null : config.cache[ cacheKey ] ? `${cacheKey}=${config.cache[ cacheKey ]}` : null ).filter ( n => n ).join ( ', ' ),
155
- ContentType: mime.getType ( filename )
156
- } ).promise ().then ( () => progressBar.increment () );
157
-
158
- } ) ) );
159
-
160
- progressBar.stop ();
161
-
162
- if ( config.cdnId ) {
163
- await fetch ( 'https://tools.eip.telegraph.co.uk/v1/invalidate', {
164
- method: 'POST',
165
- headers: {
166
- 'Content-Type': 'application/json'
167
- },
168
- body: JSON.stringify ( {
169
- cdn: config.cdnId,
170
- key: [ config.folder ? '/' + config.folder + '/*' : '/*' ],
171
- environment: config?.environment || undefined
172
- } )
173
- } );
174
248
  process.stdout.write ( '-------------------\n' );
175
- process.stdout.write ( '📤 \x1b[38;5;190mCloudFront Cache Cleared\x1b[0m | Invalidation: ' + ( config.folder ? '/' + config.folder + '/*' : '/*' ) + '\n' );
249
+ process.stdout.write ( ' \x1b[38;5;190mDeployment completed.\x1b[0m\n' );
250
+ process.stdout.write ( '-------------------\n' );
176
251
  }
252
+ catch ( error ){
253
+ console.log ( error );
254
+ showHelp ();
255
+
256
+ return;
257
+ }
258
+ };
177
259
 
178
- // This should be done after everything is complete.
179
- await writeDeploymentLock ( promiseChain.deploymentHash );
180
-
181
- process.stdout.write ( '-------------------\n' );
182
- process.stdout.write ( '✅ \x1b[38;5;190mDeployment completed.\x1b[0m\n' );
183
- process.stdout.write ( '-------------------\n' );
260
+ const showHelp = () => {
261
+ console.log ( 'S3 Deploy Script' );
262
+ console.log ( 'Usage:' );
263
+ console.log ( ' s3-deploy <environment_name> [-f|--force] [-ff|--folderFilter <folder_filter>] [-e|--env <environment_name>]' );
264
+ console.log ( 'Options:' );
265
+ console.log ( ' -f, --force Force redeployment of files' );
266
+ console.log ( ' -ff, --folderFilter Specify a single folder to deploy' );
267
+ console.log ( ' -e, --env Specify the environment for deployment' );
268
+ console.log ( ' -v, --verbose Print more output' );
269
+ console.log ( 'Examples:' );
270
+ console.log ( ' s3-deploy test' );
271
+ console.log ( ' s3-deploy production --force' );
272
+ console.log ( ' s3-deploy test -ff myFolder' );
273
+ console.log ( ' s3-deploy -ff myFolder -e prod -f' );
274
+ console.log ( ' s3-deploy --folderFilter myFolder -e test -f' );
275
+ process.exit ( 0 );
184
276
  };
185
277
 
186
- deploy ();
278
+ deploy ().catch ( ( error ) => {
279
+ console.error ( `ERROR: ${error}` );
280
+ console.log ( error );
281
+ } );
package/deployConf.js CHANGED
@@ -25,10 +25,10 @@ module.exports = {
25
25
  }
26
26
  },
27
27
  test: {
28
- bucket: 's3-test.eip.telegraph.co.uk', // The location you want to deploy to.
28
+ bucket: [ 's3-test.eip.telegraph.co.uk', 's3-test.eip.telegraph.co.uk' ], // The location you want to deploy to.
29
29
  cdnId: 'cf', // The prefix or identifier for the CDN. e.g. "cf", "cf-particle-html", etc
30
30
  environment: 'test', // The variant of environment.
31
- folder: 'testFolder', // The remote folder you which to upload to.
31
+ folder: [ 'testFolder', 'testFolder2' ], // The remote folder you wish to upload to.
32
32
  targetFolder: 'test', // The local folder to use as a root for uploading. Remove this to use root folder.
33
33
  cache: {
34
34
  'max-age': 3600, // Specifies the maximum age of the resource in seconds on a user's device.
package/deployment.lock CHANGED
@@ -8,10 +8,11 @@
8
8
  "test/folder2/file1.txt": "d41d8cd98f00b204e9800998ecf8427e",
9
9
  "/test/file1.txt": "f5b5ed0c72f4e9984c3e7a8b18c08c2b",
10
10
  "/test/file2.txt": "d41d8cd98f00b204e9800998ecf8427e",
11
- "/test/folder1/file1.txt": "d41d8cd98f00b204e9800998ecf8427e",
11
+ "/test/folder1/file1.txt": "5d41402abc4b2a76b9719d911017c592",
12
12
  "/test/folder1/file2.txt": "d41d8cd98f00b204e9800998ecf8427e",
13
13
  "/test/folder2/file1.txt": "d41d8cd98f00b204e9800998ecf8427e",
14
- "/test/folder2/file2.txt": "d41d8cd98f00b204e9800998ecf8427e"
14
+ "/test/folder2/file2.txt": "d41d8cd98f00b204e9800998ecf8427e",
15
+ "/test/example.html": "d3ee67eb6e0f33d59c944fb4786359f3"
15
16
  },
16
17
  "prod": {
17
18
  "/test/file1.txt": "f5b5ed0c72f4e9984c3e7a8b18c08c2b",
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "eip-s3-deploy",
3
- "version": "2.2.0",
3
+ "version": "2.2.1",
4
4
  "description": "Deploy static websites to S3 - all files will be public",
5
5
  "main": "deploy.js",
6
6
  "bin": {
@@ -8,7 +8,7 @@
8
8
  },
9
9
  "preferGlobal": true,
10
10
  "scripts": {
11
- "test": "cd ./test && node ../deploy.js test"
11
+ "test": "node ./deploy.js test -f"
12
12
  },
13
13
  "repository": {
14
14
  "type": "git",
@@ -24,14 +24,10 @@
24
24
  "author": "Oliver Edgington <oliver.edgington@telegraph.co.uk>",
25
25
  "license": "ISC",
26
26
  "dependencies": {
27
- "aws-sdk": "^2.700.0",
28
27
  "cli-progress": "^3.8.2",
28
+ "eip-cloud-services": "^1.0.16",
29
29
  "handlebars": "^4.7.6",
30
- "lodash": "^4.17.15",
31
- "md5-file": "^5.0.0",
32
30
  "mime": "^2.4.6",
33
- "minimist": "^1.2.5",
34
- "node-fetch": "2.6.7",
35
31
  "recursive-readdir": "^2.2.2"
36
32
  }
37
33
  }
@@ -0,0 +1 @@
1
+ hello