data-exporter 1.3.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,164 @@
1
require 'erb'
require 'yaml'
require 'tmpdir'
require 'active_support/core_ext/hash'

module DataExporter
  # Runtime configuration for the data exporter. Settings are read from a
  # YAML (optionally ERB-preprocessed) config file, organized into named
  # "mode" sections, and may be partially overridden per invocation via
  # the options passed to #load.
  class Configuration
    # Option keys that #load accepts as per-invocation overrides.
    ALLOWED_OVERRIDES = %w(mode export_dir download_dir unpack_dir pii_file csv).freeze

    def initialize
      reset!
    end

    # Drops the loaded config file, all overrides, and all sections.
    def reset!
      @config_file = nil
      @override = HashWithIndifferentAccess.new
      # Auto-vivify one section hash per mode so accessors never see nil.
      @sections = HashWithIndifferentAccess.new { |h, k| h[k] = HashWithIndifferentAccess.new }
    end

    # Merges +h+ into the current mode's mysql settings.
    def database=(h = {})
      @sections[mode][:mysql] ||= HashWithIndifferentAccess.new
      @sections[mode][:mysql].merge!(h)
    end

    # MySQL settings for the current mode (empty hash when unconfigured).
    def database
      @sections[mode][:mysql] ||= HashWithIndifferentAccess.new
      @sections[mode][:mysql]
    end
    alias :mysql :database

    def mysqldump_path
      @sections[mode].fetch(:mysqldump_path, 'mysqldump')
    end

    # Extra mysqldump CLI flags as an argv-style array.
    def mysqldump_options
      @sections[mode].fetch(:mysqldump_options, '').split(/\s+/).compact
    end

    # S3 settings; normalizes :bucket_name by stripping any leading s3://
    # scheme and mirrors it under :bucket for the AWS client.
    def s3
      @sections[mode][:s3] ||= HashWithIndifferentAccess.new
      @sections[mode][:s3].tap do |s3|
        s3[:bucket_name].sub!(%r{\As3://}, '') if s3[:bucket_name]
        s3[:bucket] = s3[:bucket_name]
      end
    end

    # SFTP settings; defaults :user to the current shell user.
    def sftp
      @sections[mode][:sftp] ||= HashWithIndifferentAccess.new
      @sections[mode][:sftp].tap do |sftp|
        sftp[:user] ||= ENV['USER']
      end
    end

    def redis
      @sections[mode][:redis] ||= HashWithIndifferentAccess.new
      @sections[mode][:redis]
    end

    # Active section name; :default unless overridden via #load.
    def mode
      @override.fetch(:mode, :default)
    end

    # export_dir / download_dir / unpack_dir resolve from the override first,
    # then the config section, then fall back to a fresh temp directory.
    # NOTE: the temp dir must be created lazily (block form of #fetch) —
    # the old positional-default form ran Dir.mktmpdir on every call, leaking
    # an unused temp directory even when the key was configured.
    %w(export_dir download_dir unpack_dir).each do |dir|
      define_method(dir) do
        @override[dir] || @sections[mode].fetch(dir) { Dir.mktmpdir('data_exporter') }
      end
    end

    def backup_dir
      @sections[mode][:backup_dir]
    end

    # Path to the encryption key, resolved relative to the config file.
    def backup_key
      absolute_config_path(@sections[mode][:backup_key])
    end

    # Explicit backup_prefix wins; otherwise fall back to the s3 prefix.
    # Guard against a missing s3 section instead of raising NoMethodError.
    def backup_prefix
      @sections[mode][:backup_prefix] || (@sections[mode][:s3] || {})[:prefix]
    end

    def archive_base_directory
      @sections[mode].fetch(:archive_base_directory, 'data_exporter')
    end

    def pii_file
      @override[:pii_file] || @sections[mode][:pii_file]
    end

    # PII column names for +table+, or [] when no PII file is configured.
    # The PII YAML is parsed once and memoized.
    def pii_fields(table)
      return [] unless pii_file
      @pii_data ||= load_yaml_file(pii_file)
      @pii_data.fetch(table, []).map(&:to_s)
    end

    def csv_enabled?
      @override[:csv] || @sections[mode][:export_csv]
    end

    def sftp_enabled?
      sftp && sftp[:host] && sftp[:user]
    end

    # Loads the config file, captures allowed overrides, and validates.
    # Returns self for chaining.
    def load(options = {})
      @config_file = options[:config_file]
      # ALLOWED_OVERRIDES holds strings, so comparing on k.to_s matches both
      # string and symbol option keys (the old k.to_sym branch was dead code).
      @override.merge!(options.select { |k, _| ALLOWED_OVERRIDES.include?(k.to_s) })
      process_configuration(load_yaml_file(@config_file))
      validate!(options)
      self
    end

    # Raises ArgumentError when a required section is missing or incomplete.
    # S3 is only required when SFTP is not configured as the transport.
    def validate!(options = {})
      raise ArgumentError, "#{@config_file} missing s3 section" if !sftp_enabled? && !s3_config_complete?
      raise ArgumentError, "#{@config_file} missing mysql section" if options[:mysql_required] && !database_config_complete?
      raise ArgumentError, "#{@config_file} missing redis section" if options[:redis_key_prefix] && !redis_config_complete?
    end

    private

    # Directory containing the config file.
    def config_path
      File.expand_path('..', @config_file)
    end

    # Returns +path+ unchanged when absolute, otherwise joins it onto the
    # config file's directory.
    def absolute_config_path(path)
      if File.absolute_path(path) == path
        path
      else
        File.join(config_path, path)
      end
    end

    # Parses YAML from stdin ('-'), an ERB template (*.yml.erb), or a plain
    # YAML file, always returning a HashWithIndifferentAccess.
    def load_yaml_file(yaml_file)
      case yaml_file
      when '-'
        YAML.load(STDIN.read)
      when /yml.erb\z/
        YAML.load(ERB.new(File.read(yaml_file)).result)
      else
        YAML.load_file(yaml_file)
      end.with_indifferent_access
    end

    # A config file either defines sections keyed by mode (detected via a
    # 'default' hash) or is one flat section stored under :default.
    def process_configuration(config_hash)
      return process_configuration_with_sections(config_hash) if config_hash['default'].is_a?(Hash)
      @sections[:default] = config_hash
    end

    def process_configuration_with_sections(config_hash)
      config_hash.each do |section, config|
        @sections[section] = config
      end
    end

    def database_config_complete?
      database && database[:username] && database[:database]
    end

    def s3_config_complete?
      s3 && s3[:bucket] && s3[:access_key_id] && s3[:secret_access_key]
    end

    def redis_config_complete?
      redis && redis[:host] && redis[:port]
    end
  end
end
@@ -0,0 +1,3 @@
1
module DataExporter
  # Gem version string (kept in sync with the gemspec/package release).
  VERSION = '1.3.7'
end
@@ -0,0 +1,50 @@
1
require 'spec_helper'

describe DataExporter::Actions do
  # Host the mixin on an anonymous class so the actions are exercised
  # in isolation from any concrete includer.
  let(:instance) do
    Class.new do
      include DataExporter::Actions
    end.new
  end

  describe '#find_last_sftp_backup' do
    let(:prefix) { 'data-export' }
    let(:suffix) { 'sql.gz.enc' }

    subject do
      instance.find_last_sftp_backup(prefix, suffix)
    end

    context 'with --sftp' do
      # Fixture listing of remote backups; the entry with the highest mtime
      # (remote_files.last, mtime 300) is the expected winner.
      let(:remote_files) do
        [
          OpenStruct.new(name: '/data_export-2013-05-05_db.sql.gz.enc', mtime: 100, size: 10),
          OpenStruct.new(name: '/data_export-2013-05-06_db.sql.gz.enc', mtime: 200, size: 20),
          OpenStruct.new(name: '/data_export-2013-05-06_db.sql.gz.enc', mtime: 300, size: 10)
        ]
      end

      before do
        # One SFTP file double per fixture entry; :attributes must be read
        # at least once (mtime drives the selection), :name at most once.
        sftp_file_doubles = []
        remote_files.each do |remote_file|
          sftp_file_double = double
          expect(sftp_file_double).to receive(:name).at_most(:once).and_return(remote_file.name)
          expect(sftp_file_double).to receive(:attributes).at_least(:once).and_return(double(:mtime => remote_file.mtime, :size => remote_file.size))
          sftp_file_doubles << sftp_file_double
        end

        # Yield the files in reverse order to prove the method picks by
        # mtime rather than relying on the listing order.
        sftp_dir_double = double
        expect(sftp_dir_double).to receive(:glob).with('/', 'data-export*sql.gz.enc').
          and_yield(sftp_file_doubles[2]).
          and_yield(sftp_file_doubles[1]).
          and_yield(sftp_file_doubles[0])

        sftp_session_double = double
        expect(sftp_session_double).to receive(:dir).and_return(sftp_dir_double)
        expect(instance).to receive(:sftp).and_return(sftp_session_double)
      end

      it { is_expected.to eq(remote_files.last) }
    end
  end
end
@@ -0,0 +1,430 @@
1
+ require 'spec_helper'
2
+
3
+ describe DataExporter::CLI do
4
+ include DataExporter::Actions
5
+
6
  # Stubs the AWS S3 bucket so an upload of +remote_file+ is expected:
  # exactly one #write on the object addressed by that key.
  def stub_s3_upload(remote_file)
    s3_object_double = double
    expect(s3_object_double).to receive(:write).once

    s3_objects_double = double('s3_objects')
    expect(s3_objects_double).to receive(:[]).with(remote_file).and_return(s3_object_double)

    expect_any_instance_of(AWS::S3::Bucket).to receive(:objects).and_return(s3_objects_double)
  end

  # Stubs the bucket listing so a prefix search finds a single backup object
  # named +remote_file+ (no download expected — used by the status task).
  def stub_s3_find_last(remote_file)
    s3_object_double = double('s3_object', :key => remote_file, :last_modified => Time.now.to_i, :content_length => 1024)

    s3_objects_double = double('s3_objects')
    expect(s3_objects_double).to receive(:with_prefix).and_return([s3_object_double])

    expect_any_instance_of(AWS::S3::Bucket).to receive(:objects).and_return(s3_objects_double)
  end

  # Stubs the bucket so a prefix search under backup_dir/backup_prefix finds
  # +remote_file+ and reading it streams the bytes of the local +local_file+.
  def stub_s3_download(local_file, remote_file)
    s3_object_double = double('s3_object', :key => remote_file, :last_modified => Time.now.to_i, :content_length => 1024)
    expect(s3_object_double).to receive(:read).and_yield(File.read(local_file))

    s3_objects_double = double('s3_objects')
    expect(s3_objects_double).to receive(:with_prefix).with(File.join(backup_dir, backup_prefix)).and_return([s3_object_double])

    expect_any_instance_of(AWS::S3::Bucket).to receive(:objects).and_return(s3_objects_double)
  end
34
+
35
  # Scratch directories and fixed names shared by every CLI example.
  let(:export_dir) { Dir.mktmpdir('export') }
  let(:backup_dir) { 'backups' }
  let(:unpack_dir) { Dir.mktmpdir('unpack') }
  let(:download_dir) { Dir.mktmpdir('download') }
  let(:backup_key) { File.expand_path('../fixtures/backup_key', __FILE__) }
  let(:backup_prefix) { 'data_export' }
  let(:archive_base_directory) { 'data_exporter' }
  let(:sftp_host) { 'localhost' }
  let(:sftp_user) { ENV['USER'] }
  let(:archive_dir) { Dir.mktmpdir('archive') }

  let(:mysql_config) do
    {
      adapter: 'mysql2',
      host: 'localhost',
      database: 'centurion_test',
      username: 'root'
    }
  end

  let(:s3_config) do
    {
      access_key_id: 'spec',
      secret_access_key: 'spec',
      bucket_name: 'spec',
      prefix: backup_prefix
    }
  end

  let(:redis_config) do
    {
      host: 'localhost',
      port: 6379
    }
  end

  # Empty by default; contexts override this to enable the SFTP transport.
  let(:sftp_config) { {} }

  # Assembled into the YAML config file consumed via --config-file.
  let(:config_hash) do
    {
      export_dir: export_dir,
      backup_dir: backup_dir,
      backup_key: backup_key,
      unpack_dir: unpack_dir,
      download_dir: download_dir,
      mysqldump_options: '--extended-insert --single-transaction',
      mysql: mysql_config,
      redis: redis_config,
      s3: s3_config,
      sftp: sftp_config
    }
  end

  # Table => PII columns mapping for the --pii-file examples.
  let(:pii_hash) do
    {
      :users => [:last_name, :email]
    }
  end

  # Writes config_hash to a temp YAML file; note the let returns the File
  # object, so callers use config_file.path.
  let(:config_file) do
    File.open(File.join(Dir.mktmpdir('data_exporter'), 'config.yml'), 'w') do |file|
      file.write config_hash.to_yaml
      file.flush
    end
  end

  let(:pii_file) do
    File.open(File.join(Dir.mktmpdir('data_exporter'), 'pii.yml'), 'w') do |file|
      file.write pii_hash.to_yaml
      file.flush
    end
  end

  before do
    DataExporter.configure do |config|
      config.reset!
      config.database = {
        adapter: 'mysql2',
        host: 'localhost',
        database: 'centurion_test',
        username: 'root'
      }
    end
  end

  after do
    FileUtils.rm(config_file)
    FileUtils.rm(pii_file)
    FileUtils.rm_rf(export_dir)
    FileUtils.rm_rf(unpack_dir)
    FileUtils.rm_rf(download_dir)
  end

  # Directory listing of the unpacked archive (includes '.' and '..').
  let(:archive_contents) { Dir.entries(unpack_dir) }
129
+
130
  describe '#export_task' do
    let(:options) { [] }

    # Pin the clock so generated archive names are deterministic.
    before do
      Timecop.freeze('2013-04-20')
    end

    let(:encrypted_archive_base_name) { 'data_export_2013-04-20-00-00_db.sql.gz.enc' }
    let(:encrypted_archive) { File.join(export_dir, encrypted_archive_base_name) }
    let(:remote_encrypted_archive) { File.join(backup_dir, encrypted_archive_base_name) }

    subject do
      DataExporter::CLI.start(['export', *options, '--quiet', '--preserve', '--config-file', config_file.path])
    end

    # Shared assertions: unpack_dir holds '.', '..' and one sql dump file.
    shared_examples 'mysqldump export' do
      it 'exports a sql file' do
        expect(archive_contents.size).to eq(3)
        expect(archive_contents.last).to match(/data_export.*_db.sql/)
        backup_contents = File.read(File.join(unpack_dir, archive_contents.last)).split("\n")
        expect(backup_contents[0]).to match(/\A-- MySQL dump/)
      end
    end

    context 'with default options' do
      before do
        stub_s3_upload(remote_encrypted_archive)
        subject
        unpack(encrypted_archive, unpack_dir)
      end

      it_behaves_like 'mysqldump export'

      # Without a backup_dir the S3 key is the bare archive name.
      context 'without config.backup_dir' do
        let(:backup_dir) { nil }
        let(:remote_encrypted_archive) { encrypted_archive_base_name }

        it_behaves_like 'mysqldump export'
      end

      context 'with --csv option' do
        let(:options) { ['--csv'] }

        let(:encrypted_archive_base_name) { 'data_export_2013-04-20-00-00_db.csv.tar.gz.enc' }

        it 'exports an archive of exported csv files' do
          expect(archive_contents).to include 'users_1.csv'
          expect(archive_contents).to_not include 'schema_migrations_1.csv'
          expect(archive_contents).to_not include 'checksums_1.csv'
        end

        it 'contains a user export csv file' do
          backup_contents = File.read(File.join(unpack_dir, 'users_1.csv')).split("\n")
          expect(backup_contents[0]).to eq("id,username,first_name,last_name,email,created_at,updated_at")
          expect(backup_contents[1]).to match(/\A1,,Emily,James,emily@socialcast\.com.*/)
          expect(backup_contents[2]).to match(/\A2,,Jennifer,Lawson,jennifer@socialcast\.com.*/)
        end
      end

      context 'with --csv and --pii-file option' do
        let(:options) { ['--csv', '--pii-file', pii_file] }

        let(:encrypted_archive_base_name) { 'data_export_2013-04-20-00-00_db.csv.tar.gz.enc' }

        it 'exports an archive of exported csv files' do
          expect(archive_contents).to include 'users_1.csv'
          expect(archive_contents).to_not include 'schema_migrations_1.csv'
          expect(archive_contents).to_not include 'checksums_1.csv'
        end

        # PII columns (last_name, email — see pii_hash) must be stripped.
        it 'contains a user export csv file without PII fields' do
          backup_contents = File.read(File.join(unpack_dir, 'users_1.csv')).split("\n")
          expect(backup_contents[0]).to eq("id,username,first_name,created_at,updated_at")
          expect(backup_contents[1]).to match(/\A1,,Emily,.*/)
          expect(backup_contents[1]).not_to match(/emily@socialcast\.com/)
          expect(backup_contents[2]).to match(/\A2,,Jennifer,.*/)
          expect(backup_contents[2]).not_to match(/jennifer@socialcast\.com/)
        end
      end
    end

    # --archive-dir packages pre-existing CSV files instead of dumping MySQL
    # (hence the empty mysql_config).
    context 'with --csv and --archive-dir options' do
      let(:mysql_config) { {} }
      let(:options) { ['--csv', '--archive-dir', archive_dir, '--date', '2014-04-20'] }
      let(:contents) do
        <<-EOS
id,username,first_name,last_name,email
3,bot,Production,Bot,bot@socialcast.com
        EOS
      end

      before do
        stub_s3_upload(remote_encrypted_archive)
        File.open(File.join(archive_dir, 'users_1.csv'), 'w') do |file|
          file.write contents
        end
        subject
        unpack(encrypted_archive, unpack_dir)
      end

      let(:encrypted_archive_base_name) { 'data_export_2014-04-20-00-00_db.csv.tar.gz.enc' }

      it 'exports an archive of exported csv files' do
        expect(archive_contents).to include 'users_1.csv'
        expect(archive_contents).to_not include 'schema_migrations_1.csv'
        expect(archive_contents).to_not include 'checksums_1.csv'
      end

      # Round-trip: the archived file must be byte-identical to the input.
      it 'contains a user export csv file' do
        backup_contents = File.read(File.join(unpack_dir, 'users_1.csv'))
        expect(backup_contents).to eq(contents)
      end
    end

    context 'with sftp config' do
      let(:sftp_config) do
        {
          host: sftp_host,
          user: sftp_user,
          backup_dir: backup_dir
        }
      end

      before do
        # SFTP transport must fully replace S3 — no S3 writes allowed.
        expect_any_instance_of(AWS::S3::S3Object).to receive(:write).never
        sftp_double = double
        expect(sftp_double).to receive(:mkdir).with(backup_dir)
        expect(sftp_double).to receive(:upload!).with(encrypted_archive, File.join(backup_dir, encrypted_archive_base_name))
        expect(Net::SFTP).to receive(:start).with(sftp_host, sftp_user, {timeout: 30}).and_return(sftp_double)
        subject
      end

      # All verification happens through the mock expectations above.
      it 'uploads the archive via SFTP' do
      end
    end

    context 'with an incomplete mysql configuration section' do
      let(:mysql_config) { {} }

      it { expect { subject }.to raise_error ArgumentError, /missing mysql section/ }
    end
  end
272
+
273
  describe '#unpack_task' do
    let(:options) { [] }
    let(:encrypted_archive_basename) { 'data_export-2013-05-05_db.sql.gz.enc' }
    let(:encrypted_archive) { File.join(export_dir, 'data_export-2013-05-05_db.sql.gz.enc') }
    let(:remote_encrypted_archive) { File.join(backup_dir, 'data_export-2013-05-05_db.sql.gz.enc') }

    subject do
      DataExporter::CLI.start(['unpack', *options, '--quiet', '--config-file', config_file.path])
    end

    context 'when backups are found' do
      before do
        # Create a real encrypted archive locally, then stub S3 so the CLI
        # "downloads" exactly those bytes.
        export(backup_key, encrypted_archive)
        stub_s3_download(encrypted_archive, remote_encrypted_archive)
        subject
      end

      it 'unpacks a sql file' do
        expect(archive_contents.size).to eq(3)
        expect(archive_contents.last).to match(/data_export.*_db.sql/)
        backup_contents = File.read(File.join(unpack_dir, archive_contents.last)).split("\n")
        expect(backup_contents[0]).to match(/\A-- MySQL dump/)
      end

      context 'with --csv option' do
        let(:options) { ['--csv'] }
        let(:encrypted_archive) { File.join(export_dir, 'data_export-2013-05-05_db.csv.tar.gz.enc') }
        let(:remote_encrypted_archive) { File.join(backup_dir, 'data_export-2013-05-05_db.csv.tar.gz.enc') }

        it 'unpacks an archive of exported csv files' do
          expect(archive_contents).to include 'users_1.csv'
          expect(archive_contents).to_not include 'schema_migrations_1.csv'
          expect(archive_contents).to_not include 'checksums_1.csv'
        end

        it 'contains a user export csv file' do
          backup_contents = File.read(File.join(unpack_dir, 'users_1.csv')).split("\n")
          expect(backup_contents[0]).to eq("id,username,first_name,last_name,email,created_at,updated_at")
          expect(backup_contents[1]).to match(/\A1,,Emily,James,emily@socialcast\.com.*/)
          expect(backup_contents[2]).to match(/\A2,,Jennifer,Lawson,jennifer@socialcast\.com.*/)
        end
      end

      context 'with --date option' do
        let(:options) { ['--date', '2013-05-05'] }

        it 'unpacks a sql file' do
          expect(archive_contents.size).to eq(3)
          expect(archive_contents.last).to match(/data_export.*_db.sql/)
          backup_contents = File.read(File.join(unpack_dir, archive_contents.last)).split("\n")
          expect(backup_contents[0]).to match(/\A-- MySQL dump/)
        end
      end
    end

    # Simulate one failed stage in the decrypt/gunzip pipeline.
    context 'when Open3.pipeline returns a non-zero exit status' do
      before do
        export(backup_key, encrypted_archive)
        stub_s3_download(encrypted_archive, remote_encrypted_archive)
        allow(Open3).to receive(:pipeline_start).and_yield([double(value: double(success?: true)), double(value: double(success?: false))])
      end

      it { expect { subject }.to raise_error SystemExit, /Problem unpacking/ }

      context 'with --csv option' do
        let(:options) { ['--csv'] }
        let(:encrypted_archive) { File.join(export_dir, 'data_export-2013-05-05_db.csv.tar.gz.enc') }
        let(:remote_encrypted_archive) { File.join(backup_dir, 'data_export-2013-05-05_db.csv.tar.gz.enc') }
        it { expect { subject }.to raise_error SystemExit, /Problem unpacking/ }
      end
    end

    context 'when backups are not found' do
      before do
        export(backup_key, encrypted_archive)
        # with_prefix returns nil here — the empty listing case.
        s3_objects_double = double('s3_objects')
        expect(s3_objects_double).to receive(:with_prefix)
        expect_any_instance_of(AWS::S3::Bucket).to receive(:objects).and_return(s3_objects_double)
      end

      it { expect { subject }.to raise_error SystemExit, /No backups found/ }
    end

    context 'with sftp config' do
      let(:sftp_config) do
        {
          host: sftp_host,
          user: sftp_user
        }
      end

      let(:remote_encrypted_archive) { File.join(backup_dir, 'data_export-2013-05-05_db.sql.gz.enc') }
      let(:downloaded_encrypted_archive) { File.join(download_dir, 'data_export-2013-05-05_db.sql.gz.enc') }

      context 'when backups exist' do
        before do
          export(backup_key, encrypted_archive)

          # Full SFTP session stub: listing finds one file, download! copies
          # the locally-exported archive into download_dir.
          sftp_dir_double, sftp_file_double, sftp_session_double = double, double, double
          expect(sftp_file_double).to receive(:name).and_return(encrypted_archive_basename)
          expect(sftp_file_double).to receive(:attributes).at_least(:once).and_return(double(:mtime => Time.now.to_i, :size => nil))
          expect(sftp_dir_double).to receive(:glob).with(backup_dir, 'data_export*sql.gz.enc').and_yield(sftp_file_double)
          expect(sftp_session_double).to receive(:dir).and_return(sftp_dir_double)
          expect(sftp_session_double).to receive(:download!).with(remote_encrypted_archive, downloaded_encrypted_archive) do
            FileUtils.cp(encrypted_archive, downloaded_encrypted_archive)
          end

          expect(Net::SFTP).to receive(:start).with(sftp_host, sftp_user, {timeout: 30}).and_return(sftp_session_double)
          subject
        end

        # Verified entirely by the mock expectations in the before block.
        it 'downloads the archive via SFTP' do; end

        it 'unpacks a sql file' do
          expect(archive_contents.size).to eq(3)
          expect(archive_contents.last).to match(/data_export.*_db.sql/)
          backup_contents = File.read(File.join(unpack_dir, archive_contents.last)).split("\n")
          expect(backup_contents[0]).to match(/\A-- MySQL dump/)
        end
      end

      context 'when no backups are found' do
        before do
          export(backup_key, encrypted_archive)

          # glob yields nothing — empty remote listing.
          sftp_dir_double, sftp_session_double = double, double
          expect(sftp_dir_double).to receive(:glob)
          expect(sftp_session_double).to receive(:dir).and_return(sftp_dir_double)
          expect(Net::SFTP).to receive(:start).with(sftp_host, sftp_user, {timeout: 30}).and_return(sftp_session_double)
        end

        it { expect { subject }.to raise_error SystemExit, /No backups found/ }
      end
    end
  end
408
+
409
  describe '#status_task' do
    let(:options) { [] }

    subject do
      DataExporter::CLI.start(['status', *options, '--quiet', '--config-file', config_file.path])
    end

    let(:encrypted_archive_base_name) { 'data_export_2013-04-20-00-00_db.sql.gz.enc' }
    let(:remote_encrypted_archive) { File.join(backup_dir, encrypted_archive_base_name) }

    context 'with --redis-key-prefix' do
      let(:options) { ['--redis-key-prefix', 'redis_key']}
      before :each do
        # Status looks up the newest backup on S3, then records two values
        # in redis (hence the .twice on Redis#set).
        stub_s3_find_last(remote_encrypted_archive)
        expect_any_instance_of(Redis).to receive(:set).twice
        subject
      end

      # Verified entirely by the mock expectations in the before block.
      it 'updates redis counters' do; end
    end
  end
430
+ end