s3_website 0.1.0

Files changed (101)
  1. checksums.yaml +7 -0
  2. data/.gitignore +5 -0
  3. data/.travis.yml +5 -0
  4. data/Gemfile +3 -0
  5. data/Gemfile.lock +81 -0
  6. data/LICENSE +42 -0
  7. data/README.md +332 -0
  8. data/Rakefile +18 -0
  9. data/bin/s3_website +38 -0
  10. data/changelog.md +7 -0
  11. data/example-configurations.md +60 -0
  12. data/features/as-library.feature +29 -0
  13. data/features/cassettes/cucumber_tags/create-redirect.yml +384 -0
  14. data/features/cassettes/cucumber_tags/new-and-changed-files.yml +303 -0
  15. data/features/cassettes/cucumber_tags/new-files-for-sydney.yml +211 -0
  16. data/features/cassettes/cucumber_tags/new-files.yml +355 -0
  17. data/features/cassettes/cucumber_tags/no-new-or-changed-files.yml +359 -0
  18. data/features/cassettes/cucumber_tags/one-file-to-delete.yml +390 -0
  19. data/features/cassettes/cucumber_tags/only-changed-files.yml +411 -0
  20. data/features/cassettes/cucumber_tags/s3-and-cloudfront-when-updating-a-file.yml +435 -0
  21. data/features/cassettes/cucumber_tags/s3-and-cloudfront.yml +290 -0
  22. data/features/cloudfront.feature +35 -0
  23. data/features/delete.feature +18 -0
  24. data/features/instructions-for-new-user.feature +94 -0
  25. data/features/redirects.feature +16 -0
  26. data/features/step_definitions/steps.rb +67 -0
  27. data/features/support/env.rb +26 -0
  28. data/features/support/test_site_dirs/cdn-powered.blog.fi/_site/css/styles.css +3 -0
  29. data/features/support/test_site_dirs/cdn-powered.blog.fi/_site/index.html +5 -0
  30. data/features/support/test_site_dirs/cdn-powered.blog.fi/s3_website.yml +4 -0
  31. data/features/support/test_site_dirs/cdn-powered.with-one-change.blog.fi/_site/css/styles.css +3 -0
  32. data/features/support/test_site_dirs/cdn-powered.with-one-change.blog.fi/_site/index.html +10 -0
  33. data/features/support/test_site_dirs/cdn-powered.with-one-change.blog.fi/s3_website.yml +4 -0
  34. data/features/support/test_site_dirs/create-redirects/_site/.gitkeep +0 -0
  35. data/features/support/test_site_dirs/create-redirects/s3_website.yml +6 -0
  36. data/features/support/test_site_dirs/index-and-assets.blog.fi/_site/assets/picture.gif +0 -0
  37. data/features/support/test_site_dirs/index-and-assets.blog.fi/_site/css/styles.css +3 -0
  38. data/features/support/test_site_dirs/index-and-assets.blog.fi/_site/index.html +5 -0
  39. data/features/support/test_site_dirs/index-and-assets.blog.fi/s3_website.yml +3 -0
  40. data/features/support/test_site_dirs/my.blog.com/_site/css/styles.css +3 -0
  41. data/features/support/test_site_dirs/my.blog.com/_site/index.html +5 -0
  42. data/features/support/test_site_dirs/my.blog.com/s3_website.yml +3 -0
  43. data/features/support/test_site_dirs/my.sydney.blog.au/_site/css/styles.css +3 -0
  44. data/features/support/test_site_dirs/my.sydney.blog.au/_site/index.html +5 -0
  45. data/features/support/test_site_dirs/my.sydney.blog.au/s3_website.yml +4 -0
  46. data/features/support/test_site_dirs/new-and-changed-files.com/_site/css/styles.css +4 -0
  47. data/features/support/test_site_dirs/new-and-changed-files.com/_site/index.html +8 -0
  48. data/features/support/test_site_dirs/new-and-changed-files.com/s3_website.yml +3 -0
  49. data/features/support/test_site_dirs/no-new-or-changed-files.com/_site/css/styles.css +3 -0
  50. data/features/support/test_site_dirs/no-new-or-changed-files.com/_site/index.html +5 -0
  51. data/features/support/test_site_dirs/no-new-or-changed-files.com/s3_website.yml +3 -0
  52. data/features/support/test_site_dirs/only-changed-files.com/_site/css/styles.css +3 -0
  53. data/features/support/test_site_dirs/only-changed-files.com/_site/index.html +9 -0
  54. data/features/support/test_site_dirs/only-changed-files.com/s3_website.yml +3 -0
  55. data/features/support/test_site_dirs/site.with.css-maxage.com/_site/css/styles.css +3 -0
  56. data/features/support/test_site_dirs/site.with.css-maxage.com/_site/index.html +5 -0
  57. data/features/support/test_site_dirs/site.with.css-maxage.com/s3_website.yml +5 -0
  58. data/features/support/test_site_dirs/site.with.gzipped-and-max-aged-content.com/_site/css/styles.css +3 -0
  59. data/features/support/test_site_dirs/site.with.gzipped-and-max-aged-content.com/_site/index.html +5 -0
  60. data/features/support/test_site_dirs/site.with.gzipped-and-max-aged-content.com/s3_website.yml +5 -0
  61. data/features/support/test_site_dirs/site.with.gzipped-html.com/_site/css/styles.css +3 -0
  62. data/features/support/test_site_dirs/site.with.gzipped-html.com/_site/index.html +5 -0
  63. data/features/support/test_site_dirs/site.with.gzipped-html.com/s3_website.yml +5 -0
  64. data/features/support/test_site_dirs/site.with.maxage.com/_site/css/styles.css +3 -0
  65. data/features/support/test_site_dirs/site.with.maxage.com/_site/index.html +5 -0
  66. data/features/support/test_site_dirs/site.with.maxage.com/s3_website.yml +4 -0
  67. data/features/support/test_site_dirs/unpublish-a-post.com/_site/css/styles.css +3 -0
  68. data/features/support/test_site_dirs/unpublish-a-post.com/s3_website.yml +3 -0
  69. data/features/support/vcr.rb +18 -0
  70. data/features/sync.feature +80 -0
  71. data/features/website-performance.feature +57 -0
  72. data/lib/cloudfront/invalidator.rb +22 -0
  73. data/lib/s3_website.rb +20 -0
  74. data/lib/s3_website/config_loader.rb +56 -0
  75. data/lib/s3_website/diff_helper.rb +21 -0
  76. data/lib/s3_website/endpoint.rb +30 -0
  77. data/lib/s3_website/errors.rb +28 -0
  78. data/lib/s3_website/keyboard.rb +27 -0
  79. data/lib/s3_website/parallelism.rb +18 -0
  80. data/lib/s3_website/retry.rb +19 -0
  81. data/lib/s3_website/tasks.rb +42 -0
  82. data/lib/s3_website/upload.rb +103 -0
  83. data/lib/s3_website/uploader.rb +160 -0
  84. data/s3-website.gemspec +39 -0
  85. data/spec/lib/config_loader_spec.rb +20 -0
  86. data/spec/lib/endpoint_spec.rb +27 -0
  87. data/spec/lib/keyboard_spec.rb +59 -0
  88. data/spec/lib/parallelism_spec.rb +43 -0
  89. data/spec/lib/retry_spec.rb +34 -0
  90. data/spec/lib/upload_spec.rb +205 -0
  91. data/spec/lib/uploader_spec.rb +30 -0
  92. data/spec/sample_files/hyde_site/_site/.vimrc +5 -0
  93. data/spec/sample_files/hyde_site/_site/css/styles.css +3 -0
  94. data/spec/sample_files/hyde_site/_site/index.html +1 -0
  95. data/spec/sample_files/hyde_site/s3_website.yml +3 -0
  96. data/spec/sample_files/tokyo_site/_site/.vimrc +5 -0
  97. data/spec/sample_files/tokyo_site/_site/css/styles.css +3 -0
  98. data/spec/sample_files/tokyo_site/_site/index.html +1 -0
  99. data/spec/sample_files/tokyo_site/s3_website.yml +4 -0
  100. data/spec/spec_helper.rb +1 -0
  101. metadata +416 -0
data/lib/s3_website/parallelism.rb
@@ -0,0 +1,18 @@
+ module S3Website
+   class Parallelism
+     def self.each_in_parallel_or_sequentially(items, &operation)
+       if ENV['disable_parallel_processing']
+         items.each do |item|
+           operation.call item
+         end
+       else
+         threads = items.map do |item|
+           Thread.new(item) { |item|
+             operation.call item
+           }
+         end
+         threads.each { |thread| thread.join }
+       end
+     end
+   end
+ end
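For orientation, a hedged usage sketch of the helper above (the file list and the `puts` output are illustrative, not taken from the gem): each item is handed to the block on its own thread, unless the `disable_parallel_processing` environment variable is set, in which case the same block runs sequentially.

    require 's3_website'   # assumes the gem is on the load path

    files = ['index.html', 'css/styles.css', 'assets/picture.gif']

    # Runs the block once per file, one thread per file.
    S3Website::Parallelism.each_in_parallel_or_sequentially(files) do |file|
      puts "processing #{file}"
    end

    # With the environment flag set, the same call processes the files sequentially.
    ENV['disable_parallel_processing'] = 'true'
    S3Website::Parallelism.each_in_parallel_or_sequentially(files) do |file|
      puts "processing #{file}"
    end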
data/lib/s3_website/retry.rb
@@ -0,0 +1,19 @@
+ module S3Website
+   class Retry
+     def self.run_with_retry(sleep_milliseconds = 3.000)
+       attempt = 0
+       begin
+         yield
+       rescue Exception => e
+         $stderr.puts "Exception Occurred: #{e.message} (#{e.class}) Retrying in 3 seconds..."
+         sleep sleep_milliseconds
+         attempt += 1
+         if attempt <= 3
+           retry
+         else
+           raise RetryAttemptsExhaustedError
+         end
+       end
+     end
+   end
+ end
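A minimal sketch of how the retry wrapper behaves, assuming `RetryAttemptsExhaustedError` lives under the `S3Website` namespace (see errors.rb in the file list); the failing block is purely illustrative. The block is yielded once and retried up to three times, so a persistently failing operation runs four times before the error is raised. Note that the parameter is named `sleep_milliseconds` but is passed straight to `sleep`, so the default of 3.000 means three seconds, matching the hard-coded message.

    attempts = 0

    begin
      S3Website::Retry.run_with_retry do
        attempts += 1
        raise 'simulated S3 hiccup'   # illustrative failure
      end
    rescue S3Website::RetryAttemptsExhaustedError
      puts "gave up after #{attempts} attempts"   # => 4 (1 initial try + 3 retries)
    end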
data/lib/s3_website/tasks.rb
@@ -0,0 +1,42 @@
+ module S3Website
+   class Tasks
+     def self.push(site_dir, in_headless_mode = false)
+       ConfigLoader.check_project site_dir
+       ConfigLoader.check_s3_configuration site_dir
+       config = S3Website::ConfigLoader.load_configuration site_dir
+       new_files_count, changed_files_count, deleted_files_count, changed_files, changed_redirects =
+         Uploader.run(site_dir, config, in_headless_mode)
+       invalidated_items_count =
+         invalidate_cf_dist_if_configured(config, changed_files + changed_redirects)
+       {
+         :new_files_count => new_files_count,
+         :changed_files_count => changed_files_count,
+         :deleted_files_count => deleted_files_count,
+         :invalidated_items_count => invalidated_items_count,
+         :changed_redirects_count => changed_redirects.size
+       }
+     rescue S3WebsiteError => e
+       puts e.message
+       exit 1
+     end
+
+     def self.config_create(site_dir)
+       ConfigLoader.check_s3_configuration site_dir
+     rescue S3WebsiteError => e
+       puts e.message
+       exit 1
+     end
+
+     private
+
+     def self.invalidate_cf_dist_if_configured(config, changed_files)
+       cloudfront_configured = config['cloudfront_distribution_id'] &&
+         (not config['cloudfront_distribution_id'].empty?)
+       invalidated_items_count = if cloudfront_configured
+         Cloudfront::Invalidator.invalidate(config, changed_files)
+       else
+         0
+       end
+     end
+   end
+ end
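When the gem is used as a library (the as-library.feature exercises this path), the push task returns the counters assembled above. A hedged sketch with an illustrative site directory; per the bundled specs, the directory passed in is the generated `_site` directory, with `s3_website.yml` available to the config loader.

    require 's3_website'

    # 'my-blog/_site' is a made-up path; true enables headless mode (no delete prompts).
    report = S3Website::Tasks.push('my-blog/_site', true)

    puts "new: #{report[:new_files_count]}, changed: #{report[:changed_files_count]}"
    puts "deleted: #{report[:deleted_files_count]}, invalidated: #{report[:invalidated_items_count]}"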
data/lib/s3_website/upload.rb
@@ -0,0 +1,103 @@
+ require 'tempfile'
+ require 'zlib'
+
+ module S3Website
+   class Upload
+     attr_reader :config, :file, :path, :full_path, :s3
+
+     def initialize(path, s3, config, site_dir)
+       @path = path
+       @full_path = "#{site_dir}/#{path}"
+       @file = File.open("#{site_dir}/#{path}")
+       @s3 = s3
+       @config = config
+     end
+
+     def perform!
+       success = s3.buckets[config['s3_bucket']].objects[path].write(upload_file, upload_options)
+       upload_file.close
+       success
+     end
+
+     def details
+       "#{path}#{" [gzipped]" if gzip?}#{" [max-age=#{max_age}]" if cache_control?}"
+     end
+
+     private
+
+     def upload_file
+       @upload_file ||= gzip? ? gzipped_file : file
+     end
+
+     def gzip?
+       return false unless !!config['gzip']
+
+       extensions = config['gzip'].is_a?(Array) ? config['gzip'] : S3Website::DEFAULT_GZIP_EXTENSIONS
+       extensions.include?(File.extname(path))
+     end
+
+     def gzipped_file
+       tempfile = Tempfile.new(File.basename(path))
+
+       gz = Zlib::GzipWriter.new(tempfile, Zlib::BEST_COMPRESSION, Zlib::DEFAULT_STRATEGY)
+
+       gz.mtime = File.mtime(full_path)
+       gz.orig_name = File.basename(path)
+       gz.write(file.read)
+
+       gz.flush
+       tempfile.flush
+
+       gz.close
+       tempfile.open
+
+       tempfile
+     end
+
+     def upload_options
+       opts = {
+         :content_type => mime_type,
+         :reduced_redundancy => config['s3_reduced_redundancy']
+       }
+
+       opts[:content_type] = "text/html; charset=utf-8" if mime_type == 'text/html'
+       opts[:content_encoding] = "gzip" if gzip?
+       opts[:cache_control] = "max-age=#{max_age}" if cache_control?
+
+       opts
+     end
+
+     def cache_control?
+       !!config['max_age']
+     end
+
+     def max_age
+       if config['max_age'].is_a?(Hash)
+         max_age_entries_most_specific_first.each do |glob_and_age|
+           (glob, age) = glob_and_age
+           return age if File.fnmatch(glob, path)
+         end
+       else
+         return config['max_age']
+       end
+
+       return 0
+     end
+
+     # The most specific max-age glob == the longest glob
+     def max_age_entries_most_specific_first
+       sorted_by_glob_length = config['max_age'].
+         each_pair.
+         to_a.
+         sort_by do |glob_and_age|
+           (glob, age) = glob_and_age
+           sort_key = glob.length
+         end.
+         reverse
+     end
+
+     def mime_type
+       MIME::Types.type_for(path).first
+     end
+   end
+ end
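The `max_age` lookup above resolves a hash of glob => seconds entries from the most specific (longest) glob to the least specific. A standalone sketch of the same idea with made-up values, using `File.fnmatch` exactly as `Upload#max_age` does:

    # Illustrative configuration, the same shape a max_age hash in s3_website.yml would produce.
    max_age = {
      'assets/*' => 6000,
      '*'        => 300
    }

    # Longest glob first == most specific entry wins, mirroring max_age_entries_most_specific_first.
    def resolve_max_age(max_age, path)
      max_age.sort_by { |glob, _| -glob.length }.each do |glob, age|
        return age if File.fnmatch(glob, path)
      end
      0
    end

    resolve_max_age(max_age, 'assets/picture.gif')  # => 6000, matched by 'assets/*'
    resolve_max_age(max_age, 'index.html')          # => 300, falls through to '*'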
data/lib/s3_website/uploader.rb
@@ -0,0 +1,160 @@
+ module S3Website
+   class Uploader
+     def self.run(site_dir, config, in_headless_mode = false)
+       puts "Deploying _site/* to #{config['s3_bucket']}"
+
+       s3_config = { :s3_endpoint => Endpoint.new(config['s3_endpoint']).hostname }
+       s3_id, s3_secret = config['s3_id'], config['s3_secret']
+       unless s3_id.nil? || s3_id == '' || s3_secret.nil? || s3_secret == ''
+         s3_config.merge! :access_key_id => s3_id, :secret_access_key => s3_secret
+       end
+
+       s3 = AWS::S3.new(s3_config)
+
+       new_files_count, changed_files_count, changed_files = upload_files(
+         s3, config, site_dir
+       )
+
+       redirects = config['redirects'] || {}
+       changed_redirects = setup_redirects redirects, config, s3
+
+       deleted_files_count = remove_superfluous_files(s3, { :s3_bucket => config['s3_bucket'],
+                                                            :site_dir => site_dir,
+                                                            :redirects => redirects,
+                                                            :in_headless_mode => in_headless_mode,
+                                                            :ignore_on_server => config["ignore_on_server"] })
+
+       print_done_report config
+
+       [new_files_count, changed_files_count, deleted_files_count, changed_files, changed_redirects]
+     end
+
+     private
+
+     def self.print_done_report(config)
+       bucket_name = config['s3_bucket']
+       website_hostname_suffix = Endpoint.new(config['s3_endpoint']).website_hostname
+       website_hostname_with_bucket =
+         "%s.%s" % [bucket_name, website_hostname_suffix]
+       puts "Done! Go visit: http://#{website_hostname_with_bucket}/index.html"
+     end
+
+     def self.upload_files(s3, config, site_dir)
+       changed_files, new_files = DiffHelper.resolve_files_to_upload(
+         s3.buckets[config['s3_bucket']], site_dir)
+       to_upload = changed_files + new_files
+       if to_upload.empty?
+         puts "No new or changed files to upload"
+       else
+         pre_upload_report = []
+         pre_upload_report << "Uploading"
+         pre_upload_report << "#{new_files.length} new" if new_files.length > 0
+         pre_upload_report << "and" if changed_files.length > 0 and new_files.length > 0
+         pre_upload_report << "#{changed_files.length} changed" if changed_files.length > 0
+         pre_upload_report << "file(s)"
+         puts pre_upload_report.join(' ')
+         upload_in_parallel_or_sequentially to_upload, s3, config, site_dir
+       end
+       [new_files.length, changed_files.length, changed_files]
+     end
+
+     def self.upload_in_parallel_or_sequentially(files_to_upload, s3, config, site_dir)
+       Parallelism.each_in_parallel_or_sequentially(files_to_upload) { |f|
+         upload_file(f, s3, config, site_dir)
+       }
+     end
+
+     def self.upload_file(file, s3, config, site_dir)
+       Retry.run_with_retry do
+         upload = Upload.new(file, s3, config, site_dir)
+
+         if upload.perform!
+           puts "Upload #{upload.details}: Success!"
+         else
+           puts "Upload #{upload.details}: FAILURE!"
+         end
+       end
+     end
+
+     def self.setup_redirects(redirects, config, s3)
+       operations = redirects.map do |path, target|
+         setup_redirect(path, target, s3, config)
+       end
+       performed_operations = operations.reject do |op|
+         op == :no_redirect_operation_performed
+       end
+       unless performed_operations.empty?
+         puts 'Creating new redirects ...'
+       end
+       performed_operations.each do |redirect_operation|
+         puts ' ' + redirect_operation[:report]
+       end
+       performed_operations.map do |redirect_operation|
+         redirect_operation[:path]
+       end
+     end
+
+     def self.setup_redirect(path, target, s3, config)
+       target = '/' + target unless target =~ %r{^(/|https?://)}
+       s3_object = s3.buckets[config['s3_bucket']].objects[path]
+
+       begin
+         current_head = s3_object.head
+       rescue AWS::S3::Errors::NoSuchKey
+       end
+
+       if current_head.nil? or current_head[:website_redirect_location] != target
+         s3_object.write('', :website_redirect_location => target)
+         {
+           :report => "Redirect #{path} to #{target}: Success!",
+           :path => path
+         }
+       else
+         :no_redirect_operation_performed
+       end
+     end
+
+     def self.remove_superfluous_files(s3, options)
+       s3_bucket_name = options.fetch(:s3_bucket)
+       site_dir = options.fetch(:site_dir)
+       in_headless_mode = options.fetch(:in_headless_mode)
+
+       remote_files = s3.buckets[s3_bucket_name].objects.map { |f| f.key }
+       local_files = load_all_local_files(site_dir) + options.fetch(:redirects).keys
+       files_to_delete = build_list_of_files_to_delete(remote_files, local_files, options[:ignore_on_server])
+
+       deleted_files_count = 0
+       if in_headless_mode
+         files_to_delete.each { |s3_object_key|
+           delete_s3_object s3, s3_bucket_name, s3_object_key
+           deleted_files_count += 1
+         }
+       else
+         Keyboard.if_user_confirms_delete(files_to_delete) { |s3_object_key|
+           delete_s3_object s3, s3_bucket_name, s3_object_key
+           deleted_files_count += 1
+         }
+       end
+       deleted_files_count
+     end
+
+     def self.build_list_of_files_to_delete(remote_files, local_files, ignore_on_server = nil)
+       ignore_on_server = Regexp.new(ignore_on_server || "a_string_that_should_never_match_ever")
+       files_to_delete = remote_files - local_files
+       files_to_delete.reject { |file| ignore_on_server.match(file) }
+     end
+
+     def self.delete_s3_object(s3, s3_bucket_name, s3_object_key)
+       Retry.run_with_retry do
+         s3.buckets[s3_bucket_name].objects[s3_object_key].delete
+         puts("Delete #{s3_object_key}: Success!")
+       end
+     end
+
+     def self.load_all_local_files(site_dir)
+       Dir.glob(site_dir + '/**/*', File::FNM_DOTMATCH).
+         delete_if { |f| File.directory?(f) }.
+         map { |f| f.gsub(site_dir + '/', '') }
+     end
+   end
+ end
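For illustration, the delete-list calculation in `remove_superfluous_files` and `build_list_of_files_to_delete`, inlined with made-up object keys: only keys that exist remotely but not locally are candidates, and anything matching the `ignore_on_server` regexp stays on the bucket.

    remote_files = ['index.html', 'old-post.html', 'drafts/wip.html']
    local_files  = ['index.html']
    ignore_on_server = Regexp.new('drafts/')

    to_delete = (remote_files - local_files).reject { |file| ignore_on_server.match(file) }
    # => ["old-post.html"]  ('drafts/wip.html' is ignored; 'index.html' still exists locally)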
data/s3-website.gemspec
@@ -0,0 +1,39 @@
+ # -*- encoding: utf-8 -*-
+ $:.push File.expand_path("../lib", __FILE__)
+
+ Gem::Specification.new do |s|
+   s.name        = "s3_website"
+   s.version     = "0.1.0"
+   s.platform    = Gem::Platform::RUBY
+   s.authors     = ["Lauri Lehmijoki"]
+   s.email       = ["lauri.lehmijoki@iki.fi"]
+   s.homepage    = "https://github.com/laurilehmijoki/s3_website"
+   s.summary     = %q{Manage your S3 website}
+   s.description = %q{
+     Sync website files, set redirects, use HTTP performance optimisations, deliver via
+     CloudFront.
+   }
+
+   s.default_executable = %q{s3_website}
+
+   s.add_dependency 'aws-sdk', '~> 1.8.5'
+   s.add_dependency 'filey-diff', '~> 1'
+   s.add_dependency 'simple-cloudfront-invalidator', '~> 1'
+   s.add_dependency 'erubis', '~> 2.7.0'
+   s.add_dependency 'mime-types', '= 1.19'
+   s.add_dependency 'thor', '= 0.18.1'
+   s.add_dependency 'configure-s3-website', '~> 1'
+
+   s.add_development_dependency 'rspec'
+   s.add_development_dependency 'rspec-expectations'
+   s.add_development_dependency 'cucumber'
+   s.add_development_dependency 'aruba', '~> 0.5.1'
+   s.add_development_dependency 'rake'
+   s.add_development_dependency 'vcr', '~> 2.5.0'
+   s.add_development_dependency 'webmock', '~> 1.8.0'
+
+   s.files         = `git ls-files`.split("\n")
+   s.test_files    = `git ls-files -- {test,spec,features}/*`.split("\n")
+   s.executables   = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
+   s.require_paths = ["lib"]
+ end
data/spec/lib/config_loader_spec.rb
@@ -0,0 +1,20 @@
+ require 'spec_helper'
+
+ describe S3Website::ConfigLoader do
+   it 'supports eRuby syntax in s3_website.yml' do
+     config = S3Website::ConfigLoader.load_configuration('spec/sample_files/hyde_site/_site')
+     config['s3_id'].should eq('hello')
+     config['s3_secret'].should eq('world')
+     config['s3_bucket'].should eq('galaxy')
+   end
+
+   it 'does not define default endpoint' do
+     config = S3Website::ConfigLoader.load_configuration('spec/sample_files/hyde_site/_site')
+     config['s3_endpoint'].should be_nil
+   end
+
+   it 'reads the S3 endpoint setting from s3_website.yml' do
+     config = S3Website::ConfigLoader.load_configuration('spec/sample_files/tokyo_site/_site')
+     config['s3_endpoint'].should eq('ap-northeast-1')
+   end
+ end
data/spec/lib/endpoint_spec.rb
@@ -0,0 +1,27 @@
+ require 'spec_helper'
+ require 'pp'
+
+ describe S3Website::Endpoint do
+
+   it 'uses the DEFAULT_LOCATION_CONSTRAINT constant to set the default location constraint' do
+     endpoint = S3Website::Endpoint.new
+     endpoint.location_constraint.should eq(S3Website::Endpoint::DEFAULT_LOCATION_CONSTRAINT)
+   end
+
+   it 'uses the "us-east-1" as the default location' do
+     S3Website::Endpoint::DEFAULT_LOCATION_CONSTRAINT.should eq('us-east-1')
+   end
+
+   it 'takes a valid location constraint as a constructor parameter' do
+     endpoint = S3Website::Endpoint.new('EU')
+     endpoint.location_constraint.should eq('EU')
+   end
+
+   it 'fails if the location constraint is invalid' do
+     expect {
+       S3Website::Endpoint.new('andromeda')
+     }.to raise_error
+   end
+ end
+
+
data/spec/lib/keyboard_spec.rb
@@ -0,0 +1,59 @@
+ require 'spec_helper'
+
+ describe S3Website::Keyboard do
+   describe '.keep_or_delete' do
+     let(:s3_object_keys) { ['a', 'b', 'c'] }
+     let(:standard_input) { stub('std_in') }
+
+     it 'can delete only the first item' do
+       standard_input.stub(:gets).and_return("d", "K")
+       deleted_keys = call_keyboard(s3_object_keys, standard_input)
+       deleted_keys.should eq(['a'])
+     end
+
+     it 'can delete only the second item' do
+       standard_input.stub(:gets).and_return("k", "d", "k")
+       deleted_keys = call_keyboard(s3_object_keys, standard_input)
+       deleted_keys.should eq(['b'])
+     end
+
+     it 'can delete all but the first item' do
+       standard_input.stub(:gets).and_return("k", "D")
+       deleted_keys = call_keyboard(s3_object_keys, standard_input)
+       deleted_keys.should eq(['b', 'c'])
+     end
+
+     it 'can delete all s3 objects' do
+       standard_input.stub(:gets).and_return("D")
+       deleted_keys = call_keyboard(s3_object_keys, standard_input)
+       deleted_keys.should eq(['a', 'b', 'c'])
+     end
+
+     it 'can keep one s3 object' do
+       standard_input.stub(:gets).and_return("k", "d", "d")
+       deleted_keys = call_keyboard(s3_object_keys, standard_input)
+       deleted_keys.should eq(['b', 'c'])
+     end
+
+     it 'can keep all s3 objects' do
+       standard_input.stub(:gets).and_return("k", "k", "k")
+       deleted_keys = call_keyboard(s3_object_keys, standard_input)
+       deleted_keys.should eq([])
+     end
+
+     it 'can keep all s3 objects' do
+       standard_input.stub(:gets).and_return("K")
+       deleted_keys = call_keyboard(s3_object_keys, standard_input)
+       deleted_keys.should eq([])
+     end
+
+     def call_keyboard(s3_object_keys, standard_input)
+       deleted_keys = []
+       S3Website::Keyboard.if_user_confirms_delete(s3_object_keys,
+                                                   standard_input) { |key|
+         deleted_keys << key
+       }
+       deleted_keys
+     end
+   end
+ end