middleman-s3_sync 3.0.21 → 3.0.22

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: 7e8a5cb2c096e224e6c21b405cb142a157711dac
-  data.tar.gz: 5b906c2f9d25530a297a1a8d2da9b8e8ad147681
+  metadata.gz: bccd80d4f353aef53c1cead353fc6939f6b5eaa5
+  data.tar.gz: dc63b6bbcca6a99d0e01796681124872c37f472b
 SHA512:
-  metadata.gz: 757e5a276590f998ef30a67367e64be4ed8544380ceab2fe431e9f4ad6212522113924c792b1cb713c41550d5979fa37a4784ad45dc4d8a368f9210eca8a8a85
-  data.tar.gz: cc64348884fc3a0d94879f173f7ceee02282670f098a6798d6435565d2f676d0174ebaa46c5f8c4807a32ff60011dee28c02dc502d3ee4feddbbadac42aece97
+  metadata.gz: 39e3a98584cf44195741c2e839f278f7442ddfa015ee7a3a7c5ca3e021ca7b22cc88f2cd465647bf413535b6cb70e9653dfb4d031ed131e9703f7af4de518f20
+  data.tar.gz: 7a4b0c20fa87bc333bdd85856401e009ae9de9020f7be96080114ee6836e9e740082b4e299020f867eb2bf73d5714b7cb7244ac02be7f8f224f97b82ea91e284
data/Changelog.md CHANGED
@@ -2,6 +2,12 @@
 
 The gem that tries really hard not to push files to S3.
 
+## v3.0.22
+
+* Fixes a bug where files were not closed, leading to an exhaustion of
+  file handles with large web sites.
+* Internal fixes.
+
 ## v3.0.17
 
 * Limits the number of concurrent threads used while processing the
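The headline fix in the changelog corresponds to the `body` change in the resource diff further down: the old code opened each file with `File.open(local_path)` and never closed it, while the new code yields the open file to a block so Ruby closes it when the block returns. A minimal sketch of the difference (illustrative only, not the gem's code):

```ruby
# Leaky pattern: the File object stays open until it is garbage-collected,
# which can exhaust file descriptors when syncing thousands of files.
def body
  @body = File.open(local_path)
end

# Block pattern: File.open yields the handle and closes it when the block
# returns, even if an exception is raised inside the block.
def body(&block)
  File.open(local_path, &block)
end
```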
data/README.md CHANGED
@@ -100,6 +100,8 @@ A sample ```.s3_sync``` file is included at the root of this repo.
 You can also pass the credentials through environment variables. They
 map to the following values:
 
+| Setting               | Environment Variable               |
+| --------------------- | ---------------------------------- |
 | aws_access_key_id     | ```ENV['AWS_ACCESS_KEY_ID``` |
 | aws_secret_access_key | ```ENV['AWS_SECRET_ACCESS_KEY']``` |
 
@@ -1,15 +1,24 @@
 module Middleman
   module S3Sync
     class Resource
-      attr_accessor :path, :s3_resource, :content_type, :gzipped
+      attr_accessor :path, :partial_s3_resource, :content_type, :gzipped
 
       CONTENT_MD5_KEY = 'x-amz-meta-content-md5'
 
       include Status
+
+      def s3_resource
+        @full_s3_resource || @partial_s3_resource
+      end
+
+      # S3 resource as returned by a HEAD request
+      def full_s3_resource
+        @full_s3_resource ||= bucket.files.head(path)
+      end
 
-      def initialize(path)
+      def initialize(path, partial_s3_resource)
         @path = path
-        @s3_resource = bucket.files.head(path)
+        @partial_s3_resource = partial_s3_resource
       end
 
       def remote_path
@@ -20,10 +29,9 @@ module Middleman
       def to_h
         attributes = {
           :key => key,
-          :body => body,
           :acl => options.acl,
           :content_type => content_type,
-          CONTENT_MD5_KEY => content_md5
+          CONTENT_MD5_KEY => local_content_md5
         }
 
         if caching_policy
@@ -48,36 +56,39 @@
       alias :attributes :to_h
 
       def update!
-        say_status "Updating".blue + " #{path}#{ gzipped ? ' (gzipped)'.white : ''}"
-        if options.verbose
-          say_status "Original: #{original_path.white}"
-          say_status "Local Path: #{local_path.white}"
-          say_status "remote md5: #{remote_md5.white}"
-          say_status "content md5: #{content_md5.white}"
-        end
-        s3_resource.body = body
-        s3_resource.acl = options.acl
-        s3_resource.content_type = content_type
-        s3_resource.metadata = { CONTENT_MD5_KEY => content_md5 }
-
-        if caching_policy
-          s3_resource.cache_control = caching_policy.cache_control
-          s3_resource.expires = caching_policy.expires
-        end
-
-        if options.prefer_gzip && gzipped
-          s3_resource.content_encoding = "gzip"
-        end
-
-        if options.reduced_redundancy_storage
-          s3_resource.storage_class = 'REDUCED_REDUNDANCY'
-        end
-
-        if options.encryption
-          s3_resource.encryption = 'AES256'
-        end
-
-        s3_resource.save
+        body { |body|
+          say_status "Updating".blue + " #{path}#{ gzipped ? ' (gzipped)'.white : ''}"
+          if options.verbose
+            say_status "Original: #{original_path.white}"
+            say_status "Local Path: #{local_path.white}"
+            say_status "remote md5: #{remote_object_md5.white} / #{remote_content_md5}"
+            say_status "content md5: #{local_object_md5.white} / #{local_content_md5}"
+          end
+          s3_resource.body = body
+
+          s3_resource.acl = options.acl
+          s3_resource.content_type = content_type
+          s3_resource.metadata = { CONTENT_MD5_KEY => local_content_md5 }
+
+          if caching_policy
+            s3_resource.cache_control = caching_policy.cache_control
+            s3_resource.expires = caching_policy.expires
+          end
+
+          if options.prefer_gzip && gzipped
+            s3_resource.content_encoding = "gzip"
+          end
+
+          if options.reduced_redundancy_storage
+            s3_resource.storage_class = 'REDUCED_REDUNDANCY'
+          end
+
+          if options.encryption
+            s3_resource.encryption = 'AES256'
+          end
+
+          s3_resource.save
+        }
       end
 
       def local_path
@@ -91,7 +102,7 @@ module Middleman
 
       def destroy!
         say_status "Deleting".red + " #{path}"
-        s3_resource.destroy
+        bucket.files.destroy remote_path
       end
 
       def create!
@@ -99,9 +110,11 @@ module Middleman
         if options.verbose
           say_status "Original: #{original_path.white}"
           say_status "Local Path: #{local_path.white}"
-          say_status "content md5: #{content_md5.white}"
+          say_status "content md5: #{local_content_md5.white}"
         end
-        bucket.files.create(to_h)
+        body { |body|
+          bucket.files.create(to_h.merge(:body => body))
+        }
       end
 
       def ignore!
@@ -109,6 +122,8 @@ module Middleman
           :redirect
         elsif directory?
           :directory
+        elsif alternate_encoding?
+          'alternate encoding'
         end
         say_status "Ignoring".yellow + " #{path} #{ reason ? "(#{reason})".white : "" }"
       end
@@ -120,6 +135,10 @@ module Middleman
       def to_create?
         status == :new
       end
+
+      def alternate_encoding?
+        status == :alternate_encoding
+      end
 
       def identical?
         status == :identical
@@ -130,21 +149,36 @@ module Middleman
       end
 
       def to_ignore?
-        status == :ignored
+        status == :ignored || status == :alternate_encoding
       end
 
-      def body
-        @body = File.open(local_path)
+      def body(&block)
+        File.open(local_path, &block)
       end
 
       def status
         @status ||= if directory?
-          :ignored
-        elsif local? && remote?
-          if content_md5 != remote_md5
-            :updated
+          if remote?
+            :deleted
           else
+            :ignored
+          end
+        elsif local? && remote?
+          if local_object_md5 == remote_object_md5
             :identical
+          else
+            if !gzipped
+              # we're not gzipped, object hashes being different indicates updated content
+              :updated
+            elsif (local_content_md5 != remote_content_md5)
+              # we're gzipped, so we checked the content MD5, and it also changed
+              :updated
+            else
+              # we're gzipped, the object hashes differ, but the content hashes are equal
+              # this means the gzipped bits changed while the compressed bits did not
+              # what's more, we spent a HEAD request to find out
+              :alternate_encoding
+            end
           end
         elsif local?
           :new
@@ -154,7 +188,7 @@ module Middleman
           :deleted
         end
       end
-
+
       def local?
        File.exist?(local_path)
       end
@@ -174,13 +208,21 @@ module Middleman
       def relative_path
         @relative_path ||= local_path.gsub(/#{build_dir}/, '')
       end
-
-      def remote_md5
-        s3_resource.metadata[CONTENT_MD5_KEY] || s3_resource.etag
+
+      def remote_object_md5
+        s3_resource.etag
+      end
+
+      def remote_content_md5
+        full_s3_resource.metadata[CONTENT_MD5_KEY]
       end
 
-      def content_md5
-        @content_md5 ||= Digest::MD5.hexdigest(File.read(original_path))
+      def local_object_md5
+        @local_object_md5 ||= Digest::MD5.hexdigest(File.read(local_path))
+      end
+
+      def local_content_md5
+        @local_content_md5 ||= Digest::MD5.hexdigest(File.read(original_path))
       end
 
       def original_path
@@ -1,5 +1,5 @@
 module Middleman
   module S3Sync
-    VERSION = "3.0.21"
+    VERSION = "3.0.22"
   end
 end
@@ -46,7 +46,7 @@ module Middleman
    def resources
      @resources ||= paths.pmap(32) do |p|
        progress_bar.increment
-        S3Sync::Resource.new(p)
+        S3Sync::Resource.new(p, bucket_files.find { |f| f.key == p }).tap(&:status)
      end
    end
 
@@ -75,7 +75,15 @@ module Middleman
    end
 
    def remote_paths
-      @remote_paths ||= bucket.files.map{ |f| f.key }
+      @remote_paths ||= bucket_files.map(&:key)
+    end
+
+    def bucket_files
+      @bucket_files ||= [].tap { |files|
+        bucket.files.each { |f|
+          files << f
+        }
+      }
    end
 
    def create_resources
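Taken together, the resource and sync changes above replace the per-file HEAD request in the old constructor with a single bucket listing shared across all resources; a HEAD is only issued lazily, when the stored content MD5 is actually needed. A minimal sketch of that lazy pattern, assuming a Fog-style bucket whose listing entries carry an ETag (names here are illustrative, not the gem's code):

```ruby
# Illustrative sketch: build resources from one bucket listing and defer the
# HEAD request until the extra metadata it returns is actually required.
class LazyResource
  CONTENT_MD5_KEY = 'x-amz-meta-content-md5'

  def initialize(bucket, path, partial)
    @bucket = bucket                 # Fog-style directory object
    @path   = path
    @partial_s3_resource = partial   # entry taken from bucket.files listing
  end

  # The listing already carries the ETag, so this costs no extra request.
  def remote_object_md5
    s3_resource.etag
  end

  # Custom metadata only comes back from a HEAD request; fetch it on demand.
  def remote_content_md5
    full_s3_resource.metadata[CONTENT_MD5_KEY]
  end

  private

  def s3_resource
    @full_s3_resource || @partial_s3_resource
  end

  def full_s3_resource
    @full_s3_resource ||= @bucket.files.head(@path)  # one HEAD, memoized
  end
end
```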
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: middleman-s3_sync
 version: !ruby/object:Gem::Version
-  version: 3.0.21
+  version: 3.0.22
 platform: ruby
 authors:
 - Frederic Jean
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2013-11-11 00:00:00.000000000 Z
+date: 2013-11-30 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: middleman-core
@@ -256,7 +256,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
       version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.0.3
+rubygems_version: 2.0.14
 signing_key:
 specification_version: 4
 summary: Tries really, really hard not to push files to S3.