sprockets 3.0.0 → 3.7.2
- checksums.yaml +5 -5
- data/CHANGELOG.md +296 -0
- data/README.md +49 -183
- data/bin/sprockets +1 -0
- data/lib/sprockets/asset.rb +3 -2
- data/lib/sprockets/base.rb +13 -2
- data/lib/sprockets/bundle.rb +5 -1
- data/lib/sprockets/cache/file_store.rb +7 -4
- data/lib/sprockets/cache.rb +6 -4
- data/lib/sprockets/closure_compressor.rb +5 -11
- data/lib/sprockets/coffee_script_processor.rb +2 -2
- data/lib/sprockets/coffee_script_template.rb +12 -1
- data/lib/sprockets/compressing.rb +20 -0
- data/lib/sprockets/context.rb +3 -1
- data/lib/sprockets/dependencies.rb +8 -8
- data/lib/sprockets/deprecation.rb +90 -0
- data/lib/sprockets/digest_utils.rb +81 -57
- data/lib/sprockets/eco_processor.rb +2 -2
- data/lib/sprockets/eco_template.rb +12 -1
- data/lib/sprockets/ejs_processor.rb +2 -2
- data/lib/sprockets/ejs_template.rb +12 -1
- data/lib/sprockets/encoding_utils.rb +7 -4
- data/lib/sprockets/engines.rb +11 -0
- data/lib/sprockets/erb_template.rb +6 -1
- data/lib/sprockets/errors.rb +0 -1
- data/lib/sprockets/http_utils.rb +3 -1
- data/lib/sprockets/legacy.rb +16 -0
- data/lib/sprockets/legacy_proc_processor.rb +1 -1
- data/lib/sprockets/legacy_tilt_processor.rb +2 -2
- data/lib/sprockets/loader.rb +208 -59
- data/lib/sprockets/manifest.rb +57 -6
- data/lib/sprockets/path_utils.rb +20 -15
- data/lib/sprockets/processing.rb +10 -0
- data/lib/sprockets/processor_utils.rb +77 -0
- data/lib/sprockets/resolve.rb +9 -6
- data/lib/sprockets/sass_cache_store.rb +6 -1
- data/lib/sprockets/sass_compressor.rb +9 -17
- data/lib/sprockets/sass_processor.rb +16 -9
- data/lib/sprockets/sass_template.rb +14 -2
- data/lib/sprockets/server.rb +32 -12
- data/lib/sprockets/uglifier_compressor.rb +6 -13
- data/lib/sprockets/unloaded_asset.rb +137 -0
- data/lib/sprockets/uri_tar.rb +98 -0
- data/lib/sprockets/uri_utils.rb +5 -7
- data/lib/sprockets/utils/gzip.rb +67 -0
- data/lib/sprockets/utils.rb +35 -18
- data/lib/sprockets/version.rb +1 -1
- data/lib/sprockets/yui_compressor.rb +4 -14
- data/lib/sprockets.rb +21 -11
- metadata +28 -3
data/lib/sprockets/loader.rb
CHANGED
@@ -10,34 +10,54 @@ require 'sprockets/processor_utils'
 require 'sprockets/resolve'
 require 'sprockets/transformers'
 require 'sprockets/uri_utils'
+require 'sprockets/unloaded_asset'
 
 module Sprockets
+
   # The loader phase takes a asset URI location and returns a constructed Asset
   # object.
   module Loader
     include DigestUtils, PathUtils, ProcessorUtils, URIUtils
     include Engines, Mime, Processing, Resolve, Transformers
 
-
+
+    # Public: Load Asset by Asset URI.
+    #
+    # uri - A String containing complete URI to a file including schema
+    #       and full path such as:
+    #       "file:///Path/app/assets/js/app.js?type=application/javascript"
     #
-    # uri - AssetURI
     #
     # Returns Asset.
     def load(uri)
-
-      if params.key?(:id)
-        asset =
-
+      unloaded = UnloadedAsset.new(uri, self)
+      if unloaded.params.key?(:id)
+        unless asset = asset_from_cache(unloaded.asset_key)
+          id = unloaded.params.delete(:id)
+          uri_without_id = build_asset_uri(unloaded.filename, unloaded.params)
+          asset = load_from_unloaded(UnloadedAsset.new(uri_without_id, self))
+          if asset[:id] != id
+            @logger.warn "Sprockets load error: Tried to find #{uri}, but latest was id #{asset[:id]}"
+          end
         end
       else
-        asset = fetch_asset_from_dependency_cache(
+        asset = fetch_asset_from_dependency_cache(unloaded) do |paths|
+          # When asset is previously generated, its "dependencies" are stored in the cache.
+          # The presence of `paths` indicates dependencies were stored.
+          # We can check to see if the dependencies have not changed by "resolving" them and
+          # generating a digest key from the resolved entries. If this digest key has not
+          # changed the asset will be pulled from cache.
+          #
+          # If this `paths` is present but the cache returns nothing then `fetch_asset_from_dependency_cache`
+          # will confusingly be called again with `paths` set to nil where the asset will be
+          # loaded from disk.
           if paths
-            digest = digest(resolve_dependencies(paths))
-            if
-
+            digest = DigestUtils.digest(resolve_dependencies(paths))
+            if uri_from_cache = cache.get(unloaded.digest_key(digest), true)
+              asset_from_cache(UnloadedAsset.new(uri_from_cache, self).asset_key)
            end
          else
-
+            load_from_unloaded(unloaded)
          end
        end
      end
@@ -45,47 +65,58 @@ module Sprockets
     end
 
     private
-      def load_asset_by_id_uri(uri, filename, params)
-        # Internal assertion, should be routed through load_asset_by_uri
-        unless id = params.delete(:id)
-          raise ArgumentError, "expected uri to have an id: #{uri}"
-        end
 
-
-
+      # Internal: Load asset hash from cache
+      #
+      # key - A String containing lookup information for an asset
+      #
+      # This method converts all "compressed" paths to absolute paths.
+      # Returns a hash of values representing an asset
+      def asset_from_cache(key)
+        asset = cache.get(key, true)
+        if asset
+          asset[:uri] = expand_from_root(asset[:uri])
+          asset[:load_path] = expand_from_root(asset[:load_path])
+          asset[:filename] = expand_from_root(asset[:filename])
+          asset[:metadata][:included].map! { |uri| expand_from_root(uri) } if asset[:metadata][:included]
+          asset[:metadata][:links].map! { |uri| expand_from_root(uri) } if asset[:metadata][:links]
+          asset[:metadata][:stubbed].map! { |uri| expand_from_root(uri) } if asset[:metadata][:stubbed]
+          asset[:metadata][:required].map! { |uri| expand_from_root(uri) } if asset[:metadata][:required]
+          asset[:metadata][:dependencies].map! { |uri| uri.start_with?("file-digest://") ? expand_from_root(uri) : uri } if asset[:metadata][:dependencies]
 
-
-
+          asset[:metadata].each_key do |k|
+            next unless k =~ /_dependencies\z/
+            asset[:metadata][k].map! { |uri| expand_from_root(uri) }
+          end
         end
-
         asset
       end
 
-
-
-
-
-
-
-
-
+      # Internal: Loads an asset and saves it to cache
+      #
+      # unloaded - An UnloadedAsset
+      #
+      # This method is only called when the given unloaded asset could not be
+      # successfully pulled from cache.
+      def load_from_unloaded(unloaded)
+        unless file?(unloaded.filename)
+          raise FileNotFound, "could not find file: #{unloaded.filename}"
        end
 
-        load_path, logical_path = paths_split(config[:paths], filename)
+        load_path, logical_path = paths_split(config[:paths], unloaded.filename)
 
        unless load_path
-          raise FileOutsidePaths, "#{filename} is no longer under a load path: #{self.paths.join(', ')}"
+          raise FileOutsidePaths, "#{unloaded.filename} is no longer under a load path: #{self.paths.join(', ')}"
        end
 
        logical_path, file_type, engine_extnames, _ = parse_path_extnames(logical_path)
-        logical_path = normalize_logical_path(logical_path)
        name = logical_path
 
-        if pipeline = params[:pipeline]
+        if pipeline = unloaded.params[:pipeline]
          logical_path += ".#{pipeline}"
        end
 
-        if type = params[:type]
+        if type = unloaded.params[:type]
          logical_path += config[:mime_types][type][:extensions].first
        end
 
@@ -103,72 +134,190 @@ module Sprockets
          result = call_processors(processors, {
            environment: self,
            cache: self.cache,
-            uri: uri,
-            filename: filename,
+            uri: unloaded.uri,
+            filename: unloaded.filename,
            load_path: load_path,
            name: name,
            content_type: type,
            metadata: { dependencies: dependencies }
          })
+          validate_processor_result!(result)
          source = result.delete(:data)
-          metadata = result
-
-
-
-          )
+          metadata = result
+          metadata[:charset] = source.encoding.name.downcase unless metadata.key?(:charset)
+          metadata[:digest] = digest(source)
+          metadata[:length] = source.bytesize
        else
+          dependencies << build_file_digest_uri(unloaded.filename)
          metadata = {
-            digest: file_digest(filename),
-            length: self.stat(filename).size,
+            digest: file_digest(unloaded.filename),
+            length: self.stat(unloaded.filename).size,
            dependencies: dependencies
          }
        end
 
        asset = {
-          uri: uri,
+          uri: unloaded.uri,
          load_path: load_path,
-          filename: filename,
+          filename: unloaded.filename,
          name: name,
          logical_path: logical_path,
          content_type: type,
          source: source,
          metadata: metadata,
-
-          dependencies_digest: digest(resolve_dependencies(metadata[:dependencies]))
+          dependencies_digest: DigestUtils.digest(resolve_dependencies(metadata[:dependencies]))
        }
 
        asset[:id] = pack_hexdigest(digest(asset))
-        asset[:uri] = build_asset_uri(filename, params.merge(id: asset[:id]))
+        asset[:uri] = build_asset_uri(unloaded.filename, unloaded.params.merge(id: asset[:id]))
 
        # Deprecated: Avoid tracking Asset mtime
        asset[:mtime] = metadata[:dependencies].map { |u|
          if u.start_with?("file-digest:")
            s = self.stat(parse_file_digest_uri(u))
-            s ? s.mtime.to_i :
+            s ? s.mtime.to_i : nil
          else
-
+            nil
          end
-        }.max
-
-        cache.set("asset-uri:#{VERSION}:#{asset[:uri]}", asset, true)
-        cache.set("asset-uri-digest:#{VERSION}:#{uri}:#{asset[:dependencies_digest]}", asset[:uri], true)
+        }.compact.max
+        asset[:mtime] ||= self.stat(unloaded.filename).mtime.to_i
 
+        store_asset(asset, unloaded)
        asset
      end
 
-
-
-
+      # Internal: Save a given asset to the cache
+      #
+      # asset - A hash containing values of loaded asset
+      # unloaded - The UnloadedAsset used to lookup the `asset`
+      #
+      # This method converts all absolute paths to "compressed" paths
+      # which are relative if they're in the root.
+      def store_asset(asset, unloaded)
+        # Save the asset in the cache under the new URI
+        cached_asset = asset.dup
+        cached_asset[:uri] = compress_from_root(asset[:uri])
+        cached_asset[:filename] = compress_from_root(asset[:filename])
+        cached_asset[:load_path] = compress_from_root(asset[:load_path])
 
+        if cached_asset[:metadata]
+          # Deep dup to avoid modifying `asset`
+          cached_asset[:metadata] = cached_asset[:metadata].dup
+          if cached_asset[:metadata][:included] && !cached_asset[:metadata][:included].empty?
+            cached_asset[:metadata][:included] = cached_asset[:metadata][:included].dup
+            cached_asset[:metadata][:included].map! { |uri| compress_from_root(uri) }
+          end
+
+          if cached_asset[:metadata][:links] && !cached_asset[:metadata][:links].empty?
+            cached_asset[:metadata][:links] = cached_asset[:metadata][:links].dup
+            cached_asset[:metadata][:links].map! { |uri| compress_from_root(uri) }
+          end
+
+          if cached_asset[:metadata][:stubbed] && !cached_asset[:metadata][:stubbed].empty?
+            cached_asset[:metadata][:stubbed] = cached_asset[:metadata][:stubbed].dup
+            cached_asset[:metadata][:stubbed].map! { |uri| compress_from_root(uri) }
+          end
+
+          if cached_asset[:metadata][:required] && !cached_asset[:metadata][:required].empty?
+            cached_asset[:metadata][:required] = cached_asset[:metadata][:required].dup
+            cached_asset[:metadata][:required].map! { |uri| compress_from_root(uri) }
+          end
+
+          if cached_asset[:metadata][:dependencies] && !cached_asset[:metadata][:dependencies].empty?
+            cached_asset[:metadata][:dependencies] = cached_asset[:metadata][:dependencies].dup
+            cached_asset[:metadata][:dependencies].map! do |uri|
+              uri.start_with?("file-digest://".freeze) ? compress_from_root(uri) : uri
+            end
+          end
+
+          # compress all _dependencies in metadata like `sass_dependencies`
+          cached_asset[:metadata].each do |key, value|
+            next unless key =~ /_dependencies\z/
+            cached_asset[:metadata][key] = value.dup
+            cached_asset[:metadata][key].map! {|uri| compress_from_root(uri) }
+          end
+        end
+
+        # Unloaded asset and stored_asset now have a different URI
+        stored_asset = UnloadedAsset.new(asset[:uri], self)
+        cache.set(stored_asset.asset_key, cached_asset, true)
+
+        # Save the new relative path for the digest key of the unloaded asset
+        cache.set(unloaded.digest_key(asset[:dependencies_digest]), stored_asset.compressed_path, true)
+      end
+
+
+      # Internal: Resolve set of dependency URIs.
+      #
+      # uris - An Array of "dependencies" for example:
+      #        ["environment-version", "environment-paths", "processors:type=text/css&file_type=text/css",
+      #         "file-digest:///Full/path/app/assets/stylesheets/application.css",
+      #         "processors:type=text/css&file_type=text/css&pipeline=self",
+      #         "file-digest:///Full/path/app/assets/stylesheets"]
+      #
+      # Returns back array of things that the given uri dpends on
+      # For example the environment version, if you're using a different version of sprockets
+      # then the dependencies should be different, this is used only for generating cache key
+      # for example the "environment-version" may be resolved to "environment-1.0-3.2.0" for
+      # version "3.2.0" of sprockets.
+      #
+      # Any paths that are returned are converted to relative paths
+      #
+      # Returns array of resolved dependencies
+      def resolve_dependencies(uris)
+        uris.map { |uri| resolve_dependency(uri) }
+      end
+
+      # Internal: Retrieves an asset based on its digest
+      #
+      # unloaded - An UnloadedAsset
+      # limit    - A Fixnum which sets the maximum number of versions of "histories"
+      #            stored in the cache
+      #
+      # This method attempts to retrieve the last `limit` number of histories of an asset
+      # from the cache a "history" which is an array of unresolved "dependencies" that the asset needs
+      # to compile. In this case A dependency can refer to either an asset i.e. index.js
+      # may rely on jquery.js (so jquery.js is a depndency), or other factors that may affect
+      # compilation, such as the VERSION of sprockets (i.e. the environment) and what "processors"
+      # are used.
+      #
+      # For example a history array may look something like this
+      #
+      #   [["environment-version", "environment-paths", "processors:type=text/css&file_type=text/css",
+      #     "file-digest:///Full/path/app/assets/stylesheets/application.css",
+      #     "processors:type=text/css&file_digesttype=text/css&pipeline=self",
+      #     "file-digest:///Full/path/app/assets/stylesheets"]]
+      #
+      # Where the first entry is a Set of dependencies for last generated version of that asset.
+      # Multiple versions are stored since sprockets keeps the last `limit` number of assets
+      # generated present in the system.
+      #
+      # If a "history" of dependencies is present in the cache, each version of "history" will be
+      # yielded to the passed block which is responsible for loading the asset. If found, the existing
+      # history will be saved with the dependency that found a valid asset moved to the front.
+      #
+      # If no history is present, or if none of the histories could be resolved to a valid asset then,
+      # the block is yielded to and expected to return a valid asset.
+      # When this happens the dependencies for the returned asset are added to the "history", and older
+      # entries are removed if the "history" is above `limit`.
+      def fetch_asset_from_dependency_cache(unloaded, limit = 3)
+        key = unloaded.dependency_history_key
+
+        history = cache.get(key) || []
        history.each_with_index do |deps, index|
-
+          expanded_deps = deps.map do |path|
+            path.start_with?("file-digest://") ? expand_from_root(path) : path
+          end
+          if asset = yield(expanded_deps)
            cache.set(key, history.rotate!(index)) if index > 0
            return asset
          end
        end
 
        asset = yield
-        deps
+        deps = asset[:metadata][:dependencies].dup.map! do |uri|
+          uri.start_with?("file-digest://") ? compress_from_root(uri) : uri
+        end
        cache.set(key, history.unshift(deps).take(limit))
        asset
      end
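The heart of this change is that cached asset hashes no longer store absolute paths: store_asset runs every path through compress_from_root before writing, and asset_from_cache reverses it with expand_from_root, so a cache written in one release directory stays usable in the next. A minimal sketch of that round trip, with a hypothetical `root` and lambda stand-ins for the real helpers (the actual implementation lives in the new data/lib/sprockets/uri_tar.rb):

root = "/deploy/releases/42"

# Paths under the root are cached relative to it; foreign paths pass through.
compress_from_root = ->(path) { path.start_with?("#{root}/") ? path.sub("#{root}/", "") : path }
# On read, relative paths are re-anchored at the current root.
expand_from_root = ->(path) { path.start_with?("/") ? path : File.join(root, path) }

cached = compress_from_root.call("/deploy/releases/42/app/assets/js/app.js")
# => "app/assets/js/app.js"
expand_from_root.call(cached)
# => "/deploy/releases/42/app/assets/js/app.js"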
data/lib/sprockets/manifest.rb
CHANGED
@@ -1,6 +1,10 @@
 require 'json'
 require 'time'
+
+require 'concurrent'
+
 require 'sprockets/manifest_utils'
+require 'sprockets/utils/gzip'
 
 module Sprockets
   # The Manifest logs the contents of assets compiled to a single directory. It
@@ -145,6 +149,24 @@ module Sprockets
      nil
    end
 
+    # Public: Find the source of assets by paths.
+    #
+    # Returns Enumerator of assets file content.
+    def find_sources(*args)
+      return to_enum(__method__, *args) unless block_given?
+
+      if environment
+        find(*args).each do |asset|
+          yield asset.source
+        end
+      else
+        args.each do |path|
+          asset = assets[path]
+          yield File.binread(File.join(dir, asset)) if asset
+        end
+      end
+    end
+
    # Compile and write asset to directory. The asset is written to a
    # fingerprinted filename like
    # `application-2e8e9a7c6b0aafa0c9bdeec90ea30213.js`. An entry is
@@ -157,7 +179,9 @@ module Sprockets
        raise Error, "manifest requires environment for compilation"
      end
 
-      filenames
+      filenames = []
+      concurrent_compressors = []
+      concurrent_writers = []
 
      find(*args) do |asset|
        files[asset.digest_path] = {
@@ -165,21 +189,46 @@ module Sprockets
          'mtime' => asset.mtime.iso8601,
          'size' => asset.bytesize,
          'digest' => asset.hexdigest,
-
+
+          # Deprecated: Remove beta integrity attribute in next release.
+          # Callers should DigestUtils.hexdigest_integrity_uri to compute the
+          # digest themselves.
+          'integrity' => DigestUtils.hexdigest_integrity_uri(asset.hexdigest)
        }
        assets[asset.logical_path] = asset.digest_path
 
+        if alias_logical_path = self.class.compute_alias_logical_path(asset.logical_path)
+          assets[alias_logical_path] = asset.digest_path
+        end
+
        target = File.join(dir, asset.digest_path)
 
        if File.exist?(target)
          logger.debug "Skipping #{target}, already exists"
        else
          logger.info "Writing #{target}"
-          asset.write_to target
+          write_file = Concurrent::Future.execute { asset.write_to target }
+          concurrent_writers << write_file
        end
-
        filenames << asset.filename
+
+        next if environment.skip_gzip?
+        gzip = Utils::Gzip.new(asset)
+        next if gzip.cannot_compress?(environment.mime_types)
+
+        if File.exist?("#{target}.gz")
+          logger.debug "Skipping #{target}.gz, already exists"
+        else
+          logger.info "Writing #{target}.gz"
+          concurrent_compressors << Concurrent::Future.execute do
+            write_file.wait! if write_file
+            gzip.compress(target)
+          end
+        end
+
      end
+      concurrent_writers.each(&:wait!)
+      concurrent_compressors.each(&:wait!)
      save
 
      filenames
@@ -192,6 +241,7 @@ module Sprockets
    #
    def remove(filename)
      path = File.join(dir, filename)
+      gzip = "#{path}.gz"
      logical_path = files[filename]['logical_path']
 
      if assets[logical_path] == filename
@@ -200,6 +250,7 @@ module Sprockets
 
      files.delete(filename)
      FileUtils.rm(path) if File.exist?(path)
+      FileUtils.rm(gzip) if File.exist?(gzip)
 
      save
 
@@ -230,9 +281,9 @@ module Sprockets
        # Sort by timestamp
        Time.parse(attrs['mtime'])
      }.reverse.each_with_index.drop_while { |(_, attrs), index|
-
+        _age = [0, Time.now - Time.parse(attrs['mtime'])].max
        # Keep if under age or within the count limit
-
+        _age < age || index < count
      }.each { |(path, _), _|
        # Remove old assets
        remove(path)
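compile now overlaps disk writes and gzip generation with concurrent-ruby futures (hence the new require 'concurrent'): each write runs inside a Concurrent::Future, the matching compressor future wait!s on its writer before gzipping, and the method joins every future before save. A toy sketch of the same coordination pattern, with File.write and Zlib standing in for asset.write_to and Utils::Gzip:

require 'concurrent'
require 'zlib'

writers = []
compressors = []

{ "a.js" => "alert(1)", "b.css" => "body{}" }.each do |name, content|
  write = Concurrent::Future.execute { File.write(name, content) }
  writers << write
  compressors << Concurrent::Future.execute do
    write.wait! # never gzip a half-written file
    Zlib::GzipWriter.open("#{name}.gz") { |gz| gz.write(content) }
  end
end

writers.each(&:wait!)     # surface any write errors
compressors.each(&:wait!) # and any compression errors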
data/lib/sprockets/path_utils.rb
CHANGED
@@ -1,5 +1,3 @@
-require 'fileutils'
-
 module Sprockets
   # Internal: File and path related utilities. Mixed into Environment.
   #
@@ -55,9 +53,13 @@
    # Returns an empty `Array` if the directory does not exist.
    def entries(path)
      if File.directory?(path)
-        Dir.entries(path, :encoding => Encoding.default_internal)
-
-
+        entries = Dir.entries(path, :encoding => Encoding.default_internal)
+        entries.reject! { |entry|
+          entry.start_with?(".".freeze) ||
+            (entry.start_with?("#".freeze) && entry.end_with?("#".freeze)) ||
+            entry.end_with?("~".freeze)
+        }
+        entries.sort!
      else
        []
      end
@@ -146,16 +148,19 @@
    #
    # Returns [String extname, Object value] or nil nothing matched.
    def match_path_extname(path, extensions)
-
-
-
-
-
-
-
+      basename = File.basename(path)
+
+      i = basename.index('.'.freeze)
+      while i && i < basename.length - 1
+        extname = basename[i..-1]
+        if value = extensions[extname]
+          return extname, value
        end
+
+        i = basename.index('.'.freeze, i+1)
      end
-
+
+      nil
    end
 
    # Internal: Returns all parents for path
@@ -274,9 +279,9 @@
        yield f
      end
 
-
+      File.rename(tmpname, filename)
    ensure
-
+      File.delete(tmpname) if File.exist?(tmpname)
    end
  end
 end
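The rewritten match_path_extname walks every dot-delimited suffix of the basename from left to right, so a compound extension in the table wins before its shorter tail is tried. Expected behavior, given a hypothetical extensions table:

extensions = { ".js.erb" => :erb_js, ".js" => :js }

match_path_extname("app.js.erb", extensions)    # => [".js.erb", :erb_js]
match_path_extname("jquery.min.js", extensions) # => [".js", :js] (".min.js" misses, ".js" hits)
match_path_extname("README", extensions)        # => nil (no dot, the loop never runs)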
data/lib/sprockets/processing.rb
CHANGED
@@ -231,14 +231,24 @@
      compute_transformers!
    end
 
+    def deprecate_legacy_processor_interface(interface)
+      msg = "You are using a deprecated processor interface #{ interface.inspect }.\n" +
+            "Please update your processor interface:\n" +
+            "https://github.com/rails/sprockets/blob/master/guides/extending_sprockets.md#supporting-all-versions-of-sprockets-in-processors\n"
+
+      Deprecation.new([caller[3]]).warn msg
+    end
+
    def wrap_processor(klass, proc)
      if !proc
        if klass.respond_to?(:call)
          klass
        else
+          deprecate_legacy_processor_interface(klass)
          LegacyTiltProcessor.new(klass)
        end
      elsif proc.respond_to?(:arity) && proc.arity == 2
+        deprecate_legacy_processor_interface(proc)
        LegacyProcProcessor.new(klass.to_s, proc)
      else
        proc
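For reference, the two processor shapes wrap_processor distinguishes look roughly like this (a sketch; the gsub bodies are invented):

# Legacy Tilt-style proc: arity 2, receives (context, data), returns a String.
# wrap_processor now warns via deprecate_legacy_processor_interface before
# wrapping it in LegacyProcProcessor.
legacy = proc { |context, data| data.gsub("VERSION", "1.0.0") }

# Supported interface: a callable taking one input Hash and returning either
# a String or a metadata Hash with :data. Passed through unwrapped.
modern = lambda do |input|
  { data: input[:data].gsub("VERSION", "1.0.0") }
end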
data/lib/sprockets/processor_utils.rb
CHANGED
@@ -1,3 +1,5 @@
+require 'set'
+
 module Sprockets
   # Functional utilities for dealing with Processor functions.
   #
@@ -99,5 +101,80 @@
    def processors_cache_keys(processors)
      processors.map { |processor| processor_cache_key(processor) }
    end
+
+    # Internal: Set of all "simple" value types allowed to be returned in
+    # processor metadata.
+    VALID_METADATA_VALUE_TYPES = Set.new([
+      String,
+      Symbol,
+      TrueClass,
+      FalseClass,
+      NilClass
+    ] + (0.class == Integer ? [Integer] : [Bignum, Fixnum])).freeze
+
+    # Internal: Set of all nested compound metadata types that can nest values.
+    VALID_METADATA_COMPOUND_TYPES = Set.new([
+      Array,
+      Hash,
+      Set
+    ]).freeze
+
+    # Internal: Hash of all "simple" value types allowed to be returned in
+    # processor metadata.
+    VALID_METADATA_VALUE_TYPES_HASH = VALID_METADATA_VALUE_TYPES.each_with_object({}) do |type, hash|
+      hash[type] = true
+    end.freeze
+
+    # Internal: Hash of all nested compound metadata types that can nest values.
+    VALID_METADATA_COMPOUND_TYPES_HASH = VALID_METADATA_COMPOUND_TYPES.each_with_object({}) do |type, hash|
+      hash[type] = true
+    end.freeze
+
+    # Internal: Set of all allowed metadata types.
+    VALID_METADATA_TYPES = (VALID_METADATA_VALUE_TYPES + VALID_METADATA_COMPOUND_TYPES).freeze
+
+    # Internal: Validate returned result of calling a processor pipeline and
+    # raise a friendly user error message.
+    #
+    # result - Metadata Hash returned from call_processors
+    #
+    # Returns result or raises a TypeError.
+    def validate_processor_result!(result)
+      if !result.instance_of?(Hash)
+        raise TypeError, "processor metadata result was expected to be a Hash, but was #{result.class}"
+      end
+
+      if !result[:data].instance_of?(String)
+        raise TypeError, "processor :data was expected to be a String, but as #{result[:data].class}"
+      end
+
+      result.each do |key, value|
+        if !key.instance_of?(Symbol)
+          raise TypeError, "processor metadata[#{key.inspect}] expected to be a Symbol"
+        end
+
+        if !valid_processor_metadata_value?(value)
+          raise TypeError, "processor metadata[:#{key}] returned a complex type: #{value.inspect}\n" +
+            "Only #{VALID_METADATA_TYPES.to_a.join(", ")} maybe used."
+        end
+      end
+
+      result
+    end
+
+    # Internal: Validate object is in validate metadata whitelist.
+    #
+    # value - Any Object
+    #
+    # Returns true if class is in whitelist otherwise false.
+    def valid_processor_metadata_value?(value)
+      if VALID_METADATA_VALUE_TYPES_HASH[value.class]
+        true
+      elsif VALID_METADATA_COMPOUND_TYPES_HASH[value.class]
+        value.all? { |v| valid_processor_metadata_value?(v) }
+      else
+        false
+      end
+    end
  end
 end
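Taken together, the new whitelist means processor metadata must be built from simple values (String, Symbol, true/false, nil, Integer) or Array/Hash/Set nestings of them, and validate_processor_result! enforces that on top of the Hash/:data contract. A usage sketch, assuming the module's helpers are in scope (Loader includes ProcessorUtils):

valid_processor_metadata_value?("a")            # => true  (simple type)
valid_processor_metadata_value?([:a, [1, nil]]) # => true  (compound types recurse)
valid_processor_metadata_value?(Object.new)     # => false (not whitelisted)

validate_processor_result!(data: "body{}", digest: "abc")
# => { data: "body{}", digest: "abc" }
validate_processor_result!(nil)
# raises TypeError: processor metadata result was expected to be a Hash, but was NilClass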