sprockets 2.12.5 → 3.0.0.beta.1
Sign up to get free protection for your applications and to get access to all the features.
Potentially problematic release.
This version of sprockets might be problematic. Click here for more details.
- checksums.yaml +5 -5
- data/LICENSE +2 -2
- data/README.md +61 -34
- data/lib/rake/sprocketstask.rb +5 -4
- data/lib/sprockets.rb +123 -85
- data/lib/sprockets/asset.rb +161 -200
- data/lib/sprockets/asset_uri.rb +64 -0
- data/lib/sprockets/base.rb +138 -373
- data/lib/sprockets/bower.rb +56 -0
- data/lib/sprockets/bundle.rb +32 -0
- data/lib/sprockets/cache.rb +220 -0
- data/lib/sprockets/cache/file_store.rb +145 -13
- data/lib/sprockets/cache/memory_store.rb +66 -0
- data/lib/sprockets/cache/null_store.rb +46 -0
- data/lib/sprockets/cached_environment.rb +103 -0
- data/lib/sprockets/closure_compressor.rb +30 -12
- data/lib/sprockets/coffee_script_template.rb +23 -0
- data/lib/sprockets/compressing.rb +20 -25
- data/lib/sprockets/configuration.rb +95 -0
- data/lib/sprockets/context.rb +68 -131
- data/lib/sprockets/directive_processor.rb +138 -179
- data/lib/sprockets/eco_template.rb +10 -19
- data/lib/sprockets/ejs_template.rb +10 -19
- data/lib/sprockets/encoding_utils.rb +246 -0
- data/lib/sprockets/engines.rb +40 -29
- data/lib/sprockets/environment.rb +10 -66
- data/lib/sprockets/erb_template.rb +23 -0
- data/lib/sprockets/errors.rb +5 -13
- data/lib/sprockets/http_utils.rb +97 -0
- data/lib/sprockets/jst_processor.rb +28 -15
- data/lib/sprockets/lazy_processor.rb +15 -0
- data/lib/sprockets/legacy.rb +23 -0
- data/lib/sprockets/legacy_proc_processor.rb +35 -0
- data/lib/sprockets/legacy_tilt_processor.rb +29 -0
- data/lib/sprockets/manifest.rb +128 -99
- data/lib/sprockets/mime.rb +114 -33
- data/lib/sprockets/path_utils.rb +179 -0
- data/lib/sprockets/paths.rb +13 -26
- data/lib/sprockets/processing.rb +198 -107
- data/lib/sprockets/resolve.rb +289 -0
- data/lib/sprockets/sass_compressor.rb +36 -17
- data/lib/sprockets/sass_template.rb +269 -46
- data/lib/sprockets/server.rb +113 -83
- data/lib/sprockets/transformers.rb +69 -0
- data/lib/sprockets/uglifier_compressor.rb +36 -15
- data/lib/sprockets/utils.rb +161 -44
- data/lib/sprockets/version.rb +1 -1
- data/lib/sprockets/yui_compressor.rb +37 -12
- metadata +64 -106
- data/lib/sprockets/asset_attributes.rb +0 -137
- data/lib/sprockets/bundled_asset.rb +0 -78
- data/lib/sprockets/caching.rb +0 -96
- data/lib/sprockets/charset_normalizer.rb +0 -41
- data/lib/sprockets/index.rb +0 -100
- data/lib/sprockets/processed_asset.rb +0 -152
- data/lib/sprockets/processor.rb +0 -32
- data/lib/sprockets/safety_colons.rb +0 -28
- data/lib/sprockets/sass_cache_store.rb +0 -29
- data/lib/sprockets/sass_functions.rb +0 -70
- data/lib/sprockets/sass_importer.rb +0 -30
- data/lib/sprockets/scss_template.rb +0 -13
- data/lib/sprockets/static_asset.rb +0 -60
require 'json'

module Sprockets
  module Bower
    # Internal: Candidate bower manifest basenames, in lookup order.
    #
    # https://github.com/bower/json/blob/0.4.0/lib/json.js#L7
    POSSIBLE_BOWER_JSONS = ['bower.json', 'component.json', '.bower.json']

    # Internal: Override resolve_alternates to install bower.json behavior.
    #
    # load_path    - String environment path
    # logical_path - String path relative to base
    #
    # Returns nothing.
    def resolve_alternates(load_path, logical_path, &block)
      super

      # A bower component can only be nested one level deep, so the logical
      # path must not contain a directory separator.
      unless logical_path.include?('/')
        component_dir = File.join(load_path, logical_path)
        dir_stat = self.stat(component_dir)

        if dir_stat && dir_stat.directory?
          manifest = POSSIBLE_BOWER_JSONS
            .map { |basename| File.join(component_dir, basename) }
            .detect { |candidate| self.file?(candidate) }

          read_bower_main(component_dir, manifest, &block) if manifest
        end
      end

      nil
    end

    # Internal: Read bower.json's main directive and yield each referenced
    # absolute path to the block.
    #
    # dirname  - String path to component directory.
    # filename - String path to bower.json.
    #
    # Returns nothing.
    def read_bower_main(dirname, filename)
      # create_additions: false keeps JSON.parse from instantiating arbitrary
      # classes out of untrusted manifest data.
      bower = JSON.parse(File.read(filename), create_additions: false)

      mains = bower['main']
      if mains.is_a?(String)
        yield File.expand_path(mains, dirname)
      elsif mains.is_a?(Array)
        mains.each { |entry| yield File.expand_path(entry, dirname) }
      end
    end
  end
end
module Sprockets
  # Internal: Bundle processor takes a single file asset and prepends all the
  # `:required` URIs to the contents.
  #
  # Uses pipeline metadata:
  #
  #   :required - Ordered Set of asset URIs to prepend
  #   :stubbed  - Set of asset URIs to subtract from the required set.
  #
  # Also see DirectiveProcessor.
  class Bundle
    def self.call(input)
      environment  = input[:environment]
      content_type = input[:content_type]

      # TODO: Rebuilding this URI is a bit of a smell
      processed_uri = AssetURI.build(input[:filename], type: content_type, skip_bundle: true)

      # Memoize asset lookups so each URI is resolved at most once.
      asset_cache = Hash.new do |memo, uri|
        memo[uri] = environment.find_asset_by_uri(uri)
      end

      requires_for  = proc { |uri| asset_cache[uri].metadata[:required] }
      required_uris = Utils.dfs(processed_uri, &requires_for)
      stubbed_uris  = Utils.dfs(asset_cache[processed_uri].metadata[:stubbed], &requires_for)
      required_uris.subtract(stubbed_uris)
      assets = required_uris.map { |uri| asset_cache[uri] }

      reducers = environment.unwrap_bundle_reducers(content_type)
      environment.process_bundle_reducers(assets, reducers).merge(included: assets.map(&:uri))
    end
  end
end
require 'sprockets/utils'
require 'logger'

module Sprockets
  # Public: Wrapper interface to backend cache stores. Ensures a consistent API
  # even when the backend uses get/set or read/write.
  #
  # Public cache interface
  #
  #   Always assign the backend store instance to Environment#cache=.
  #
  #     environment.cache = Sprockets::Cache::MemoryStore.new(1000)
  #
  #   Environment#cache will always return a wrapped Cache interface. See the
  #   methods marked public on this class.
  #
  #
  # Backend cache interface
  #
  #   The backend cache store must implement two methods.
  #
  #   get(key)
  #
  #     key - An opaque String with a length less than 250 characters.
  #
  #     Returns a JSON serializable object.
  #
  #   set(key, value)
  #
  #     Will only be called once per key. Setting a key "foo" with value "bar",
  #     then later key "foo" with value "baz" is an undefined behavior.
  #
  #     key   - An opaque String with a length less than 250 characters.
  #     value - A JSON serializable object.
  #
  #     Returns argument value.
  #
  class Cache
    # Builtin cache stores.
    autoload :FileStore,   'sprockets/cache/file_store'
    autoload :MemoryStore, 'sprockets/cache/memory_store'
    autoload :NullStore,   'sprockets/cache/null_store'

    # Internal: Cache key version for this class. Rarely should have to change
    # unless the cache format radically changes. Will be bumped on major
    # version releases though.
    VERSION = '3.0'

    # Internal: Default standard error logger that only reports fatal issues.
    #
    # Returns a Logger.
    def self.default_logger
      Logger.new($stderr).tap { |logger| logger.level = Logger::FATAL }
    end

    # Internal: Wrap a backend cache store.
    #
    # Always assign a backend cache store instance to Environment#cache= and
    # use Environment#cache to retrieve a wrapped interface.
    #
    # cache - A compatible backend cache store instance.
    def initialize(cache = nil, logger = self.class.default_logger)
      @cache_wrapper = get_cache_wrapper(cache)
      # Small in-memory layer in front of the backend store for hot keys.
      @fetch_cache = Cache::MemoryStore.new(1024)
      @logger = logger
    end

    # Public: Prefer API to retrieve and set values in the cache store.
    #
    # key   - JSON serializable key
    # block - Must return a consistent JSON serializable object for the
    #         given key.
    #
    # Examples
    #
    #     cache.fetch("foo") { "bar" }
    #
    # Returns a JSON serializable object.
    def fetch(key)
      started_at = Utils.benchmark_start
      cache_key = expand_key(key)

      value = @fetch_cache.get(cache_key)
      if value.nil?
        value = @cache_wrapper.get(cache_key)
        if value.nil?
          # Full miss: compute, persist to the backend, and log the cost.
          value = yield
          @cache_wrapper.set(cache_key, value)
          @logger.debug do
            ms = "(#{Utils.benchmark_end(started_at)}ms)"
            "Sprockets Cache miss #{peek_key(key)} #{ms}"
          end
        end
        @fetch_cache.set(cache_key, value)
      end
      value
    end

    # Public: Low level API to retrieve item directly from the backend cache
    # store.
    #
    # This API may be used publicly, but may have undefined behavior
    # depending on the backend store being used. Therefore it must be used
    # with caution, which is why its prefixed with an underscore. Prefer the
    # Cache#fetch API over using this.
    #
    # key - JSON serializable key
    #
    # Returns a JSON serializable object or nil if there was a cache miss.
    def _get(key)
      @cache_wrapper.get(expand_key(key))
    end

    # Public: Low level API to set item directly to the backend cache store.
    #
    # This API may be used publicly, but may have undefined behavior
    # depending on the backend store being used. Therefore it must be used
    # with caution, which is why its prefixed with an underscore. Prefer the
    # Cache#fetch API over using this.
    #
    # key   - JSON serializable key
    # value - A consistent JSON serializable object for the given key.
    #         Setting a different value for the given key has undefined
    #         behavior.
    #
    # Returns the value argument.
    def _set(key, value)
      @cache_wrapper.set(expand_key(key), value)
    end

    # Public: Pretty inspect
    #
    # Returns String.
    def inspect
      "#<#{self.class} local=#{@fetch_cache.inspect} store=#{@cache_wrapper.cache.inspect}>"
    end

    private
      # Internal: Expand object cache key into a short String key.
      #
      # The String should be under 250 characters so its compatible with
      # Memcache.
      #
      # key - JSON serializable key
      #
      # Returns a String with a length less than 250 characters.
      def expand_key(key)
        "sprockets/v#{VERSION}/#{Utils.hexdigest(key)}"
      end

      PEEK_SIZE = 100

      # Internal: Show first 100 characters of cache key for logging purposes.
      #
      # Returns a String with a length less than 100 characters.
      def peek_key(key)
        case key
        when String
          key[0, PEEK_SIZE].inspect
        when Array
          key.map { |part| peek_key(part) }.join(':')[0, PEEK_SIZE]
        else
          peek_key(Utils.hexdigest(key))
        end
      end

      # Internal: Pick the adapter matching the backend store's API, falling
      # back to a no-op store when nothing compatible is found.
      def get_cache_wrapper(cache)
        if cache.is_a?(Cache)
          cache
        elsif cache.respond_to?(:get)
          # `Cache#get(key)` for Memcache
          GetWrapper.new(cache)
        elsif cache.respond_to?(:[])
          # `Cache#[key]` so `Hash` can be used
          HashWrapper.new(cache)
        elsif cache.respond_to?(:read)
          # `Cache#read(key)` for `ActiveSupport::Cache` support
          ReadWriteWrapper.new(cache)
        else
          GetWrapper.new(Sprockets::Cache::NullStore.new)
        end
      end

      # Internal: Base adapter holding a reference to the backend store.
      Wrapper = Struct.new(:cache)

      class GetWrapper < Wrapper
        def get(key)
          cache.get(key)
        end

        def set(key, value)
          cache.set(key, value)
        end
      end

      class HashWrapper < Wrapper
        def get(key)
          cache[key]
        end

        def set(key, value)
          cache[key] = value
        end
      end

      class ReadWriteWrapper < Wrapper
        def get(key)
          cache.read(key)
        end

        def set(key, value)
          cache.write(key, value)
        end
      end
  end
end
require 'digest/md5'
require 'fileutils'
require 'logger'

module Sprockets
  class Cache
    # Public: A file system cache store that automatically cleans up old keys.
    #
    # Assign the instance to the Environment#cache.
    #
    #     environment.cache = Sprockets::Cache::FileStore.new("/tmp")
    #
    # See Also
    #
    #   ActiveSupport::Cache::FileStore
    #
    class FileStore
      # Internal: Default byte limit for the store (25 MB). GC is triggered
      # when the total size of cache files exceeds this.
      DEFAULT_MAX_SIZE = 25 * 1024 * 1024

      # Internal: Default standard error fatal logger.
      #
      # Returns a Logger.
      def self.default_logger
        logger = Logger.new($stderr)
        logger.level = Logger::FATAL
        logger
      end

      # Public: Initialize the cache store.
      #
      # root     - A String path to a directory to persist cached values to.
      # max_size - An Integer of the maximum number of bytes the store will
      #            hold (default: 25 MB).
      # logger   - A Logger for unmarshal errors and GC reports.
      def initialize(root, max_size = DEFAULT_MAX_SIZE, logger = self.class.default_logger)
        @root     = root
        # Reuse compute_size rather than re-implementing the summation here.
        @size     = compute_size(find_caches)
        @max_size = max_size
        # GC down to 75% of the limit so collections don't run on every set.
        @gc_size  = max_size * 0.75
        @logger   = logger
      end

      # Public: Retrieve value from cache.
      #
      # This API should not be used directly, but via the Cache wrapper API.
      #
      # key - String cache key.
      #
      # Returns Object or nil if the value is not set.
      def get(key)
        path = File.join(@root, "#{key}.cache")

        value = safe_open(path) do |f|
          begin
            Marshal.load(f)
          rescue Exception => e
            # Deliberately broad: a corrupt or truncated cache entry must
            # degrade to a miss, never crash the caller.
            @logger.error do
              "#{self.class}[#{path}] could not be unmarshaled: " +
                "#{e.class}: #{e.message}"
            end
            nil
          end
        end

        if value
          # Bump mtime so GC treats this entry as recently used.
          FileUtils.touch(path)
          value
        end
      end

      # Public: Set a key and value in the cache.
      #
      # This API should not be used directly, but via the Cache wrapper API.
      #
      # key   - String cache key.
      # value - Object value.
      #
      # Returns Object value.
      def set(key, value)
        path = File.join(@root, "#{key}.cache")

        # Ensure directory exists
        FileUtils.mkdir_p File.dirname(path)

        # Size of any existing file for this key, so @size tracks the delta
        # on overwrite instead of silently drifting.
        previous_size = (stat = safe_stat(path)) ? stat.size : 0

        # Write data
        PathUtils.atomic_write(path) do |f|
          Marshal.dump(value, f)
          @size += f.size - previous_size
        end

        # GC if necessary
        gc! if @size > @max_size

        value
      end

      # Public: Pretty inspect
      #
      # Returns String.
      def inspect
        "#<#{self.class} size=#{@size}/#{@max_size}>"
      end

      private
        # Internal: Get all cache files along with stats.
        #
        # Returns an Array of [String filename, File::Stat] pairs sorted by
        # mtime.
        def find_caches
          Dir.glob(File.join(@root, '**/*.cache')).reduce([]) { |stats, filename|
            stat = safe_stat(filename)
            # stat maybe nil if file was removed between the time we called
            # dir.glob and the next stat
            stats << [filename, stat] if stat
            stats
          }.sort_by { |_, stat| stat.mtime.to_i }
        end

        # Internal: Sum the on-disk size of [filename, stat] pairs.
        def compute_size(caches)
          caches.inject(0) { |sum, (_, stat)| sum + stat.size }
        end

        # Internal: File.stat that returns nil if the file vanished.
        def safe_stat(fn)
          File.stat(fn)
        rescue Errno::ENOENT
          nil
        end

        # Internal: Open a file for binary reading, ignoring races where the
        # file is deleted between the exist? check and the open.
        def safe_open(path, &block)
          if File.exist?(path)
            File.open(path, 'rb', &block)
          end
        rescue Errno::ENOENT
        end

        # Internal: Delete oldest cache files until the store shrinks to
        # @gc_size bytes, then log the collection.
        def gc!
          start_time = Time.now

          caches = find_caches
          size = compute_size(caches)

          # caches is sorted oldest-first; mark entries for deletion until
          # the remaining size fits under the GC watermark.
          delete_caches, keep_caches = caches.partition { |filename, stat|
            deleted = size > @gc_size
            size -= stat.size
            deleted
          }

          return if delete_caches.empty?

          FileUtils.remove(delete_caches.map(&:first), force: true)
          @size = compute_size(keep_caches)

          @logger.warn do
            secs = Time.now.to_f - start_time.to_f
            "#{self.class}[#{@root}] garbage collected " +
              "#{delete_caches.size} files (#{(secs * 1000).to_i}ms)"
          end
        end
    end
  end
end