sprockets 2.0.5 → 2.1.0.beta

@@ -8,251 +8,72 @@ module Sprockets
   # `BundledAsset`s are used for files that need to be processed and
   # concatenated with other assets. Use for `.js` and `.css` files.
   class BundledAsset < Asset
-    # Define extra attributes to be serialized.
-    def self.serialized_attributes
-      super + %w( content_type mtime )
-    end
+    attr_reader :source
 
-    def initialize(environment, logical_path, pathname, options)
+    def initialize(environment, logical_path, pathname)
       super(environment, logical_path, pathname)
-      @options = options || {}
+
+      @processed_asset = environment.find_asset(pathname, :bundle => false)
+      @required_assets = @processed_asset.required_assets
+
+      @source = ""
+
+      # Explode Asset into parts and gather the dependency bodies
+      to_a.each { |dependency| @source << dependency.to_s }
+
+      # Run bundle processors on concatenated source
+      context = environment.context_class.new(environment, logical_path, pathname)
+      @source = context.evaluate(pathname, :data => @source,
+                  :processors => environment.bundle_processors(content_type))
+
+      @mtime  = to_a.map(&:mtime).max
+      @length = Rack::Utils.bytesize(source)
+      @digest = environment.digest.update(source).hexdigest
     end
 
     # Initialize `BundledAsset` from serialized `Hash`.
     def init_with(environment, coder)
-      @options = {}
-
       super
 
-      @body   = coder['body']
-      @assets = coder['asset_paths'].map { |p|
-        p = expand_root_path(p)
-        p == pathname.to_s ? self : environment[p, @options]
-      }
+      @processed_asset = environment.find_asset(pathname, :bundle => false)
+      @required_assets = @processed_asset.required_assets
 
-      @dependency_paths = coder['dependency_paths'].map { |h|
-        h.merge('path' => expand_root_path(h['path']))
-      }
-      @dependency_paths.each do |dep|
-        dep['mtime'] = Time.parse(dep['mtime']) if dep['mtime'].is_a?(String)
+      if @processed_asset.dependency_digest != coder['required_assets_digest']
+        raise UnserializeError, "processed asset belongs to a stale environment"
       end
+
+      @source = coder['source']
     end
 
     # Serialize custom attributes in `BundledAsset`.
     def encode_with(coder)
       super
 
-      coder['body'] = body
-      coder['asset_paths'] = to_a.map { |a| relativize_root_path(a.pathname) }
-      coder['dependency_paths'] = dependency_paths.map { |h|
-        h.merge('path' => relativize_root_path(h['path']))
-      }
+      coder['source'] = source
+      coder['required_assets_digest'] = @processed_asset.dependency_digest
     end
 
     # Get asset's own processed contents. Excludes any of its required
     # dependencies but does run any processors or engines on the
     # original file.
     def body
-      @body ||= build_dependency_context_and_body[1]
-    end
-
-    # Get latest mtime of all its dependencies.
-    def mtime
-      @mtime ||= dependency_paths.map { |h| h['mtime'] }.max
-    end
-
-    # Get size of concatenated source.
-    def length
-      @length ||= build_source['length']
-    end
-
-    # Compute digest of concatenated source.
-    def digest
-      @digest ||= build_source['digest']
+      @processed_asset.source
     end
 
     # Return an `Array` of `Asset` files that are declared dependencies.
     def dependencies
-      to_a - [self]
+      to_a.reject { |a| a.eql?(@processed_asset) }
     end
 
     # Expand asset into an `Array` of parts.
     def to_a
-      @assets ||= build_dependencies_paths_and_assets[1]
+      required_assets
     end
 
     # Checks if Asset is stale by comparing the actual mtime and
     # digest to the inmemory model.
-    def fresh?
-      # Check freshness of all declared dependencies
-      dependency_paths.all? { |h| dependency_fresh?(h) }
-    end
-
-    # Return `String` of concatenated source.
-    def to_s
-      @source ||= build_source['source']
-    end
-
-    # Save asset to disk.
-    def write_to(filename, options = {})
-      # Gzip contents if filename has '.gz'
-      options[:compress] ||= File.extname(filename) == '.gz'
-
-      File.open("#{filename}+", 'wb') do |f|
-        if options[:compress]
-          # Run contents through `Zlib`
-          gz = Zlib::GzipWriter.new(f, Zlib::BEST_COMPRESSION)
-          gz.write to_s
-          gz.close
-        else
-          # Write out as is
-          f.write to_s
-          f.close
-        end
-      end
-
-      # Atomic write
-      FileUtils.mv("#{filename}+", filename)
-
-      # Set mtime correctly
-      File.utime(mtime, mtime, filename)
-
-      nil
-    ensure
-      # Ensure tmp file gets cleaned up
-      FileUtils.rm("#{filename}+") if File.exist?("#{filename}+")
+    def fresh?(environment)
+      @processed_asset.fresh?(environment)
     end
-
-    protected
-      # Return new blank `Context` to evaluate processors in.
-      def blank_context
-        environment.context_class.new(environment, logical_path.to_s, pathname)
-      end
-
-      # Get `Context` after processors have been ran on it. This
-      # trackes any dependencies that processors have added to it.
-      def dependency_context
-        @dependency_context ||= build_dependency_context_and_body[0]
-      end
-
-      # All paths that this asset depends on. This list may include
-      # non-assets like directories.
-      def dependency_paths
-        @dependency_paths ||= build_dependencies_paths_and_assets[0]
-      end
-
-    private
-      def logger
-        environment.logger
-      end
-
-      # Check if self has already been required and raise a fast
-      # error. Otherwise you end up with a StackOverflow error.
-      def check_circular_dependency!
-        requires = @options[:_requires] ||= []
-        if requires.include?(pathname.to_s)
-          raise CircularDependencyError, "#{pathname} has already been required"
-        end
-        requires << pathname.to_s
-      end
-
-      def build_dependency_context_and_body
-        start_time = Time.now.to_f
-
-        context = blank_context
-
-        # Read original data once and pass it along to `Context`
-        data = Sprockets::Utils.read_unicode(pathname)
-
-        # Prime digest cache with data, since we happen to have it
-        environment.file_digest(pathname, data)
-
-        # Runs all processors on `Context`
-        body = context.evaluate(pathname, :data => data)
-
-        @dependency_context, @body = context, body
-
-        elapsed_time = ((Time.now.to_f - start_time) * 1000).to_i
-        logger.info "Compiled #{logical_path} (#{elapsed_time}ms) (pid #{Process.pid})"
-
-        return context, body
-      end
-
-      def build_dependencies_paths_and_assets
-        check_circular_dependency!
-
-        paths, assets = {}, []
-
-        # Define an `add_dependency` helper
-        add_dependency = lambda do |asset|
-          unless assets.any? { |a| a.pathname == asset.pathname }
-            assets << asset
-          end
-        end
-
-        # Iterate over all the declared require paths from the `Context`
-        dependency_context._required_paths.each do |required_path|
-          # Catch `require_self`
-          if required_path == pathname.to_s
-            add_dependency.call(self)
-          else
-            # Recursively lookup required asset
-            environment[required_path, @options].to_a.each do |asset|
-              add_dependency.call(asset)
-            end
-          end
-        end
-
-        # Ensure self is added to the dependency list
-        add_dependency.call(self)
-
-        dependency_context._dependency_paths.each do |path|
-          paths[path] ||= {
-            'path' => path,
-            'mtime' => environment.stat(path).mtime,
-            'hexdigest' => environment.file_digest(path).hexdigest
-          }
-        end
-
-        dependency_context._dependency_assets.each do |path|
-          # Skip if depending on self
-          next if path == pathname.to_s
-
-          # Recursively lookup required asset
-          environment[path, @options].to_a.each do |asset|
-            asset.dependency_paths.each do |dep|
-              paths[dep['path']] ||= dep
-            end
-          end
-        end
-
-        @dependency_paths, @assets = paths.values, assets
-
-        return @dependency_paths, @assets
-      end
-
-      def build_source
-        hash = environment.cache_hash("#{pathname}:source", id) do
-          data = ""
-
-          # Explode Asset into parts and gather the dependency bodies
-          to_a.each { |dependency| data << dependency.body }
-
-          # Run bundle processors on concatenated source
-          data = blank_context.evaluate(pathname, :data => data,
-            :processors => environment.bundle_processors(content_type))
-
-          { 'length' => Rack::Utils.bytesize(data),
-            'digest' => environment.digest.update(data).hexdigest,
-            'source' => data }
-        end
-        hash['length'] = Integer(hash['length']) if hash['length'].is_a?(String)
-
-        @length = hash['length']
-        @digest = hash['digest']
-        @source = hash['source']
-
-        hash
-      end
   end
 end
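In 2.1, `BundledAsset` no longer runs processors itself: it looks up the single processed asset with `:bundle => false`, concatenates the required parts, and only applies bundle processors to the result. A rough usage sketch based on the calls above; the load path and `application.js` are illustrative placeholders:

    require 'sprockets'

    environment = Sprockets::Environment.new
    environment.append_path 'app/assets/javascripts'

    # Default lookup builds the concatenated bundle...
    bundled = environment.find_asset('application.js')

    # ...while :bundle => false returns the single processed file, which is
    # what BundledAsset#initialize fetches internally above.
    processed = environment.find_asset('application.js', :bundle => false)

    bundled.to_s    # concatenated source of all required assets
    processed.to_s  # just application.js after its own processors/engines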
@@ -18,24 +18,15 @@ module Sprockets
 
       # Lookup value in cache
       def [](key)
-        pathname = path_for(key)
+        pathname = @root.join(key)
         pathname.exist? ? pathname.open('rb') { |f| Marshal.load(f) } : nil
       end
 
       # Save value to cache
       def []=(key, value)
-        path_for(key).open('w') { |f| Marshal.dump(value, f)}
+        @root.join(key).open('w') { |f| Marshal.dump(value, f)}
         value
       end
-
-      private
-        # Returns path for cache key.
-        #
-        # The key may include some funky characters so hash it into
-        # safe hex.
-        def path_for(key)
-          @root.join(::Digest::MD5.hexdigest(key))
-        end
     end
   end
 end
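`FileStore` no longer hashes its keys with MD5; the key is joined onto the cache root as-is, so callers (see the `Caching` changes below) are expected to pass filesystem-safe keys such as the output of `expand_cache_key`. A minimal setup sketch; the cache directory name is an assumed example:

    require 'sprockets'

    environment = Sprockets::Environment.new
    environment.append_path 'app/assets/javascripts'

    # Entries are marshalled to files under the given root; each key now maps
    # straight to a path via @root.join(key).
    environment.cache = Sprockets::Cache::FileStore.new('tmp/sprockets-cache')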
@@ -1,38 +1,7 @@
-require 'sprockets/bundled_asset'
-require 'sprockets/static_asset'
-
 module Sprockets
   # `Caching` is an internal mixin whose public methods are exposed on
   # the `Environment` and `Index` classes.
   module Caching
-    # Return `Asset` instance for serialized `Hash`.
-    def asset_from_hash(hash)
-      return unless hash.is_a?(Hash)
-      case hash['class']
-      when 'BundledAsset'
-        BundledAsset.from_hash(self, hash)
-      when 'StaticAsset'
-        StaticAsset.from_hash(self, hash)
-      else
-        nil
-      end
-    rescue Exception => e
-      logger.debug "Cache for Asset (#{hash['logical_path']}) is stale"
-      logger.debug e
-      nil
-    end
-
-    def cache_hash(key, version)
-      if cache.nil?
-        yield
-      elsif hash = cache_get_hash(key, version)
-        hash
-      elsif hash = yield
-        cache_set_hash(key, version, hash)
-        hash
-      end
-    end
-
     protected
       # Cache helper method. Takes a `path` argument which maybe a
       # logical path or fully expanded path. The `&block` is passed
@@ -43,7 +12,7 @@ module Sprockets
           yield
 
         # Check cache for `path`
-        elsif (asset = asset_from_hash(cache_get_hash(path.to_s, digest.hexdigest))) && asset.fresh?
+        elsif (asset = Asset.from_hash(self, cache_get_hash(path.to_s))) && asset.fresh?(self)
          asset
 
        # Otherwise yield block that slowly finds and builds the asset
@@ -52,12 +21,12 @@ module Sprockets
           asset.encode_with(hash)
 
           # Save the asset to its path
-          cache_set_hash(path.to_s, digest.hexdigest, hash)
+          cache_set_hash(path.to_s, hash)
 
           # Since path maybe a logical or full pathname, save the
           # asset its its full path too
           if path.to_s != asset.pathname.to_s
-            cache_set_hash(asset.pathname.to_s, digest.hexdigest, hash)
+            cache_set_hash(asset.pathname.to_s, hash)
           end
 
           asset
@@ -68,20 +37,20 @@ module Sprockets
       # Strips `Environment#root` from key to make the key work
       # consisently across different servers. The key is also hashed
       # so it does not exceed 250 characters.
-      def cache_key_for(key)
-        File.join('sprockets', digest.hexdigest(key.sub(root, '')))
+      def expand_cache_key(key)
+        File.join('sprockets', digest_class.hexdigest(key.sub(root, '')))
       end
 
-      def cache_get_hash(key, version)
-        hash = cache_get(cache_key_for(key))
-        if hash.is_a?(Hash) && version == hash['_version']
+      def cache_get_hash(key)
+        hash = cache_get(expand_cache_key(key))
+        if hash.is_a?(Hash) && digest.hexdigest == hash['_version']
           hash
         end
       end
 
-      def cache_set_hash(key, version, hash)
-        hash['_version'] = version
-        cache_set(cache_key_for(key), hash)
+      def cache_set_hash(key, hash)
+        hash['_version'] = digest.hexdigest
+        cache_set(expand_cache_key(key), hash)
         hash
       end
 
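The explicit `version` argument is gone from the cache helpers: `_version` is now stamped with the environment's `digest.hexdigest` on write and compared on read, so anything that changes the environment digest invalidates old entries. A hedged sketch of the effect, assuming the environment's `version` attribute feeds into that digest; paths are placeholders:

    require 'sprockets'

    environment = Sprockets::Environment.new
    environment.append_path 'app/assets/javascripts'
    environment.cache = Sprockets::Cache::FileStore.new('tmp/sprockets-cache')
    environment.version = '1.0'

    environment['application.js']  # built and cached with _version == digest.hexdigest

    # Bumping the version changes the digest, so cache_get_hash sees a stale
    # _version, returns nil, and the asset is rebuilt on the next lookup.
    environment.version = '2.0'
    environment['application.js']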
@@ -32,8 +32,8 @@ module Sprockets
       @__LINE__ = nil
 
       @_required_paths = []
-      @_dependency_paths = Set.new([pathname.to_s])
-      @_dependency_assets = Set.new
+      @_dependency_paths = Set.new
+      @_dependency_assets = Set.new([pathname.to_s])
     end
 
     # Returns the environment path that contains the file.
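`Context` now seeds `_dependency_assets` (rather than `_dependency_paths`) with the asset's own path, and helpers such as `asset_data_uri` (changed further down) register embedded files with `depend_on_asset`. A small sketch of the difference between the two helpers inside a custom preprocessor; the paths, mime type, and processor name are illustrative:

    require 'sprockets'

    environment = Sprockets::Environment.new
    environment.append_path 'app/assets/images'
    environment.append_path 'app/assets/stylesheets'

    environment.register_preprocessor 'text/css', :embed_logo do |context, data|
      # depend_on would record only the file's path/mtime/digest for freshness
      # checks; depend_on_asset records the whole asset, dependencies included
      # (asset_data_uri below also calls it internally).
      context.depend_on_asset('logo.png')
      data.sub('LOGO_DATA_URI', context.asset_data_uri('logo.png'))
    end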
@@ -78,7 +78,7 @@ module Sprockets
       pathname = Pathname.new(path)
       attributes = environment.attributes_for(pathname)
 
-      if pathname.absolute?
+      if pathname.to_s =~ /^\//
         pathname
 
       elsif content_type = options[:content_type]
@@ -148,7 +148,9 @@ module Sprockets
     def asset_requirable?(path)
       pathname = resolve(path)
       content_type = environment.content_type_of(pathname)
-      pathname.file? && (self.content_type.nil? || self.content_type == content_type)
+      stat = environment.stat(path)
+      return false unless stat && stat.file?
+      self.content_type.nil? || self.content_type == content_type
     end
 
     # Reads `path` and runs processors on the file.
@@ -193,7 +195,7 @@ module Sprockets
     #     $('<img>').attr('src', '<%= asset_data_uri 'avatar.jpg' %>')
     #
     def asset_data_uri(path)
-      depend_on(path)
+      depend_on_asset(path)
       asset = environment.find_asset(path)
       base64 = Base64.encode64(asset.to_s).gsub(/\s+/, "")
       "data:#{asset.content_type};base64,#{Rack::Utils.escape(base64)}"
@@ -71,28 +71,4 @@ module Sprockets
       hash.inject(initial) { |h, (k, a)| h[k] = a.dup; h }
     end
   end
-
-  # Extend Sprockets module to provide global registry
-  extend Engines
-  @engines = {}
-
-  # Cherry pick the default Tilt engines that make sense for
-  # Sprockets. We don't need ones that only generate html like HAML.
-
-  # Mmm, CoffeeScript
-  register_engine '.coffee', Tilt::CoffeeScriptTemplate
-
-  # JST engines
-  register_engine '.jst', JstProcessor
-  register_engine '.eco', EcoTemplate
-  register_engine '.ejs', EjsTemplate
-
-  # CSS engines
-  register_engine '.less', Tilt::LessTemplate
-  register_engine '.sass', Tilt::SassTemplate
-  register_engine '.scss', Tilt::ScssTemplate
-
-  # Other
-  register_engine '.erb', Tilt::ERBTemplate
-  register_engine '.str', Tilt::StringTemplate
 end
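With the registry and default `register_engine` calls removed from this file, default engines are registered elsewhere (presumably in `lib/sprockets.rb` for this release). Registering a custom engine still looks roughly like the following sketch; `MyTemplate` and the `.my` extension are hypothetical:

    require 'sprockets'
    require 'tilt'

    # A hypothetical Tilt-style template class.
    class MyTemplate < Tilt::Template
      def prepare; end

      def evaluate(scope, locals, &block)
        data.upcase # trivial transform for illustration
      end
    end

    # Global registration, as the removed lines above did for the defaults:
    Sprockets.register_engine '.my', MyTemplate

    # Or per environment:
    environment = Sprockets::Environment.new
    environment.register_engine '.my', MyTemplate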