sprockets 4.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CHANGELOG.md +72 -0
- data/README.md +665 -0
- data/bin/sprockets +93 -0
- data/lib/rake/sprocketstask.rb +153 -0
- data/lib/sprockets.rb +229 -0
- data/lib/sprockets/add_source_map_comment_to_asset_processor.rb +60 -0
- data/lib/sprockets/asset.rb +202 -0
- data/lib/sprockets/autoload.rb +16 -0
- data/lib/sprockets/autoload/babel.rb +8 -0
- data/lib/sprockets/autoload/closure.rb +8 -0
- data/lib/sprockets/autoload/coffee_script.rb +8 -0
- data/lib/sprockets/autoload/eco.rb +8 -0
- data/lib/sprockets/autoload/ejs.rb +8 -0
- data/lib/sprockets/autoload/jsminc.rb +8 -0
- data/lib/sprockets/autoload/sass.rb +8 -0
- data/lib/sprockets/autoload/sassc.rb +8 -0
- data/lib/sprockets/autoload/uglifier.rb +8 -0
- data/lib/sprockets/autoload/yui.rb +8 -0
- data/lib/sprockets/autoload/zopfli.rb +7 -0
- data/lib/sprockets/babel_processor.rb +66 -0
- data/lib/sprockets/base.rb +147 -0
- data/lib/sprockets/bower.rb +61 -0
- data/lib/sprockets/bundle.rb +105 -0
- data/lib/sprockets/cache.rb +271 -0
- data/lib/sprockets/cache/file_store.rb +208 -0
- data/lib/sprockets/cache/memory_store.rb +75 -0
- data/lib/sprockets/cache/null_store.rb +54 -0
- data/lib/sprockets/cached_environment.rb +64 -0
- data/lib/sprockets/closure_compressor.rb +48 -0
- data/lib/sprockets/coffee_script_processor.rb +39 -0
- data/lib/sprockets/compressing.rb +134 -0
- data/lib/sprockets/configuration.rb +79 -0
- data/lib/sprockets/context.rb +304 -0
- data/lib/sprockets/dependencies.rb +74 -0
- data/lib/sprockets/digest_utils.rb +200 -0
- data/lib/sprockets/directive_processor.rb +414 -0
- data/lib/sprockets/eco_processor.rb +33 -0
- data/lib/sprockets/ejs_processor.rb +32 -0
- data/lib/sprockets/encoding_utils.rb +262 -0
- data/lib/sprockets/environment.rb +46 -0
- data/lib/sprockets/erb_processor.rb +37 -0
- data/lib/sprockets/errors.rb +12 -0
- data/lib/sprockets/exporters/base.rb +71 -0
- data/lib/sprockets/exporters/file_exporter.rb +24 -0
- data/lib/sprockets/exporters/zlib_exporter.rb +33 -0
- data/lib/sprockets/exporters/zopfli_exporter.rb +14 -0
- data/lib/sprockets/exporting.rb +73 -0
- data/lib/sprockets/file_reader.rb +16 -0
- data/lib/sprockets/http_utils.rb +135 -0
- data/lib/sprockets/jsminc_compressor.rb +32 -0
- data/lib/sprockets/jst_processor.rb +50 -0
- data/lib/sprockets/loader.rb +345 -0
- data/lib/sprockets/manifest.rb +338 -0
- data/lib/sprockets/manifest_utils.rb +48 -0
- data/lib/sprockets/mime.rb +96 -0
- data/lib/sprockets/npm.rb +52 -0
- data/lib/sprockets/path_dependency_utils.rb +77 -0
- data/lib/sprockets/path_digest_utils.rb +48 -0
- data/lib/sprockets/path_utils.rb +367 -0
- data/lib/sprockets/paths.rb +82 -0
- data/lib/sprockets/preprocessors/default_source_map.rb +49 -0
- data/lib/sprockets/processing.rb +228 -0
- data/lib/sprockets/processor_utils.rb +169 -0
- data/lib/sprockets/resolve.rb +295 -0
- data/lib/sprockets/sass_cache_store.rb +30 -0
- data/lib/sprockets/sass_compressor.rb +63 -0
- data/lib/sprockets/sass_functions.rb +3 -0
- data/lib/sprockets/sass_importer.rb +3 -0
- data/lib/sprockets/sass_processor.rb +313 -0
- data/lib/sprockets/sassc_compressor.rb +56 -0
- data/lib/sprockets/sassc_processor.rb +297 -0
- data/lib/sprockets/server.rb +295 -0
- data/lib/sprockets/source_map_processor.rb +66 -0
- data/lib/sprockets/source_map_utils.rb +483 -0
- data/lib/sprockets/transformers.rb +173 -0
- data/lib/sprockets/uglifier_compressor.rb +66 -0
- data/lib/sprockets/unloaded_asset.rb +139 -0
- data/lib/sprockets/uri_tar.rb +99 -0
- data/lib/sprockets/uri_utils.rb +191 -0
- data/lib/sprockets/utils.rb +202 -0
- data/lib/sprockets/utils/gzip.rb +99 -0
- data/lib/sprockets/version.rb +4 -0
- data/lib/sprockets/yui_compressor.rb +56 -0
- metadata +444 -0
--- /dev/null
+++ data/lib/sprockets/exporters/file_exporter.rb
@@ -0,0 +1,24 @@
+require 'sprockets/exporters/base'
+
+module Sprockets
+  module Exporters
+    # Writes an asset file to disk
+    class FileExporter < Exporters::Base
+      def skip?(logger)
+        if ::File.exist?(target)
+          logger.debug "Skipping #{ target }, already exists"
+          true
+        else
+          logger.info "Writing #{ target }"
+          false
+        end
+      end
+
+      def call
+        write(target) do |file|
+          file.write(asset.source)
+        end
+      end
+    end
+  end
+end
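
For orientation, the sketch below shows how the `Exporters::Base` contract used above (`setup`, `skip?`, `call`, plus the `target`, `asset` and `write` helpers) might be reused for a custom exporter. The `ChecksumExporter` name and the checksum idea are illustrative assumptions, not part of this gem; only the method interface mirrors the `FileExporter` shown in this hunk.

```ruby
require 'digest'
require 'sprockets/exporters/base'

# Hypothetical exporter that writes a sidecar .sha256 file next to each
# compiled asset, mirroring the skip?/call structure of FileExporter above.
class ChecksumExporter < Sprockets::Exporters::Base
  def setup
    @checksum_target = "#{target}.sha256"
  end

  def skip?(logger)
    if ::File.exist?(@checksum_target)
      logger.debug "Skipping #{@checksum_target}, already exists"
      true
    else
      logger.info "Writing #{@checksum_target}"
      false
    end
  end

  def call
    write(@checksum_target) do |file|
      file.write(Digest::SHA256.hexdigest(asset.source))
    end
  end
end
```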
--- /dev/null
+++ data/lib/sprockets/exporters/zlib_exporter.rb
@@ -0,0 +1,33 @@
+require 'sprockets/exporters/base'
+require 'sprockets/utils/gzip'
+
+module Sprockets
+  module Exporters
+    # Generates a `.gz` file using the zlib algorithm built into
+    # Ruby's standard library.
+    class ZlibExporter < Exporters::Base
+      def setup
+        @gzip_target = "#{ target }.gz"
+        @gzip = Sprockets::Utils::Gzip.new(asset, archiver: Utils::Gzip::ZlibArchiver)
+      end
+
+      def skip?(logger)
+        return true if environment.skip_gzip?
+        return true if @gzip.cannot_compress?
+        if ::File.exist?(@gzip_target)
+          logger.debug "Skipping #{ @gzip_target }, already exists"
+          true
+        else
+          logger.info "Writing #{ @gzip_target }"
+          false
+        end
+      end
+
+      def call
+        write(@gzip_target) do |file|
+          @gzip.compress(file, target)
+        end
+      end
+    end
+  end
+end
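
The `skip?` guard above consults `environment.skip_gzip?`. A minimal, hedged sketch of turning gzip export off, assuming the `gzip=` writer in `data/lib/sprockets/compressing.rb` elsewhere in this diff backs that flag:

```ruby
require 'sprockets'

environment = Sprockets::Environment.new
# Assumption: Environment#gzip= toggles the flag read by skip_gzip?, so
# ZlibExporter#skip? returns true and no .gz files are written.
environment.gzip = false
```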
--- /dev/null
+++ data/lib/sprockets/exporters/zopfli_exporter.rb
@@ -0,0 +1,14 @@
+require 'sprockets/exporters/zlib_exporter'
+
+module Sprockets
+  module Exporters
+    # Generates a `.gz` file using the zopfli algorithm from the
+    # Zopfli gem.
+    class ZopfliExporter < ZlibExporter
+      def setup
+        @gzip_target = "#{ target }.gz"
+        @gzip = Sprockets::Utils::Gzip.new(asset, archiver: Utils::Gzip::ZopfliArchiver)
+      end
+    end
+  end
+end
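
Since `ZopfliExporter` only substitutes the archiver, switching to it is a matter of re-registering exporters with the `Exporting` API shown later in this diff. A hedged sketch, assuming the `zopfli` gem is available in your bundle:

```ruby
require 'sprockets'
require 'sprockets/exporters/zopfli_exporter'

environment = Sprockets::Environment.new
# Replace the default zlib gzip exporter with the Zopfli-based one.
environment.unregister_exporter '*/*', Sprockets::Exporters::ZlibExporter
environment.register_exporter   '*/*', Sprockets::Exporters::ZopfliExporter
```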
--- /dev/null
+++ data/lib/sprockets/exporting.rb
@@ -0,0 +1,73 @@
+module Sprockets
+  # `Exporting` is an internal mixin whose public methods are exposed on
+  # the `Environment` and `CachedEnvironment` classes.
+  module Exporting
+    # Exporters are run on the assets:precompile task
+    def exporters
+      config[:exporters]
+    end
+
+    # Public: Registers a new Exporter `klass` for `mime_type`.
+    #
+    # If your exporter depends on one or more other exporters you can
+    # specify this via the `depend_on` keyword.
+    #
+    #   register_exporter '*/*', Sprockets::Exporters::ZlibExporter
+    #
+    # This ensures that `Sprockets::Exporters::File` will always execute before
+    # `Sprockets::Exporters::Zlib`
+    def register_exporter(mime_types, klass = nil)
+      mime_types = Array(mime_types)
+
+      mime_types.each do |mime_type|
+        self.config = hash_reassoc(config, :exporters, mime_type) do |_exporters|
+          _exporters << klass
+        end
+      end
+    end
+
+    # Public: Remove Exporting processor `klass` for `mime_type`.
+    #
+    #   environment.unregister_exporter '*/*', Sprockets::Exporters::Zlib
+    #
+    # Can be called without a mime type
+    #
+    #   environment.unregister_exporter Sprockets::Exporters::Zlib
+    #
+    # Does not remove any exporters that depend on `klass`.
+    def unregister_exporter(mime_types, exporter = nil)
+      unless mime_types.is_a? Array
+        if mime_types.is_a? String
+          mime_types = [mime_types]
+        else # called with no mime type
+          exporter = mime_types
+          mime_types = nil
+        end
+      end
+
+      self.config = hash_reassoc(config, :exporters) do |_exporters|
+        _exporters.each do |mime_type, exporters_array|
+          next if mime_types && !mime_types.include?(mime_type)
+          if exporters_array.include? exporter
+            _exporters[mime_type] = exporters_array.dup.delete exporter
+          end
+        end
+      end
+    end
+
+    # Public: Checks if concurrent exporting is allowed
+    def export_concurrent
+      config[:export_concurrent]
+    end
+
+    # Public: Enable or disable exporting files concurrently
+    #
+    # Defaults to true.
+    #
+    #   environment.export_concurrent = false
+    #
+    def export_concurrent=(export_concurrent)
+      self.config = config.merge(export_concurrent: export_concurrent).freeze
+    end
+  end
+end
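
A hedged usage sketch of the `Exporting` API above. `ChecksumExporter` refers to the hypothetical exporter sketched earlier in this section, and the mime type is an arbitrary example:

```ruby
require 'sprockets'

environment = Sprockets::Environment.new

# Run the hypothetical exporter for JavaScript assets only.
environment.register_exporter 'application/javascript', ChecksumExporter

# Write exported files serially rather than concurrently.
environment.export_concurrent = false

environment.exporters  # => Hash keyed by mime type, values are exporter lists
```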
--- /dev/null
+++ data/lib/sprockets/file_reader.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+require 'set'
+
+module Sprockets
+  # Internal: The first processor in the pipeline that reads the file into
+  # memory and passes it along as `input[:data]`.
+  class FileReader
+    def self.call(input)
+      env = input[:environment]
+      data = env.read_file(input[:filename], input[:content_type])
+      dependencies = Set.new(input[:metadata][:dependencies])
+      dependencies += [env.build_file_digest_uri(input[:filename])]
+      { data: data, dependencies: dependencies }
+    end
+  end
+end
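
Every processor in the pipeline shares this call contract: it receives an input Hash (`:data`, `:name`, `:filename`, `:environment`, `:metadata`, and so on) and returns a Hash that is merged back into the asset. A minimal sketch of a custom processor using that contract; the `BannerProcessor` name is illustrative, while `register_preprocessor` is the registration hook from the `Processing` mixin:

```ruby
require 'sprockets'

# Hypothetical preprocessor that prepends a comment banner to each asset,
# returning a Hash that Sprockets merges into the asset being built.
class BannerProcessor
  def self.call(input)
    { data: "/* #{input[:name]} */\n#{input[:data]}" }
  end
end

environment = Sprockets::Environment.new
environment.register_preprocessor 'application/javascript', BannerProcessor
```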
--- /dev/null
+++ data/lib/sprockets/http_utils.rb
@@ -0,0 +1,135 @@
+# frozen_string_literal: true
+module Sprockets
+  # Internal: HTTP URI utilities. Many adapted from Rack::Utils. Mixed into
+  # Environment.
+  module HTTPUtils
+    extend self
+
+    # Public: Test mime type against mime range.
+    #
+    #   match_mime_type?('text/html', 'text/*') => true
+    #   match_mime_type?('text/plain', '*') => true
+    #   match_mime_type?('text/html', 'application/json') => false
+    #
+    # Returns true if the given value is a mime match for the given mime match
+    # specification, false otherwise.
+    def match_mime_type?(value, matcher)
+      v1, v2 = value.split('/'.freeze, 2)
+      m1, m2 = matcher.split('/'.freeze, 2)
+      (m1 == '*'.freeze || v1 == m1) && (m2.nil? || m2 == '*'.freeze || m2 == v2)
+    end
+
+    # Public: Return values from Hash where the key matches the mime type.
+    #
+    # hash      - Hash of String matcher keys to Object values
+    # mime_type - String mime type
+    #
+    # Returns Array of Object values.
+    def match_mime_type_keys(hash, mime_type)
+      type, subtype = mime_type.split('/', 2)
+      [
+        hash["*"],
+        hash["*/*"],
+        hash["#{type}/*"],
+        hash["#{type}/#{subtype}"]
+      ].compact
+    end
+
+    # Internal: Parse Accept header quality values.
+    #
+    # values - String e.g. "application/javascript"
+    #
+    # Adapted from Rack::Utils#q_values. Quality values are
+    # described in http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html
+    #
+    #   parse_q_values("application/javascript")
+    #   # => [["application/javascript", 1.0]]
+    #
+    #   parse_q_values("*/*")
+    #   # => [["*/*", 1.0]]
+    #
+    #   parse_q_values("text/plain; q=0.5, image/*")
+    #   # => [["text/plain", 0.5], ["image/*", 1.0]]
+    #
+    #   parse_q_values("application/javascript, text/css")
+    #   # => [["application/javascript", 1.0], ["text/css", 1.0]]
+    #
+    # Returns an Array of [String, Float].
+    def parse_q_values(values)
+      values.to_s.split(/\s*,\s*/).map do |part|
+        value, parameters = part.split(/\s*;\s*/, 2)
+        quality = 1.0
+        if md = /\Aq=([\d.]+)/.match(parameters)
+          quality = md[1].to_f
+        end
+        [value, quality]
+      end
+    end
+
+    # Internal: Find all qvalue matches from an Array of available options.
+    #
+    # Adapted from Rack::Utils#q_values.
+    #
+    # Returns Array of matched Strings from available Array or [].
+    def find_q_matches(q_values, available, &matcher)
+      matcher ||= lambda { |a, b| a == b }
+
+      matches = []
+
+      case q_values
+      when Array
+      when String
+        q_values = parse_q_values(q_values)
+      when NilClass
+        q_values = []
+      else
+        raise TypeError, "unknown q_values type: #{q_values.class}"
+      end
+
+      i = 0
+      q_values.each do |accepted, quality|
+        if match = available.find { |option| matcher.call(option, accepted) }
+          i += 1
+          matches << [-quality, i, match]
+        end
+      end
+
+      matches.sort!
+      matches.map! { |_, _, match| match }
+      matches
+    end
+
+    # Internal: Find the best qvalue match from an Array of available options.
+    #
+    # Adapted from Rack::Utils#q_values.
+    #
+    # Returns the matched String from available Array or nil.
+    def find_best_q_match(q_values, available, &matcher)
+      find_q_matches(q_values, available, &matcher).first
+    end
+
+    # Internal: Find all qvalue matches from an Array of available mime type
+    # options.
+    #
+    # Adapted from Rack::Utils#q_values.
+    #
+    # Returns Array of matched mime type Strings from available Array or [].
+    def find_mime_type_matches(q_value_header, available)
+      find_q_matches(q_value_header, available) do |a, b|
+        match_mime_type?(a, b)
+      end
+    end
+
+    # Internal: Find the best qvalue match from an Array of available mime type
+    # options.
+    #
+    # Adapted from Rack::Utils#q_values.
+    #
+    # Returns the matched mime type String from available Array or nil.
+    def find_best_mime_type_match(q_value_header, available)
+      find_best_q_match(q_value_header, available) do |a, b|
+        match_mime_type?(a, b)
+      end
+    end
+  end
+end
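
A quick, hedged illustration of the helpers above, called directly on the module (they are internal helpers, so calling them this way is only for demonstration); the Accept header string is an arbitrary example value:

```ruby
require 'sprockets/http_utils'

accept = 'text/css; q=0.2, application/javascript'

Sprockets::HTTPUtils.parse_q_values(accept)
# => [["text/css", 0.2], ["application/javascript", 1.0]]

Sprockets::HTTPUtils.find_best_mime_type_match(accept, ['text/css', 'application/javascript'])
# => "application/javascript" (highest quality value wins)
```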
--- /dev/null
+++ data/lib/sprockets/jsminc_compressor.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+require 'sprockets/autoload'
+require 'sprockets/digest_utils'
+
+module Sprockets
+  class JSMincCompressor
+    VERSION = '1'
+
+    def self.instance
+      @instance ||= new
+    end
+
+    def self.call(input)
+      instance.call(input)
+    end
+
+    def self.cache_key
+      instance.cache_key
+    end
+
+    attr_reader :cache_key
+
+    def initialize(options = {})
+      @compressor_class = Autoload::JSMinC
+      @cache_key = "#{self.class.name}:#{Autoload::JSMinC::VERSION}:#{VERSION}:#{DigestUtils.digest(options)}".freeze
+    end
+
+    def call(input)
+      @compressor_class.minify(input[:data])
+    end
+  end
+end
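
Compressors in this gem expose the same singleton `call(input)` interface as other processors, so they can be wired in as bundle processors. A hedged sketch: whether a `:jsminc` shortcut is pre-registered is not shown in this hunk, so the class is registered explicitly, and the `jsminc` gem itself is assumed to be installed:

```ruby
require 'sprockets'
require 'sprockets/jsminc_compressor'

environment = Sprockets::Environment.new
# Minify bundled JavaScript output with JSMinC.
environment.register_bundle_processor 'application/javascript',
                                      Sprockets::JSMincCompressor
```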
--- /dev/null
+++ data/lib/sprockets/jst_processor.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+module Sprockets
+  # Public: JST transformer.
+  #
+  # Exports server side compiled templates to an object.
+  #
+  # Name your template "users/show.ejs", "users/new.eco", etc.
+  #
+  # To accept the default options
+  #
+  #   environment.register_transformer
+  #     'application/javascript+function',
+  #     'application/javascript', JstProcessor
+  #
+  # Change the default namespace.
+  #
+  #   environment.register_transformer
+  #     'application/javascript+function',
+  #     'application/javascript', JstProcessor.new(namespace: 'App.templates')
+  #
+  class JstProcessor
+    def self.default_namespace
+      'this.JST'
+    end
+
+    # Public: Return singleton instance with default options.
+    #
+    # Returns JstProcessor object.
+    def self.instance
+      @instance ||= new
+    end
+
+    def self.call(input)
+      instance.call(input)
+    end
+
+    def initialize(namespace: self.class.default_namespace)
+      @namespace = namespace
+    end
+
+    def call(input)
+      data = input[:data].gsub(/$(.)/m, "\\1 ").strip
+      key = input[:name]
+      <<-JST
+(function() { #{@namespace} || (#{@namespace} = {}); #{@namespace}[#{key.inspect}] = #{data};
+}).call(this);
+      JST
+    end
+  end
+end
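
A hedged sketch of calling `JstProcessor` directly with the only input keys it reads (`:data` and `:name`); in a real pipeline Sprockets builds this Hash and the `:data` comes from an upstream EJS or Eco processor:

```ruby
require 'sprockets/jst_processor'

processor = Sprockets::JstProcessor.new(namespace: 'App.templates')
wrapped = processor.call(
  data: 'function(ctx) { return ctx.name; }',
  name: 'users/show'
)
# `wrapped` is JavaScript that assigns the compiled template function to
# App.templates["users/show"] inside an immediately invoked function.
```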
--- /dev/null
+++ data/lib/sprockets/loader.rb
@@ -0,0 +1,345 @@
+# frozen_string_literal: true
+require 'sprockets/asset'
+require 'sprockets/digest_utils'
+require 'sprockets/errors'
+require 'sprockets/file_reader'
+require 'sprockets/mime'
+require 'sprockets/path_utils'
+require 'sprockets/processing'
+require 'sprockets/processor_utils'
+require 'sprockets/resolve'
+require 'sprockets/transformers'
+require 'sprockets/uri_utils'
+require 'sprockets/unloaded_asset'
+
+module Sprockets
+
+  # The loader phase takes an asset URI location and returns a constructed Asset
+  # object.
+  module Loader
+    include DigestUtils, PathUtils, ProcessorUtils, URIUtils
+    include Mime, Processing, Resolve, Transformers
+
+
+    # Public: Load Asset by Asset URI.
+    #
+    # uri - A String containing complete URI to a file including schema
+    #       and full path such as:
+    #       "file:///Path/app/assets/js/app.js?type=application/javascript"
+    #
+    # Returns Asset.
+    def load(uri)
+      unloaded = UnloadedAsset.new(uri, self)
+      if unloaded.params.key?(:id)
+        unless asset = asset_from_cache(unloaded.asset_key)
+          id = unloaded.params.delete(:id)
+          uri_without_id = build_asset_uri(unloaded.filename, unloaded.params)
+          asset = load_from_unloaded(UnloadedAsset.new(uri_without_id, self))
+          if asset[:id] != id
+            @logger.warn "Sprockets load error: Tried to find #{uri}, but latest was id #{asset[:id]}"
+          end
+        end
+      else
+        asset = fetch_asset_from_dependency_cache(unloaded) do |paths|
+          # When asset is previously generated, its "dependencies" are stored in the cache.
+          # The presence of `paths` indicates dependencies were stored.
+          # We can check to see if the dependencies have not changed by "resolving" them and
+          # generating a digest key from the resolved entries. If this digest key has not
+          # changed, the asset will be pulled from cache.
+          #
+          # If this `paths` is present but the cache returns nothing then `fetch_asset_from_dependency_cache`
+          # will confusingly be called again with `paths` set to nil where the asset will be
+          # loaded from disk.
+          if paths
+            digest = DigestUtils.digest(resolve_dependencies(paths))
+            if uri_from_cache = cache.get(unloaded.digest_key(digest), true)
+              asset_from_cache(UnloadedAsset.new(uri_from_cache, self).asset_key)
+            end
+          else
+            load_from_unloaded(unloaded)
+          end
+        end
+      end
+      Asset.new(asset)
+    end
+
+    private
+    def compress_key_from_hash(hash, key)
+      return unless hash.key?(key)
+      value = hash[key].dup
+      return if !value
+
+      if block_given?
+        value.map! do |x|
+          if yield x
+            compress_from_root(x)
+          else
+            x
+          end
+        end
+      else
+        value.map! { |x| compress_from_root(x) }
+      end
+      hash[key] = value
+    end
+
+
+    def expand_key_from_hash(hash, key)
+      return unless hash.key?(key)
+      value = hash[key].dup
+      return if !value
+      if block_given?
+        value.map! do |x|
+          if yield x
+            expand_from_root(x)
+          else
+            x
+          end
+        end
+      else
+        value.map! { |x| expand_from_root(x) }
+      end
+      hash[key] = value
+    end
+
+    # Internal: Load asset hash from cache
+    #
+    # key - A String containing lookup information for an asset
+    #
+    # This method converts all "compressed" paths to absolute paths.
+    # Returns a hash of values representing an asset
+    def asset_from_cache(key)
+      asset = cache.get(key, true)
+      if asset
+        asset[:uri] = expand_from_root(asset[:uri])
+        asset[:load_path] = expand_from_root(asset[:load_path])
+        asset[:filename] = expand_from_root(asset[:filename])
+        expand_key_from_hash(asset[:metadata], :included)
+        expand_key_from_hash(asset[:metadata], :links)
+        expand_key_from_hash(asset[:metadata], :stubbed)
+        expand_key_from_hash(asset[:metadata], :required)
+        expand_key_from_hash(asset[:metadata], :to_load)
+        expand_key_from_hash(asset[:metadata], :to_link)
+        expand_key_from_hash(asset[:metadata], :dependencies) { |uri| uri.start_with?("file-digest://") }
+
+        asset[:metadata].each_key do |k|
+          next unless k.match?(/_dependencies\z/) # rubocop:disable Performance/EndWith
+          expand_key_from_hash(asset[:metadata], k)
+        end
+      end
+      asset
+    end
+
+    # Internal: Loads an asset and saves it to cache
+    #
+    # unloaded - An UnloadedAsset
+    #
+    # This method is only called when the given unloaded asset could not be
+    # successfully pulled from cache.
+    def load_from_unloaded(unloaded)
+      unless file?(unloaded.filename)
+        raise FileNotFound, "could not find file: #{unloaded.filename}"
+      end
+
+      path_to_split =
+        if index_alias = unloaded.params[:index_alias]
+          expand_from_root index_alias
+        else
+          unloaded.filename
+        end
+
+      load_path, logical_path = paths_split(config[:paths], path_to_split)
+
+      unless load_path
+        target = path_to_split
+        target += " (index alias of #{unloaded.filename})" if unloaded.params[:index_alias]
+        raise FileOutsidePaths, "#{target} is no longer under a load path: #{self.paths.join(', ')}"
+      end
+
+      extname, file_type = match_path_extname(logical_path, mime_exts)
+      logical_path = logical_path.chomp(extname)
+      name = logical_path
+
+      if pipeline = unloaded.params[:pipeline]
+        logical_path += ".#{pipeline}"
+      end
+
+      if type = unloaded.params[:type]
+        logical_path += config[:mime_types][type][:extensions].first
+      end
+
+      if type != file_type && !config[:transformers][file_type][type]
+        raise ConversionError, "could not convert #{file_type.inspect} to #{type.inspect}"
+      end
+
+      processors = processors_for(type, file_type, pipeline)
+
+      processors_dep_uri = build_processors_uri(type, file_type, pipeline)
+      dependencies = config[:dependencies] + [processors_dep_uri]
+
+      # Read into memory and process if there's a processor pipeline
+      if processors.any?
+        result = call_processors(processors, {
+          environment: self,
+          cache: self.cache,
+          uri: unloaded.uri,
+          filename: unloaded.filename,
+          load_path: load_path,
+          name: name,
+          content_type: type,
+          metadata: {
+            dependencies: dependencies
+          }
+        })
+        validate_processor_result!(result)
+        source = result.delete(:data)
+        metadata = result
+        metadata[:charset] = source.encoding.name.downcase unless metadata.key?(:charset)
+        metadata[:digest] = digest(source)
+        metadata[:length] = source.bytesize
+        metadata[:environment_version] = version
+      else
+        dependencies << build_file_digest_uri(unloaded.filename)
+        metadata = {
+          digest: file_digest(unloaded.filename),
+          length: self.stat(unloaded.filename).size,
+          dependencies: dependencies,
+          environment_version: version,
+        }
+      end
+
+      asset = {
+        uri: unloaded.uri,
+        load_path: load_path,
+        filename: unloaded.filename,
+        name: name,
+        logical_path: logical_path,
+        content_type: type,
+        source: source,
+        metadata: metadata,
+        dependencies_digest: DigestUtils.digest(resolve_dependencies(metadata[:dependencies]))
+      }
+
+      asset[:id] = hexdigest(asset)
+      asset[:uri] = build_asset_uri(unloaded.filename, unloaded.params.merge(id: asset[:id]))
+
+      store_asset(asset, unloaded)
+      asset
+    end
+
+    # Internal: Save a given asset to the cache
+    #
+    # asset - A hash containing values of loaded asset
+    # unloaded - The UnloadedAsset used to lookup the `asset`
+    #
+    # This method converts all absolute paths to "compressed" paths
+    # which are relative if they're in the root.
+    def store_asset(asset, unloaded)
+      # Save the asset in the cache under the new URI
+      cached_asset = asset.dup
+      cached_asset[:uri] = compress_from_root(asset[:uri])
+      cached_asset[:filename] = compress_from_root(asset[:filename])
+      cached_asset[:load_path] = compress_from_root(asset[:load_path])
+
+      if cached_asset[:metadata]
+        # Deep dup to avoid modifying `asset`
+        cached_asset[:metadata] = cached_asset[:metadata].dup
+        compress_key_from_hash(cached_asset[:metadata], :included)
+        compress_key_from_hash(cached_asset[:metadata], :links)
+        compress_key_from_hash(cached_asset[:metadata], :stubbed)
+        compress_key_from_hash(cached_asset[:metadata], :required)
+        compress_key_from_hash(cached_asset[:metadata], :to_load)
+        compress_key_from_hash(cached_asset[:metadata], :to_link)
+        compress_key_from_hash(cached_asset[:metadata], :dependencies) { |uri| uri.start_with?("file-digest://") }
+
+        cached_asset[:metadata].each do |key, value|
+          next unless key.match?(/_dependencies\z/) # rubocop:disable Performance/EndWith
+          compress_key_from_hash(cached_asset[:metadata], key)
+        end
+      end
+
+      # Unloaded asset and stored_asset now have a different URI
+      stored_asset = UnloadedAsset.new(asset[:uri], self)
+      cache.set(stored_asset.asset_key, cached_asset, true)
+
+      # Save the new relative path for the digest key of the unloaded asset
+      cache.set(unloaded.digest_key(asset[:dependencies_digest]), stored_asset.compressed_path, true)
+    end
+
+
+    # Internal: Resolve set of dependency URIs.
+    #
+    # uris - An Array of "dependencies" for example:
+    #        ["environment-version", "environment-paths", "processors:type=text/css&file_type=text/css",
+    #         "file-digest:///Full/path/app/assets/stylesheets/application.css",
+    #         "processors:type=text/css&file_type=text/css&pipeline=self",
+    #         "file-digest:///Full/path/app/assets/stylesheets"]
+    #
+    # Returns an array of things that the given uri depends on, for example the
+    # environment version: if you're using a different version of sprockets
+    # then the dependencies will be different. This is used only for generating
+    # the cache key, for example "environment-version" may be resolved to
+    # "environment-1.0-3.2.0" for version "3.2.0" of sprockets.
+    #
+    # Any paths that are returned are converted to relative paths
+    #
+    # Returns array of resolved dependencies
+    def resolve_dependencies(uris)
+      uris.map { |uri| resolve_dependency(uri) }
+    end
+
+    # Internal: Retrieves an asset based on its digest
+    #
+    # unloaded - An UnloadedAsset
+    # limit - A Fixnum which sets the maximum number of versions of "histories"
+    #         stored in the cache
+    #
+    # This method attempts to retrieve the last `limit` number of histories of an asset
+    # from the cache. A "history" is an array of unresolved "dependencies" that the asset needs
+    # to compile. In this case a dependency can refer to either an asset e.g. index.js
+    # may rely on jquery.js (so jquery.js is a dependency), or other factors that may affect
+    # compilation, such as the VERSION of Sprockets (i.e. the environment) and what "processors"
+    # are used.
+    #
+    # For example a history array may look something like this
+    #
+    #   [["environment-version", "environment-paths", "processors:type=text/css&file_type=text/css",
+    #     "file-digest:///Full/path/app/assets/stylesheets/application.css",
+    #     "processors:type=text/css&file_digesttype=text/css&pipeline=self",
+    #     "file-digest:///Full/path/app/assets/stylesheets"]]
+    #
+    # Where the first entry is a Set of dependencies for the last generated version of that asset.
+    # Multiple versions are stored since Sprockets keeps the last `limit` number of assets
+    # generated present in the system.
+    #
+    # If a "history" of dependencies is present in the cache, each version of "history" will be
+    # yielded to the passed block which is responsible for loading the asset. If found, the existing
+    # history will be saved with the dependency that found a valid asset moved to the front.
+    #
+    # If no history is present, or if none of the histories could be resolved to a valid asset,
+    # then the block is yielded to and expected to return a valid asset.
+    # When this happens the dependencies for the returned asset are added to the "history", and older
+    # entries are removed if the "history" is above `limit`.
+    def fetch_asset_from_dependency_cache(unloaded, limit = 3)
+      key = unloaded.dependency_history_key
+
+      history = cache.get(key) || []
+      history.each_with_index do |deps, index|
+        expanded_deps = deps.map do |path|
+          path.start_with?("file-digest://") ? expand_from_root(path) : path
+        end
+        if asset = yield(expanded_deps)
+          cache.set(key, history.rotate!(index)) if index > 0
+          return asset
+        end
+      end
+
+      asset = yield
+      deps = asset[:metadata][:dependencies].dup.map! do |uri|
+        uri.start_with?("file-digest://") ? compress_from_root(uri) : uri
+      end
+      cache.set(key, history.unshift(deps).take(limit))
+      asset
+    end
+  end
+end
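
To tie the loader back to everyday use, a hedged sketch of loading an asset through an environment; the directory and file names are illustrative, and `load` is the public entry point documented in the `Loader` hunk above:

```ruby
require 'sprockets'

environment = Sprockets::Environment.new
environment.append_path 'app/assets/javascripts'

# Load by full asset URI, as documented for Loader#load above.
uri = "file://#{File.expand_path('app/assets/javascripts/app.js')}?type=application/javascript"
asset = environment.load(uri)

asset.content_type            # => "application/javascript"
asset.metadata[:dependencies] # URIs fed to resolve_dependencies above
```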