sprockets 4.0.1
- checksums.yaml +7 -0
- data/CHANGELOG.md +72 -0
- data/README.md +665 -0
- data/bin/sprockets +93 -0
- data/lib/rake/sprocketstask.rb +153 -0
- data/lib/sprockets.rb +229 -0
- data/lib/sprockets/add_source_map_comment_to_asset_processor.rb +60 -0
- data/lib/sprockets/asset.rb +202 -0
- data/lib/sprockets/autoload.rb +16 -0
- data/lib/sprockets/autoload/babel.rb +8 -0
- data/lib/sprockets/autoload/closure.rb +8 -0
- data/lib/sprockets/autoload/coffee_script.rb +8 -0
- data/lib/sprockets/autoload/eco.rb +8 -0
- data/lib/sprockets/autoload/ejs.rb +8 -0
- data/lib/sprockets/autoload/jsminc.rb +8 -0
- data/lib/sprockets/autoload/sass.rb +8 -0
- data/lib/sprockets/autoload/sassc.rb +8 -0
- data/lib/sprockets/autoload/uglifier.rb +8 -0
- data/lib/sprockets/autoload/yui.rb +8 -0
- data/lib/sprockets/autoload/zopfli.rb +7 -0
- data/lib/sprockets/babel_processor.rb +66 -0
- data/lib/sprockets/base.rb +147 -0
- data/lib/sprockets/bower.rb +61 -0
- data/lib/sprockets/bundle.rb +105 -0
- data/lib/sprockets/cache.rb +271 -0
- data/lib/sprockets/cache/file_store.rb +208 -0
- data/lib/sprockets/cache/memory_store.rb +75 -0
- data/lib/sprockets/cache/null_store.rb +54 -0
- data/lib/sprockets/cached_environment.rb +64 -0
- data/lib/sprockets/closure_compressor.rb +48 -0
- data/lib/sprockets/coffee_script_processor.rb +39 -0
- data/lib/sprockets/compressing.rb +134 -0
- data/lib/sprockets/configuration.rb +79 -0
- data/lib/sprockets/context.rb +304 -0
- data/lib/sprockets/dependencies.rb +74 -0
- data/lib/sprockets/digest_utils.rb +200 -0
- data/lib/sprockets/directive_processor.rb +414 -0
- data/lib/sprockets/eco_processor.rb +33 -0
- data/lib/sprockets/ejs_processor.rb +32 -0
- data/lib/sprockets/encoding_utils.rb +262 -0
- data/lib/sprockets/environment.rb +46 -0
- data/lib/sprockets/erb_processor.rb +37 -0
- data/lib/sprockets/errors.rb +12 -0
- data/lib/sprockets/exporters/base.rb +71 -0
- data/lib/sprockets/exporters/file_exporter.rb +24 -0
- data/lib/sprockets/exporters/zlib_exporter.rb +33 -0
- data/lib/sprockets/exporters/zopfli_exporter.rb +14 -0
- data/lib/sprockets/exporting.rb +73 -0
- data/lib/sprockets/file_reader.rb +16 -0
- data/lib/sprockets/http_utils.rb +135 -0
- data/lib/sprockets/jsminc_compressor.rb +32 -0
- data/lib/sprockets/jst_processor.rb +50 -0
- data/lib/sprockets/loader.rb +345 -0
- data/lib/sprockets/manifest.rb +338 -0
- data/lib/sprockets/manifest_utils.rb +48 -0
- data/lib/sprockets/mime.rb +96 -0
- data/lib/sprockets/npm.rb +52 -0
- data/lib/sprockets/path_dependency_utils.rb +77 -0
- data/lib/sprockets/path_digest_utils.rb +48 -0
- data/lib/sprockets/path_utils.rb +367 -0
- data/lib/sprockets/paths.rb +82 -0
- data/lib/sprockets/preprocessors/default_source_map.rb +49 -0
- data/lib/sprockets/processing.rb +228 -0
- data/lib/sprockets/processor_utils.rb +169 -0
- data/lib/sprockets/resolve.rb +295 -0
- data/lib/sprockets/sass_cache_store.rb +30 -0
- data/lib/sprockets/sass_compressor.rb +63 -0
- data/lib/sprockets/sass_functions.rb +3 -0
- data/lib/sprockets/sass_importer.rb +3 -0
- data/lib/sprockets/sass_processor.rb +313 -0
- data/lib/sprockets/sassc_compressor.rb +56 -0
- data/lib/sprockets/sassc_processor.rb +297 -0
- data/lib/sprockets/server.rb +295 -0
- data/lib/sprockets/source_map_processor.rb +66 -0
- data/lib/sprockets/source_map_utils.rb +483 -0
- data/lib/sprockets/transformers.rb +173 -0
- data/lib/sprockets/uglifier_compressor.rb +66 -0
- data/lib/sprockets/unloaded_asset.rb +139 -0
- data/lib/sprockets/uri_tar.rb +99 -0
- data/lib/sprockets/uri_utils.rb +191 -0
- data/lib/sprockets/utils.rb +202 -0
- data/lib/sprockets/utils/gzip.rb +99 -0
- data/lib/sprockets/version.rb +4 -0
- data/lib/sprockets/yui_compressor.rb +56 -0
- metadata +444 -0
data/lib/sprockets/base.rb
@@ -0,0 +1,147 @@
# frozen_string_literal: true
require 'sprockets/asset'
require 'sprockets/bower'
require 'sprockets/cache'
require 'sprockets/configuration'
require 'sprockets/digest_utils'
require 'sprockets/errors'
require 'sprockets/loader'
require 'sprockets/npm'
require 'sprockets/path_dependency_utils'
require 'sprockets/path_digest_utils'
require 'sprockets/path_utils'
require 'sprockets/resolve'
require 'sprockets/server'
require 'sprockets/source_map_utils'
require 'sprockets/uri_tar'

module Sprockets

  class DoubleLinkError < Sprockets::Error
    def initialize(parent_filename:, logical_path:, last_filename:, filename:)
      super <<~MSG
        Multiple files with the same output path cannot be linked (#{logical_path.inspect})
        In #{parent_filename.inspect} these files were linked:
          - #{last_filename}
          - #{filename}
      MSG
    end
  end

  # `Base` class for `Environment` and `CachedEnvironment`.
  class Base
    include PathUtils, PathDependencyUtils, PathDigestUtils, DigestUtils, SourceMapUtils
    include Configuration
    include Server
    include Resolve, Loader
    include Bower
    include Npm

    # Get persistent cache store
    attr_reader :cache

    # Set persistent cache store
    #
    # The cache store must implement a pair of getters and
    # setters. Either `get(key)`/`set(key, value)`,
    # `[key]`/`[key]=value`, or `read(key)`/`write(key, value)`.
    def cache=(cache)
      @cache = Cache.new(cache, logger)
    end

    # Return a `CachedEnvironment`. Must be implemented by the subclass.
    def cached
      raise NotImplementedError
    end
    alias_method :index, :cached

    # Internal: Compute digest for path.
    #
    # path - String filename or directory path.
    #
    # Returns a String digest or nil.
    def file_digest(path)
      if stat = self.stat(path)
        # Caveat: Digests are cached by the path's current mtime. It's possible
        # for a file's contents to have changed and its mtime to have been
        # negligently reset, thus appearing as if the file hasn't changed on
        # disk. Also, the mtime is only read to the nearest second. It's
        # also possible the file was updated more than once in a given second.
        key = UnloadedAsset.new(path, self).file_digest_key(stat.mtime.to_i)
        cache.fetch(key) do
          self.stat_digest(path, stat)
        end
      end
    end

    # Find asset by logical path or expanded path.
    def find_asset(*args, **options)
      uri, _ = resolve(*args, **options)
      if uri
        load(uri)
      end
    end

    def find_all_linked_assets(*args)
      return to_enum(__method__, *args) unless block_given?

      parent_asset = asset = find_asset(*args)
      return unless asset

      yield asset
      stack = asset.links.to_a
      linked_paths = {}

      while uri = stack.shift
        yield asset = load(uri)

        last_filename = linked_paths[asset.logical_path]
        if last_filename && last_filename != asset.filename
          raise DoubleLinkError.new(
            parent_filename: parent_asset.filename,
            last_filename: last_filename,
            logical_path: asset.logical_path,
            filename: asset.filename
          )
        end
        linked_paths[asset.logical_path] = asset.filename
        stack = asset.links.to_a + stack
      end

      nil
    end

    # Preferred `find_asset` shorthand.
    #
    #     environment['application.js']
    #
    def [](*args, **options)
      find_asset(*args, **options)
    end

    # Find asset by logical path or expanded path.
    #
    # If the asset is not found an error will be raised.
    def find_asset!(*args)
      uri, _ = resolve!(*args)
      if uri
        load(uri)
      end
    end

    # Pretty inspect
    def inspect
      "#<#{self.class}:0x#{object_id.to_s(16)} " +
        "root=#{root.to_s.inspect}, " +
        "paths=#{paths.inspect}>"
    end

    def compress_from_root(uri)
      URITar.new(uri, self).compress
    end

    def expand_from_root(uri)
      URITar.new(uri, self).expand
    end
  end
end
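For context on the `Base` API above: `find_asset`, the `[]` shorthand, and `find_all_linked_assets` are normally called on a concrete `Sprockets::Environment` rather than on `Base` itself. A minimal sketch, assuming a project that actually has an `app/assets/javascripts/application.js` (the paths are illustrative, not part of this gem):

    require 'sprockets'

    env = Sprockets::Environment.new(Dir.pwd)
    env.append_path 'app/assets/javascripts'            # assumed load path
    env.cache = Sprockets::Cache::MemoryStore.new(1000)

    asset = env['application.js']        # same as env.find_asset('application.js')
    puts asset.filename if asset         # nil when the logical path cannot be resolved

    # find_asset! raises instead of returning nil; linked assets
    # (declared with `//= link` directives) can be walked recursively:
    env.find_all_linked_assets('application.js') { |a| puts a.logical_path }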
data/lib/sprockets/bower.rb
@@ -0,0 +1,61 @@
# frozen_string_literal: true
require 'json'

module Sprockets
  module Bower
    # Internal: All supported bower.json files.
    #
    # https://github.com/bower/json/blob/0.4.0/lib/json.js#L7
    POSSIBLE_BOWER_JSONS = ['bower.json', 'component.json', '.bower.json']

    # Internal: Override resolve_alternates to install bower.json behavior.
    #
    # load_path    - String environment path
    # logical_path - String path relative to base
    #
    # Returns candidate filenames.
    def resolve_alternates(load_path, logical_path)
      candidates, deps = super

      # bower.json can only be nested one level deep
      if !logical_path.index('/'.freeze)
        dirname = File.join(load_path, logical_path)

        if directory?(dirname)
          filenames = POSSIBLE_BOWER_JSONS.map { |basename| File.join(dirname, basename) }
          filename  = filenames.detect { |fn| self.file?(fn) }

          if filename
            deps << build_file_digest_uri(filename)
            read_bower_main(dirname, filename) do |path|
              if file?(path)
                candidates << path
              end
            end
          end
        end
      end

      return candidates, deps
    end

    # Internal: Read bower.json's main directive.
    #
    # dirname  - String path to component directory.
    # filename - String path to bower.json.
    #
    # Returns nothing.
    def read_bower_main(dirname, filename)
      bower = JSON.parse(File.read(filename), create_additions: false)

      case bower['main']
      when String
        yield File.expand_path(bower['main'], dirname)
      when Array
        bower['main'].each do |name|
          yield File.expand_path(name, dirname)
        end
      end
    end
  end
end
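To illustrate what `read_bower_main` consumes: a component directory carries one of the manifests listed in `POSSIBLE_BOWER_JSONS`, and its `main` entry (a String or an Array) is expanded into candidate file paths. A small sketch with a hypothetical `app/components/jquery` component, not part of this gem:

    require 'json'

    # Hypothetical bower.json contents; the directory and file names are
    # illustrative only.
    manifest = <<~JSON
      {
        "name": "jquery",
        "main": ["dist/jquery.js", "dist/jquery.css"]
      }
    JSON

    bower = JSON.parse(manifest, create_additions: false)
    Array(bower['main']).each do |name|
      # Mirrors the expansion performed by read_bower_main above.
      puts File.expand_path(name, 'app/components/jquery')
    end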
data/lib/sprockets/bundle.rb
@@ -0,0 +1,105 @@
# frozen_string_literal: true
require 'set'
require 'sprockets/utils'
require 'sprockets/uri_utils'

module Sprockets
  # Internal: Bundle processor takes a single file asset and prepends all the
  # `:required` URIs to the contents.
  #
  # Uses pipeline metadata:
  #
  #   :required - Ordered Set of asset URIs to prepend
  #   :stubbed  - Set of asset URIs to subtract from the required set.
  #
  # Also see DirectiveProcessor.
  class Bundle
    def self.call(input)
      env  = input[:environment]
      type = input[:content_type]
      input[:links] ||= Set.new
      dependencies = Set.new(input[:metadata][:dependencies])

      processed_uri, deps = env.resolve(input[:filename], accept: type, pipeline: :self)
      dependencies.merge(deps)

      # DirectiveProcessor (and any other transformers called here with pipeline=self)
      primary_asset = env.load(processed_uri)
      to_load = primary_asset.metadata.delete(:to_load) || Set.new
      to_link = primary_asset.metadata.delete(:to_link) || Set.new

      to_load.each do |uri|
        loaded_asset = env.load(uri)
        dependencies.merge(loaded_asset.metadata[:dependencies])
        if to_link.include?(uri)
          primary_metadata = primary_asset.metadata
          input[:links] << loaded_asset.uri
          primary_metadata[:links] << loaded_asset.uri
        end
      end

      find_required = proc { |uri| env.load(uri).metadata[:required] }
      required = Utils.dfs(processed_uri, &find_required)
      stubbed  = Utils.dfs(env.load(processed_uri).metadata[:stubbed], &find_required)
      required.subtract(stubbed)
      dedup(required)
      assets = required.map { |uri| env.load(uri) }

      (required + stubbed).each do |uri|
        dependencies.merge(env.load(uri).metadata[:dependencies])
      end

      reducers = Hash[env.match_mime_type_keys(env.config[:bundle_reducers], type).flat_map(&:to_a)]
      process_bundle_reducers(input, assets, reducers).merge(dependencies: dependencies, included: assets.map(&:uri))
    end

    # Internal: Removes a URI from required if it's already included as an alias.
    #
    # required - Set of required URIs
    #
    # Returns deduped Set of URIs.
    def self.dedup(required)
      dupes = required.reduce([]) do |r, uri|
        path, params = URIUtils.parse_asset_uri(uri)
        if params.delete(:index_alias)
          r << URIUtils.build_asset_uri(path, params)
        end
        r
      end
      required.subtract(dupes)
    end

    # Internal: Run bundle reducers on a set of Assets, producing a reduced
    # metadata Hash.
    #
    # input    - Hash of bundle processor input
    # assets   - Array of Assets
    # reducers - Array of [initial, reducer_proc] pairs
    #
    # Returns reduced asset metadata Hash.
    def self.process_bundle_reducers(input, assets, reducers)
      initial = {}
      reducers.each do |k, (v, _)|
        if v.respond_to?(:call)
          initial[k] = v.call(input)
        elsif !v.nil?
          initial[k] = v
        end
      end

      assets.reduce(initial) do |h, asset|
        reducers.each do |k, (_, block)|
          value = k == :data ? asset.source : asset.metadata[k]
          if h.key?(k)
            if !value.nil?
              h[k] = block.call(h[k], value)
            end
          else
            h[k] = value
          end
        end
        h
      end
    end
  end
end
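The `:required` and `:stubbed` sets that `Bundle.call` consumes are populated by the DirectiveProcessor, which parses `//= require` and `//= stub` comments. A sketch of how that surfaces through the public API, assuming an illustrative `app/assets/javascripts` tree that is not part of this gem:

    require 'sprockets'

    # app/assets/javascripts/application.js (illustrative):
    #
    #   //= require jquery
    #   //= require_tree ./widgets
    #   //= stub ./widgets/debug
    #
    # The required URIs are walked depth-first, the stubbed set is
    # subtracted, and the surviving assets are concatenated in order.

    env = Sprockets::Environment.new
    env.append_path 'app/assets/javascripts'

    bundle = env.find_asset('application.js')   # full pipeline, i.e. bundled output
    puts bundle.to_s.bytesize                    # concatenated source
    p    bundle.metadata[:included]              # asset URIs that made it into the bundle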
data/lib/sprockets/cache.rb
@@ -0,0 +1,271 @@
# frozen_string_literal: true
require 'logger'
require 'sprockets/digest_utils'

module Sprockets
  # Public: Wrapper interface to backend cache stores. Ensures a consistent API
  # even when the backend uses get/set or read/write.
  #
  # Public cache interface
  #
  # Always assign the backend store instance to Environment#cache=.
  #
  #     environment.cache = Sprockets::Cache::MemoryStore.new(1000)
  #
  # Environment#cache will always return a wrapped Cache interface. See the
  # methods marked public on this class.
  #
  #
  # Backend cache interface
  #
  # The backend cache store must implement the following methods.
  #
  # get(key)
  #
  #   key - An opaque String with a length less than 250 characters.
  #
  #   Returns a JSON serializable object.
  #
  # set(key, value)
  #
  #   Will only be called once per key. Setting a key "foo" with value "bar",
  #   then later key "foo" with value "baz" is undefined behavior.
  #
  #   key   - An opaque String with a length less than 250 characters.
  #   value - A JSON serializable object.
  #
  #   Returns argument value.
  #
  # clear(options)
  #
  #   Clear the entire cache. Be careful with this method since it could
  #   affect other processes if a shared cache is being used.
  #
  #   The options hash is passed to the underlying cache implementation.
  class Cache
    # Builtin cache stores.
    autoload :FileStore,   'sprockets/cache/file_store'
    autoload :MemoryStore, 'sprockets/cache/memory_store'
    autoload :NullStore,   'sprockets/cache/null_store'

    # Internal: Cache key version for this class. Rarely should have to change
    # unless the cache format radically changes. Will be bumped on major
    # version releases though.
    VERSION = '4.0.0'

    def self.default_logger
      logger = Logger.new($stderr)
      logger.level = Logger::FATAL
      logger
    end

    # Internal: Wrap a backend cache store.
    #
    # Always assign a backend cache store instance to Environment#cache= and
    # use Environment#cache to retrieve a wrapped interface.
    #
    # cache - A compatible backend cache store instance.
    def initialize(cache = nil, logger = self.class.default_logger)
      @cache_wrapper = get_cache_wrapper(cache)
      @fetch_cache   = Cache::MemoryStore.new(1024)
      @logger        = logger
    end

    # Public: Preferred API to retrieve and set values in the cache store.
    #
    # key   - JSON serializable key
    # block -
    #   Must return a consistent JSON serializable object for the given key.
    #
    # Examples
    #
    #     cache.fetch("foo") { "bar" }
    #
    # Returns a JSON serializable object.
    def fetch(key)
      start = Time.now.to_f
      expanded_key = expand_key(key)
      value = @fetch_cache.get(expanded_key)
      if value.nil?
        value = @cache_wrapper.get(expanded_key)
        if value.nil?
          value = yield
          @cache_wrapper.set(expanded_key, value)
          @logger.debug do
            ms = "(#{((Time.now.to_f - start) * 1000).to_i}ms)"
            "Sprockets Cache miss #{peek_key(key)} #{ms}"
          end
        end
        @fetch_cache.set(expanded_key, value)
      end
      value
    end

    # Public: Low level API to retrieve item directly from the backend cache
    # store.
    #
    # This API may be used publicly, but may have undefined behavior
    # depending on the backend store being used. Prefer the Cache#fetch API
    # over using this.
    #
    # key   - JSON serializable key
    # local - Check local cache first (default: false)
    #
    # Returns a JSON serializable object or nil if there was a cache miss.
    def get(key, local = false)
      expanded_key = expand_key(key)

      if local && value = @fetch_cache.get(expanded_key)
        return value
      end

      value = @cache_wrapper.get(expanded_key)
      @fetch_cache.set(expanded_key, value) if local

      value
    end

    # Public: Low level API to set item directly to the backend cache store.
    #
    # This API may be used publicly, but may have undefined behavior
    # depending on the backend store being used. Prefer the Cache#fetch API
    # over using this.
    #
    # key   - JSON serializable key
    # value - A consistent JSON serializable object for the given key. Setting
    #         a different value for the given key has undefined behavior.
    # local - Set on local cache (default: false)
    #
    # Returns the value argument.
    def set(key, value, local = false)
      expanded_key = expand_key(key)
      @fetch_cache.set(expanded_key, value) if local
      @cache_wrapper.set(expanded_key, value)
    end

    # Public: Pretty inspect
    #
    # Returns String.
    def inspect
      "#<#{self.class} local=#{@fetch_cache.inspect} store=#{@cache_wrapper.cache.inspect}>"
    end

    # Public: Clear cache
    #
    # Returns truthy on success, potentially raises exception on failure
    def clear(options=nil)
      @cache_wrapper.clear
      @fetch_cache.clear
    end

    private

    # Internal: Expand object cache key into a short String key.
    #
    # The String should be under 250 characters so it's compatible with
    # Memcache.
    #
    # key - JSON serializable key
    #
    # Returns a String with a length less than 250 characters.
    def expand_key(key)
      digest_key = DigestUtils.pack_urlsafe_base64digest(DigestUtils.digest(key))
      namespace = digest_key[0, 2]
      "sprockets/v#{VERSION}/#{namespace}/#{digest_key}"
    end

    PEEK_SIZE = 100

    # Internal: Show first 100 characters of cache key for logging purposes.
    #
    # Returns a String with a length less than 100 characters.
    def peek_key(key)
      case key
      when Integer
        key.to_s
      when String
        key[0, PEEK_SIZE].inspect
      when Array
        str = []
        key.each { |k| str << peek_key(k) }
        str.join(':')[0, PEEK_SIZE]
      else
        peek_key(DigestUtils.pack_urlsafe_base64digest(DigestUtils.digest(key)))
      end
    end

    def get_cache_wrapper(cache)
      if cache.is_a?(Cache)
        cache

      # `Cache#get(key)` for Memcache
      elsif cache.respond_to?(:get)
        GetWrapper.new(cache)

      # `Cache#[key]` so `Hash` can be used
      elsif cache.respond_to?(:[])
        HashWrapper.new(cache)

      # `Cache#read(key)` for `ActiveSupport::Cache` support
      elsif cache.respond_to?(:read)
        ReadWriteWrapper.new(cache)

      else
        cache = Sprockets::Cache::NullStore.new
        GetWrapper.new(cache)
      end
    end

    class Wrapper < Struct.new(:cache)
    end

    class GetWrapper < Wrapper
      def get(key)
        cache.get(key)
      end

      def set(key, value)
        cache.set(key, value)
      end

      def clear(options=nil)
        # dalli has a #flush method so try it
        if cache.respond_to?(:flush)
          cache.flush(options)
        else
          cache.clear(options)
        end
        true
      end
    end

    class HashWrapper < Wrapper
      def get(key)
        cache[key]
      end

      def set(key, value)
        cache[key] = value
      end

      def clear(options=nil)
        cache.clear
        true
      end
    end

    class ReadWriteWrapper < Wrapper
      def get(key)
        cache.read(key)
      end

      def set(key, value)
        cache.write(key, value)
      end

      def clear(options=nil)
        cache.clear(options)
        true
      end
    end
  end
end
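A usage sketch for the wrapper above: any backend exposing one of the three method pairs can be assigned to `Environment#cache=`, and `Environment#cache` then returns this wrapped interface. The temp directory and keys below are illustrative only:

    require 'sprockets'
    require 'tmpdir'

    env = Sprockets::Environment.new

    # Any wrapper-compatible backend works:
    env.cache = Sprockets::Cache::MemoryStore.new(1000)        # get/set
    env.cache = {}                                             # [] / []= (plain Hash)
    env.cache = Sprockets::Cache::FileStore.new(Dir.mktmpdir)  # persistent file-backed store

    # Environment#cache is the wrapped Sprockets::Cache, so the fetch/get/set
    # API defined in this file is what callers actually use:
    env.cache.fetch(['my-namespace', 'expensive-key']) do
      { 'value' => 42 }   # computed only on a cache miss; must be JSON serializable
    end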