dalli 2.7.8 → 3.2.2
Potentially problematic release: this version of dalli might be problematic.
- checksums.yaml +4 -4
- data/Gemfile +5 -1
- data/History.md +159 -0
- data/README.md +27 -223
- data/lib/dalli/cas/client.rb +1 -57
- data/lib/dalli/client.rb +227 -254
- data/lib/dalli/compressor.rb +12 -2
- data/lib/dalli/key_manager.rb +113 -0
- data/lib/dalli/options.rb +6 -7
- data/lib/dalli/pipelined_getter.rb +177 -0
- data/lib/dalli/protocol/base.rb +241 -0
- data/lib/dalli/protocol/binary/request_formatter.rb +117 -0
- data/lib/dalli/protocol/binary/response_header.rb +36 -0
- data/lib/dalli/protocol/binary/response_processor.rb +239 -0
- data/lib/dalli/protocol/binary/sasl_authentication.rb +60 -0
- data/lib/dalli/protocol/binary.rb +173 -0
- data/lib/dalli/protocol/connection_manager.rb +252 -0
- data/lib/dalli/protocol/meta/key_regularizer.rb +31 -0
- data/lib/dalli/protocol/meta/request_formatter.rb +108 -0
- data/lib/dalli/protocol/meta/response_processor.rb +211 -0
- data/lib/dalli/protocol/meta.rb +177 -0
- data/lib/dalli/protocol/response_buffer.rb +54 -0
- data/lib/dalli/protocol/server_config_parser.rb +84 -0
- data/lib/dalli/protocol/ttl_sanitizer.rb +45 -0
- data/lib/dalli/protocol/value_compressor.rb +85 -0
- data/lib/dalli/protocol/value_marshaller.rb +59 -0
- data/lib/dalli/protocol/value_serializer.rb +91 -0
- data/lib/dalli/protocol.rb +8 -0
- data/lib/dalli/ring.rb +94 -83
- data/lib/dalli/server.rb +3 -746
- data/lib/dalli/servers_arg_normalizer.rb +54 -0
- data/lib/dalli/socket.rb +117 -137
- data/lib/dalli/version.rb +4 -1
- data/lib/dalli.rb +43 -15
- data/lib/rack/session/dalli.rb +103 -94
- metadata +64 -27
- data/lib/action_dispatch/middleware/session/dalli_store.rb +0 -82
- data/lib/active_support/cache/dalli_store.rb +0 -429
- data/lib/dalli/railtie.rb +0 -8
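For an application consuming dalli, taking this upgrade is a one-line change in the app's own Gemfile (a hypothetical application Gemfile, not the gem's data/Gemfile listed above):

# Hypothetical application Gemfile: bump from the 2.7.x line to 3.2.x.
gem 'dalli', '~> 3.2'

The new diff hunks below correspond to the newly added files in the listing above.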
data/lib/dalli/protocol/meta.rb
@@ -0,0 +1,177 @@
# frozen_string_literal: true

require 'forwardable'
require 'socket'
require 'timeout'

module Dalli
  module Protocol
    ##
    # Access point for a single Memcached server, accessed via Memcached's meta
    # protocol. Contains logic for managing connection state to the server (retries, etc),
    # formatting requests to the server, and unpacking responses.
    ##
    class Meta < Base
      TERMINATOR = "\r\n"

      def response_processor
        @response_processor ||= ResponseProcessor.new(@connection_manager, @value_marshaller)
      end

      # NOTE: Additional public methods should be overridden in Dalli::Threadsafe

      private

      # Retrieval Commands
      def get(key, options = nil)
        encoded_key, base64 = KeyRegularizer.encode(key)
        req = RequestFormatter.meta_get(key: encoded_key, base64: base64)
        write(req)
        response_processor.meta_get_with_value(cache_nils: cache_nils?(options))
      end

      def quiet_get_request(key)
        encoded_key, base64 = KeyRegularizer.encode(key)
        RequestFormatter.meta_get(key: encoded_key, return_cas: true, base64: base64, quiet: true)
      end

      def gat(key, ttl, options = nil)
        ttl = TtlSanitizer.sanitize(ttl)
        encoded_key, base64 = KeyRegularizer.encode(key)
        req = RequestFormatter.meta_get(key: encoded_key, ttl: ttl, base64: base64)
        write(req)
        response_processor.meta_get_with_value(cache_nils: cache_nils?(options))
      end

      def touch(key, ttl)
        encoded_key, base64 = KeyRegularizer.encode(key)
        req = RequestFormatter.meta_get(key: encoded_key, ttl: ttl, value: false, base64: base64)
        write(req)
        response_processor.meta_get_without_value
      end

      # TODO: This is confusing, as there's a cas command in memcached
      # and this isn't it. Maybe rename? Maybe eliminate?
      def cas(key)
        encoded_key, base64 = KeyRegularizer.encode(key)
        req = RequestFormatter.meta_get(key: encoded_key, value: true, return_cas: true, base64: base64)
        write(req)
        response_processor.meta_get_with_value_and_cas
      end

      # Storage Commands
      def set(key, value, ttl, cas, options)
        write_storage_req(:set, key, value, ttl, cas, options)
        response_processor.meta_set_with_cas unless quiet?
      end

      def add(key, value, ttl, options)
        write_storage_req(:add, key, value, ttl, nil, options)
        response_processor.meta_set_with_cas unless quiet?
      end

      def replace(key, value, ttl, cas, options)
        write_storage_req(:replace, key, value, ttl, cas, options)
        response_processor.meta_set_with_cas unless quiet?
      end

      # rubocop:disable Metrics/ParameterLists
      def write_storage_req(mode, key, raw_value, ttl = nil, cas = nil, options = {})
        (value, bitflags) = @value_marshaller.store(key, raw_value, options)
        ttl = TtlSanitizer.sanitize(ttl) if ttl
        encoded_key, base64 = KeyRegularizer.encode(key)
        req = RequestFormatter.meta_set(key: encoded_key, value: value,
                                        bitflags: bitflags, cas: cas,
                                        ttl: ttl, mode: mode, quiet: quiet?, base64: base64)
        write(req)
      end
      # rubocop:enable Metrics/ParameterLists

      def append(key, value)
        write_append_prepend_req(:append, key, value)
        response_processor.meta_set_append_prepend unless quiet?
      end

      def prepend(key, value)
        write_append_prepend_req(:prepend, key, value)
        response_processor.meta_set_append_prepend unless quiet?
      end

      # rubocop:disable Metrics/ParameterLists
      def write_append_prepend_req(mode, key, value, ttl = nil, cas = nil, _options = {})
        ttl = TtlSanitizer.sanitize(ttl) if ttl
        encoded_key, base64 = KeyRegularizer.encode(key)
        req = RequestFormatter.meta_set(key: encoded_key, value: value, base64: base64,
                                        cas: cas, ttl: ttl, mode: mode, quiet: quiet?)
        write(req)
      end
      # rubocop:enable Metrics/ParameterLists

      # Delete Commands
      def delete(key, cas)
        encoded_key, base64 = KeyRegularizer.encode(key)
        req = RequestFormatter.meta_delete(key: encoded_key, cas: cas,
                                           base64: base64, quiet: quiet?)
        write(req)
        response_processor.meta_delete unless quiet?
      end

      # Arithmetic Commands
      def decr(key, count, ttl, initial)
        decr_incr false, key, count, ttl, initial
      end

      def incr(key, count, ttl, initial)
        decr_incr true, key, count, ttl, initial
      end

      def decr_incr(incr, key, delta, ttl, initial)
        ttl = initial ? TtlSanitizer.sanitize(ttl) : nil # Only set a TTL if we want to set a value on miss
        encoded_key, base64 = KeyRegularizer.encode(key)
        write(RequestFormatter.meta_arithmetic(key: encoded_key, delta: delta, initial: initial, incr: incr, ttl: ttl,
                                               quiet: quiet?, base64: base64))
        response_processor.decr_incr unless quiet?
      end

      # Other Commands
      def flush(delay = 0)
        write(RequestFormatter.flush(delay: delay))
        response_processor.flush unless quiet?
      end

      # Noop is a keepalive operation but also used to demarcate the end of a set of pipelined commands.
      # We need to read all the responses at once.
      def noop
        write_noop
        response_processor.consume_all_responses_until_mn
      end

      def stats(info = nil)
        write(RequestFormatter.stats(info))
        response_processor.stats
      end

      def reset_stats
        write(RequestFormatter.stats('reset'))
        response_processor.reset
      end

      def version
        write(RequestFormatter.version)
        response_processor.version
      end

      def write_noop
        write(RequestFormatter.meta_noop)
      end

      def authenticate_connection
        raise Dalli::DalliError, 'Authentication not supported for the meta protocol.'
      end

      require_relative 'meta/key_regularizer'
      require_relative 'meta/request_formatter'
      require_relative 'meta/response_processor'
    end
  end
end
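A design note on the class above: the quiet? flag controls whether a command reads its response immediately. In quiet mode the storage, delete, and arithmetic commands only write their requests; noop then marks the end of the pipelined batch and drains all outstanding responses via consume_all_responses_until_mn.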
data/lib/dalli/protocol/response_buffer.rb
@@ -0,0 +1,54 @@
# frozen_string_literal: true

require 'socket'
require 'timeout'

module Dalli
  module Protocol
    ##
    # Manages the buffer for responses from memcached.
    ##
    class ResponseBuffer
      def initialize(io_source, response_processor)
        @io_source = io_source
        @response_processor = response_processor
        @buffer = nil
      end

      def read
        @buffer << @io_source.read_nonblock
      end

      # Attempts to process a single response from the buffer. Starts
      # by advancing the buffer to the specified start position
      def process_single_getk_response
        bytes, status, cas, key, value = @response_processor.getk_response_from_buffer(@buffer)
        advance(bytes)
        [status, cas, key, value]
      end

      # Advances the internal response buffer by bytes_to_advance
      # bytes.
      def advance(bytes_to_advance)
        return unless bytes_to_advance.positive?

        @buffer = @buffer.byteslice(bytes_to_advance..-1)
      end

      # Resets the internal buffer to an empty state,
      # so that we're ready to read pipelined responses
      def reset
        @buffer = ''.b
      end

      # Clear the internal response buffer
      def clear
        @buffer = nil
      end

      def in_progress?
        !@buffer.nil?
      end
    end
  end
end
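To illustrate the buffer lifecycle (reset, read, process, clear), here is a minimal sketch. FakeIO and FakeProcessor are hypothetical stand-ins invented for the example; in the gem the real collaborators are the connection manager and the meta response processor.

require 'dalli'

# Hypothetical io source: returns one pre-canned chunk per read_nonblock call.
class FakeIO
  def initialize(chunks)
    @chunks = chunks
  end

  def read_nonblock
    @chunks.shift
  end
end

# Hypothetical processor: treats everything up to "\r\n" as one response and
# reports how many bytes it consumed, mimicking getk_response_from_buffer.
class FakeProcessor
  def getk_response_from_buffer(buffer)
    line = buffer.partition("\r\n").first
    [line.bytesize + 2, true, 0, 'key', line]
  end
end

buffer = Dalli::Protocol::ResponseBuffer.new(FakeIO.new(["VALUE one\r\n"]), FakeProcessor.new)
buffer.reset                            # start buffering pipelined responses
buffer.read                             # append one chunk from the io source
p buffer.process_single_getk_response   # => [true, 0, "key", "VALUE one"]
buffer.clear
p buffer.in_progress?                   # => false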
data/lib/dalli/protocol/server_config_parser.rb
@@ -0,0 +1,84 @@
# frozen_string_literal: true

module Dalli
  module Protocol
    ##
    # Dalli::Protocol::ServerConfigParser parses a server string passed to
    # a Dalli::Protocol::Binary instance into the hostname, port, weight, and
    # socket_type.
    ##
    class ServerConfigParser
      MEMCACHED_URI_PROTOCOL = 'memcached://'

      # TODO: Revisit this, especially the IP/domain part. Likely
      # can limit character set to LDH + '.'. Hex digit section
      # is there to support IPv6 addresses, which need to be specified with
      # a bounding []
      SERVER_CONFIG_REGEXP = /\A(\[([\h:]+)\]|[^:]+)(?::(\d+))?(?::(\d+))?\z/.freeze

      DEFAULT_PORT = 11_211
      DEFAULT_WEIGHT = 1

      def self.parse(str)
        return parse_non_uri(str) unless str.start_with?(MEMCACHED_URI_PROTOCOL)

        parse_uri(str)
      end

      def self.parse_uri(str)
        uri = URI.parse(str)
        auth_details = {
          username: uri.user,
          password: uri.password
        }
        [uri.host, normalize_port(uri.port), :tcp, DEFAULT_WEIGHT, auth_details]
      end

      def self.parse_non_uri(str)
        res = deconstruct_string(str)

        hostname = normalize_host_from_match(str, res)
        if hostname.start_with?('/')
          socket_type = :unix
          port, weight = attributes_for_unix_socket(res)
        else
          socket_type = :tcp
          port, weight = attributes_for_tcp_socket(res)
        end
        [hostname, port, socket_type, weight, {}]
      end

      def self.deconstruct_string(str)
        mtch = str.match(SERVER_CONFIG_REGEXP)
        raise Dalli::DalliError, "Could not parse hostname #{str}" if mtch.nil? || mtch[1] == '[]'

        mtch
      end

      def self.attributes_for_unix_socket(res)
        # in case of unix socket, allow only setting of weight, not port
        raise Dalli::DalliError, "Could not parse hostname #{res[0]}" if res[4]

        [nil, normalize_weight(res[3])]
      end

      def self.attributes_for_tcp_socket(res)
        [normalize_port(res[3]), normalize_weight(res[4])]
      end

      def self.normalize_host_from_match(str, res)
        raise Dalli::DalliError, "Could not parse hostname #{str}" if res.nil? || res[1] == '[]'

        res[2] || res[1]
      end

      def self.normalize_port(port)
        Integer(port || DEFAULT_PORT)
      end

      def self.normalize_weight(weight)
        Integer(weight || DEFAULT_WEIGHT)
      end
    end
  end
end
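To make the return shape concrete, a few illustrative calls (a sketch assuming require 'dalli' loads this internal class; the hostnames and paths are made up):

require 'uri'
require 'dalli'

parser = Dalli::Protocol::ServerConfigParser

# host:port:weight form
p parser.parse('cache-1.example.com:11212:2')
# => ["cache-1.example.com", 11212, :tcp, 2, {}]

# bracketed IPv6 host, default port (11211) and weight (1)
p parser.parse('[::1]')
# => ["::1", 11211, :tcp, 1, {}]

# memcached:// URI form carries credentials through as auth details
p parser.parse('memcached://user:secret@cache-1.example.com:11211')
# => ["cache-1.example.com", 11211, :tcp, 1, {:username=>"user", :password=>"secret"}]

# UNIX domain socket path, optionally followed by a weight (no port allowed)
p parser.parse('/var/run/memcached/memcached.sock:3')
# => ["/var/run/memcached/memcached.sock", nil, :unix, 3, {}]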
data/lib/dalli/protocol/ttl_sanitizer.rb
@@ -0,0 +1,45 @@
# frozen_string_literal: true

module Dalli
  module Protocol
    ##
    # Utility class for sanitizing TTL arguments based on Memcached rules.
    # TTLs are either expiration times in seconds (with a maximum value of
    # 30 days) or expiration timestamps. This class sanitizes TTLs to ensure
    # they meet those restrictions.
    ##
    class TtlSanitizer
      # https://github.com/memcached/memcached/blob/master/doc/protocol.txt#L79
      # > An expiration time, in seconds. Can be up to 30 days. After 30 days, is
      # treated as a unix timestamp of an exact date.
      MAX_ACCEPTABLE_EXPIRATION_INTERVAL = 30 * 24 * 60 * 60 # 30 days

      # Ensures the TTL passed to Memcached is a valid TTL in the expected format.
      def self.sanitize(ttl)
        ttl_as_i = ttl.to_i
        return ttl_as_i if less_than_max_expiration_interval?(ttl_as_i)

        as_timestamp(ttl_as_i)
      end

      def self.less_than_max_expiration_interval?(ttl_as_i)
        ttl_as_i <= MAX_ACCEPTABLE_EXPIRATION_INTERVAL
      end

      def self.as_timestamp(ttl_as_i)
        now = current_timestamp
        return ttl_as_i if ttl_as_i > now # Already a timestamp

        Dalli.logger.debug "Expiration interval (#{ttl_as_i}) too long for Memcached " \
                           'and too short to be a future timestamp,' \
                           'converting to an expiration timestamp'
        now + ttl_as_i
      end

      # Pulled out into a method so it's easy to stub time
      def self.current_timestamp
        Time.now.to_i
      end
    end
  end
end
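A quick illustration of the three cases handled above (a sketch assuming require 'dalli' loads this internal class):

require 'dalli'

sanitizer = Dalli::Protocol::TtlSanitizer

p sanitizer.sanitize(300)
# => 300 — within the 30-day window, passed through as a relative TTL

p sanitizer.sanitize(45 * 24 * 60 * 60)
# 45 days exceeds the 30-day limit but is well below the current unix time,
# so it is converted to an absolute timestamp: Time.now.to_i + 45 days

p sanitizer.sanitize(Time.now.to_i + 120)
# already a future timestamp, returned unchanged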
data/lib/dalli/protocol/value_compressor.rb
@@ -0,0 +1,85 @@
# frozen_string_literal: true

require 'English'

module Dalli
  module Protocol
    ##
    # Dalli::Protocol::ValueCompressor compartmentalizes the logic for managing
    # compression and decompression of stored values. It manages interpreting
    # relevant options from both client and request, determining whether to
    # compress/decompress on store/retrieve, and processes bitflags as necessary.
    ##
    class ValueCompressor
      DEFAULTS = {
        compress: true,
        compressor: ::Dalli::Compressor,
        # min byte size to attempt compression
        compression_min_size: 4 * 1024 # 4K
      }.freeze

      OPTIONS = DEFAULTS.keys.freeze

      # https://www.hjp.at/zettel/m/memcached_flags.rxml
      # Looks like most clients use bit 1 to indicate gzip compression.
      FLAG_COMPRESSED = 0x2

      def initialize(client_options)
        # Support the deprecated compression option, but don't allow it to override
        # an explicit compress
        # Remove this with 4.0
        if client_options.key?(:compression) && !client_options.key?(:compress)
          Dalli.logger.warn "DEPRECATED: Dalli's :compression option is now just 'compress: true'. " \
                            'Please update your configuration.'
          client_options[:compress] = client_options.delete(:compression)
        end

        @compression_options =
          DEFAULTS.merge(client_options.select { |k, _| OPTIONS.include?(k) })
      end

      def store(value, req_options, bitflags)
        do_compress = compress_value?(value, req_options)
        store_value = do_compress ? compressor.compress(value) : value
        bitflags |= FLAG_COMPRESSED if do_compress

        [store_value, bitflags]
      end

      def retrieve(value, bitflags)
        compressed = (bitflags & FLAG_COMPRESSED) != 0
        compressed ? compressor.decompress(value) : value

      # TODO: We likely want to move this rescue into the Dalli::Compressor / Dalli::GzipCompressor
      # itself, since not all compressors necessarily use Zlib. For now keep it here, so the behavior
      # of custom compressors doesn't change.
      rescue Zlib::Error
        raise UnmarshalError, "Unable to uncompress value: #{$ERROR_INFO.message}"
      end

      def compress_by_default?
        @compression_options[:compress]
      end

      def compressor
        @compression_options[:compressor]
      end

      def compression_min_size
        @compression_options[:compression_min_size]
      end

      # Checks whether we should apply compression when serializing a value
      # based on the specified options. Returns false unless the value
      # is greater than the minimum compression size. Otherwise returns
      # based on a method-level option if specified, falling back to the
      # server default.
      def compress_value?(value, req_options)
        return false unless value.bytesize >= compression_min_size
        return compress_by_default? unless req_options && !req_options[:compress].nil?

        req_options[:compress]
      end
    end
  end
end
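For illustration, a small round-trip sketch (assuming require 'dalli' loads this internal class and the default Dalli::Compressor):

require 'dalli'

vc = Dalli::Protocol::ValueCompressor.new(compress: true, compression_min_size: 1024)

value, flags = vc.store('tiny', nil, 0)
# 'tiny' is below compression_min_size, so it is stored verbatim and flags stays 0

big = 'x' * 4096
stored, flags = vc.store(big, nil, 0)
# 4 KB crosses the threshold: Dalli::Compressor (Zlib) compresses it and
# FLAG_COMPRESSED (0x2) is set in the bitflags

p vc.retrieve(stored, flags) == big   # => true, the flag triggers decompression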
data/lib/dalli/protocol/value_marshaller.rb
@@ -0,0 +1,59 @@
# frozen_string_literal: true

require 'forwardable'

module Dalli
  module Protocol
    ##
    # Dalli::Protocol::ValueMarshaller compartmentalizes the logic for marshalling
    # and unmarshalling unstructured data (values) to Memcached. It also enforces
    # limits on the maximum size of marshalled data.
    ##
    class ValueMarshaller
      extend Forwardable

      DEFAULTS = {
        # max size of value in bytes (default is 1 MB, can be overriden with "memcached -I <size>")
        value_max_bytes: 1024 * 1024
      }.freeze

      OPTIONS = DEFAULTS.keys.freeze

      def_delegators :@value_serializer, :serializer
      def_delegators :@value_compressor, :compressor, :compression_min_size, :compress_by_default?

      def initialize(client_options)
        @value_serializer = ValueSerializer.new(client_options)
        @value_compressor = ValueCompressor.new(client_options)

        @marshal_options =
          DEFAULTS.merge(client_options.select { |k, _| OPTIONS.include?(k) })
      end

      def store(key, value, options = nil)
        bitflags = 0
        value, bitflags = @value_serializer.store(value, options, bitflags)
        value, bitflags = @value_compressor.store(value, options, bitflags)

        error_if_over_max_value_bytes(key, value)
        [value, bitflags]
      end

      def retrieve(value, flags)
        value = @value_compressor.retrieve(value, flags)
        @value_serializer.retrieve(value, flags)
      end

      def value_max_bytes
        @marshal_options[:value_max_bytes]
      end

      def error_if_over_max_value_bytes(key, value)
        return if value.bytesize <= value_max_bytes

        message = "Value for #{key} over max size: #{value_max_bytes} <= #{value.bytesize}"
        raise Dalli::ValueOverMaxSize, message
      end
    end
  end
end
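A sketch of the store/retrieve round trip, which chains the serializer and compressor and then enforces value_max_bytes (again assuming require 'dalli' loads these internal classes):

require 'dalli'

vm = Dalli::Protocol::ValueMarshaller.new(value_max_bytes: 1024 * 1024)

blob, bitflags = vm.store('some:key', { id: 42, name: 'arthur' })
# serialized with Marshal (bit 0x1); also compressed (bit 0x2) if the
# marshalled form crosses the 4 KB compression threshold

p vm.retrieve(blob, bitflags)   # => {:id=>42, :name=>"arthur"}

# A value still larger than value_max_bytes after serialization and
# compression raises Dalli::ValueOverMaxSize instead of being written out.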
data/lib/dalli/protocol/value_serializer.rb
@@ -0,0 +1,91 @@
# frozen_string_literal: true

module Dalli
  module Protocol
    ##
    # Dalli::Protocol::ValueSerializer compartmentalizes the logic for managing
    # serialization and deserialization of stored values. It manages interpreting
    # relevant options from both client and request, determining whether to
    # serialize/deserialize on store/retrieve, and processes bitflags as necessary.
    ##
    class ValueSerializer
      DEFAULTS = {
        serializer: Marshal
      }.freeze

      OPTIONS = DEFAULTS.keys.freeze

      # https://www.hjp.at/zettel/m/memcached_flags.rxml
      # Looks like most clients use bit 0 to indicate native language serialization
      FLAG_SERIALIZED = 0x1

      attr_accessor :serialization_options

      def initialize(protocol_options)
        @serialization_options =
          DEFAULTS.merge(protocol_options.select { |k, _| OPTIONS.include?(k) })
      end

      def store(value, req_options, bitflags)
        do_serialize = !(req_options && req_options[:raw])
        store_value = do_serialize ? serialize_value(value) : value.to_s
        bitflags |= FLAG_SERIALIZED if do_serialize
        [store_value, bitflags]
      end

      # TODO: Some of these error messages need to be validated. It's not obvious
      # that all of them are actually generated by the invoked code
      # in current systems
      # rubocop:disable Layout/LineLength
      TYPE_ERR_REGEXP = %r{needs to have method `_load'|exception class/object expected|instance of IO needed|incompatible marshal file format}.freeze
      ARGUMENT_ERR_REGEXP = /undefined class|marshal data too short/.freeze
      NAME_ERR_STR = 'uninitialized constant'
      # rubocop:enable Layout/LineLength

      def retrieve(value, bitflags)
        serialized = (bitflags & FLAG_SERIALIZED) != 0
        serialized ? serializer.load(value) : value
      rescue TypeError => e
        filter_type_error(e)
      rescue ArgumentError => e
        filter_argument_error(e)
      rescue NameError => e
        filter_name_error(e)
      end

      def filter_type_error(err)
        raise err unless TYPE_ERR_REGEXP.match?(err.message)

        raise UnmarshalError, "Unable to unmarshal value: #{err.message}"
      end

      def filter_argument_error(err)
        raise err unless ARGUMENT_ERR_REGEXP.match?(err.message)

        raise UnmarshalError, "Unable to unmarshal value: #{err.message}"
      end

      def filter_name_error(err)
        raise err unless err.message.include?(NAME_ERR_STR)

        raise UnmarshalError, "Unable to unmarshal value: #{err.message}"
      end

      def serializer
        @serialization_options[:serializer]
      end

      def serialize_value(value)
        serializer.dump(value)
      rescue Timeout::Error => e
        raise e
      rescue StandardError => e
        # Serializing can throw several different types of generic Ruby exceptions.
        # Convert to a specific exception so we can special case it higher up the stack.
        exc = Dalli::MarshalError.new(e.message)
        exc.set_backtrace e.backtrace
        raise exc
      end
    end
  end
end
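Finally, a minimal sketch of the serializer on its own, showing the raw: true escape hatch (assuming require 'dalli' loads the class):

require 'dalli'

vs = Dalli::Protocol::ValueSerializer.new({})   # defaults to Marshal

payload, bitflags = vs.store([1, 2, 3], nil, 0)
p bitflags & 0x1                  # => 1, FLAG_SERIALIZED is set
p vs.retrieve(payload, bitflags)  # => [1, 2, 3]

# With raw: true the value is stored as a plain string and the flag stays clear
raw, bitflags = vs.store('plain text', { raw: true }, 0)
p [raw, bitflags]                 # => ["plain text", 0]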