moped 0.0.0.beta → 1.0.0.alpha
Note: this version of moped has been flagged as a potentially problematic release.
- data/MIT_LICENSE +19 -0
- data/README.md +323 -0
- data/lib/moped.rb +19 -0
- data/lib/moped/bson.rb +25 -0
- data/lib/moped/bson/binary.rb +68 -0
- data/lib/moped/bson/code.rb +61 -0
- data/lib/moped/bson/document.rb +16 -0
- data/lib/moped/bson/extensions.rb +81 -0
- data/lib/moped/bson/extensions/array.rb +44 -0
- data/lib/moped/bson/extensions/boolean.rb +14 -0
- data/lib/moped/bson/extensions/false_class.rb +15 -0
- data/lib/moped/bson/extensions/float.rb +23 -0
- data/lib/moped/bson/extensions/hash.rb +49 -0
- data/lib/moped/bson/extensions/integer.rb +37 -0
- data/lib/moped/bson/extensions/nil_class.rb +20 -0
- data/lib/moped/bson/extensions/regexp.rb +40 -0
- data/lib/moped/bson/extensions/string.rb +35 -0
- data/lib/moped/bson/extensions/symbol.rb +25 -0
- data/lib/moped/bson/extensions/time.rb +21 -0
- data/lib/moped/bson/extensions/true_class.rb +15 -0
- data/lib/moped/bson/max_key.rb +21 -0
- data/lib/moped/bson/min_key.rb +21 -0
- data/lib/moped/bson/object_id.rb +123 -0
- data/lib/moped/bson/timestamp.rb +15 -0
- data/lib/moped/bson/types.rb +67 -0
- data/lib/moped/cluster.rb +193 -0
- data/lib/moped/collection.rb +67 -0
- data/lib/moped/cursor.rb +60 -0
- data/lib/moped/database.rb +76 -0
- data/lib/moped/errors.rb +61 -0
- data/lib/moped/indexes.rb +93 -0
- data/lib/moped/logging.rb +25 -0
- data/lib/moped/protocol.rb +20 -0
- data/lib/moped/protocol/command.rb +27 -0
- data/lib/moped/protocol/commands.rb +11 -0
- data/lib/moped/protocol/commands/authenticate.rb +54 -0
- data/lib/moped/protocol/delete.rb +92 -0
- data/lib/moped/protocol/get_more.rb +79 -0
- data/lib/moped/protocol/insert.rb +92 -0
- data/lib/moped/protocol/kill_cursors.rb +61 -0
- data/lib/moped/protocol/message.rb +320 -0
- data/lib/moped/protocol/query.rb +131 -0
- data/lib/moped/protocol/reply.rb +90 -0
- data/lib/moped/protocol/update.rb +107 -0
- data/lib/moped/query.rb +230 -0
- data/lib/moped/server.rb +73 -0
- data/lib/moped/session.rb +253 -0
- data/lib/moped/socket.rb +201 -0
- data/lib/moped/version.rb +4 -0
- metadata +108 -46
data/lib/moped/bson/extensions/symbol.rb
@@ -0,0 +1,25 @@
module Moped
  module BSON
    # @private
    module Extensions
      module Symbol
        module ClassMethods
          def __bson_load__(io)
            io.read(*io.read(4).unpack(INT32_PACK)).chop!.force_encoding('utf-8').intern
          end
        end

        def __bson_dump__(io, key)
          io << Types::SYMBOL
          io << key
          io << NULL_BYTE

          str = to_s.force_encoding('utf-8').force_encoding('binary')
          io << [str.bytesize+1].pack(INT32_PACK)
          io << str
          io << NULL_BYTE
        end
      end
    end
  end
end
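For reference, a minimal, self-contained sketch (core Ruby plus StringIO only) of the symbol payload that __bson_load__ above consumes: an int32 byte count followed by the null-terminated UTF-8 bytes, interned back into a Ruby Symbol. The "l<" directive is an assumption about what INT32_PACK expands to (a little-endian 32-bit integer).

require "stringio"

payload = [7].pack("l<") << "status\x00"   # length prefix, bytes, trailing NUL
io = StringIO.new(payload)
io.read(*io.read(4).unpack("l<")).chop!.force_encoding("utf-8").intern
# => :status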
data/lib/moped/bson/extensions/time.rb
@@ -0,0 +1,21 @@
module Moped
  module BSON
    # @private
    module Extensions
      module Time
        module ClassMethods
          def __bson_load__(io)
            at(io.read(8).unpack(INT64_PACK)[0]/1000.0)
          end
        end

        def __bson_dump__(io, key)
          io << Types::TIME
          io << key
          io << NULL_BYTE
          io << [to_f * 1000].pack(INT64_PACK)
        end
      end
    end
  end
end
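A small sketch of the round trip performed above, using core Ruby only: BSON datetimes are int64 millisecond counts since the epoch, so anything finer than a millisecond is dropped. The "q<" directive stands in for the assumed INT64_PACK format.

t = Time.utc(2012, 4, 1, 12, 30, 15)
bytes = [t.to_f * 1000].pack("q<")            # milliseconds since the epoch
Time.at(bytes.unpack("q<")[0] / 1000.0).utc   # => 2012-04-01 12:30:15 UTC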
data/lib/moped/bson/max_key.rb
@@ -0,0 +1,21 @@
module Moped
  module BSON
    class MaxKey
      class << self
        def ===(other)
          other == self
        end

        def __bson_load__(io)
          self
        end

        def __bson_dump__(io, key)
          io << Types::MAX_KEY
          io << key
          io << NULL_BYTE
        end
      end
    end
  end
end
data/lib/moped/bson/min_key.rb
@@ -0,0 +1,21 @@
module Moped
  module BSON
    class MinKey
      class << self
        def ===(other)
          other == self
        end

        def __bson_load__(io)
          self
        end

        def __bson_dump__(io, key)
          io << Types::MIN_KEY
          io << key
          io << NULL_BYTE
        end
      end
    end
  end
end
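MinKey and MaxKey carry no payload; only the type byte and the key are written, and the case-equality defined above matches the class object itself. A brief illustrative sketch, assuming the gem is loaded with require "moped":

require "moped"

Moped::BSON::MaxKey === Moped::BSON::MaxKey   # => true
Moped::BSON::MaxKey === "anything else"       # => false

case Moped::BSON::MinKey
when Moped::BSON::MinKey then :matched        # => :matched
end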
data/lib/moped/bson/object_id.rb
@@ -0,0 +1,123 @@
require "digest/md5"
require "socket"

module Moped
  module BSON
    class ObjectId

      # Formatting string for outputting an ObjectId.
      @@string_format = ("%02x" * 12).freeze

      attr_reader :data

      class << self
        def from_string(string)
          data = []
          12.times { |i| data << string[i*2, 2].to_i(16) }
          new data
        end

        def legal?(str)
          !!str.match(/^[0-9a-f]{24}$/i)
        end
      end

      def initialize(data = nil, time = nil)
        if data
          @data = data
        elsif time
          @data = @@generator.generate(time.to_i)
        else
          @data = @@generator.next
        end
      end

      def ==(other)
        BSON::ObjectId === other && data == other.data
      end
      alias eql? ==

      def hash
        data.hash
      end

      def to_s
        @@string_format % data
      end

      # Return the UTC time at which this ObjectId was generated. This may
      # be used instead of a created_at timestamp since this information
      # is always encoded in the object id.
      def generation_time
        Time.at(@data.pack("C4").unpack("N")[0]).utc
      end

      class << self
        def __bson_load__(io)
          new io.read(12).unpack('C*')
        end
      end

      def __bson_dump__(io, key)
        io << Types::OBJECT_ID
        io << key
        io << NULL_BYTE
        io << data.pack('C12')
      end

      # @api private
      class Generator
        def initialize
          # Generate and cache 3 bytes of identifying information from the current
          # machine.
          @machine_id = Digest::MD5.digest(Socket.gethostname).unpack("C3")

          @mutex = Mutex.new
          @last_timestamp = nil
          @counter = 0
        end

        # Return object id data based on the current time, incrementing a
        # counter for object ids generated in the same second.
        def next
          now = Time.new.to_i

          counter = @mutex.synchronize do
            last_timestamp, @last_timestamp = @last_timestamp, now

            if last_timestamp == now
              @counter += 1
            else
              @counter = 0
            end
          end

          generate(now, counter)
        end

        # Generate object id data for a given time using the provided +inc+.
        def generate(time, inc = 0)
          pid = Process.pid % 0xFFFF

          [
            time >> 24 & 0xFF, # 4 bytes time (network order)
            time >> 16 & 0xFF,
            time >> 8 & 0xFF,
            time & 0xFF,
            @machine_id[0], # 3 bytes machine
            @machine_id[1],
            @machine_id[2],
            pid >> 8 & 0xFF, # 2 bytes process id
            pid & 0xFF,
            inc >> 16 & 0xFF, # 3 bytes increment
            inc >> 8 & 0xFF,
            inc & 0xFF,
          ]
        end
      end

      @@generator = Generator.new
    end
  end
end
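A brief usage sketch of the API above, assuming require "moped" loads the BSON classes. The first four bytes of the id encode the creation time, which is why generation_time can stand in for a created_at field.

require "moped"

oid = Moped::BSON::ObjectId.new
oid.to_s                                   # => 24-character hex string
oid.generation_time                        # => UTC Time at which the id was generated

copy = Moped::BSON::ObjectId.from_string(oid.to_s)
copy == oid                                # => true
Moped::BSON::ObjectId.legal?("not-an-id")  # => false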
data/lib/moped/bson/timestamp.rb
@@ -0,0 +1,15 @@
module Moped
  module BSON
    class Timestamp < Struct.new(:seconds, :increment)
      class << self
        def __bson_load__(io)
          new(*io.read(8).unpack('l2'))
        end
      end

      def __bson_dump__(io, key)
        io << [17, key, increment, seconds].pack('cZ*l2')
      end
    end
  end
end
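A self-contained sketch (core Ruby only) of the element layout produced by __bson_dump__ above: type byte 17, the null-terminated key, then the increment and the seconds as two 32-bit integers (the 'l' directive uses native byte order).

element = [17, "ts", 7, 1_330_000_000].pack('cZ*l2')
element.unpack('cZ*l2')   # => [17, "ts", 7, 1330000000]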
data/lib/moped/bson/types.rb
@@ -0,0 +1,67 @@
module Moped
  module BSON

    # @private
    module Types
      class CodeWithScope
        def self.__bson_load__(io)
          io.read 4 # swallow the length

          code = io.read(*io.read(4).unpack(INT32_PACK)).chop!.force_encoding('utf-8')
          scope = BSON::Document.deserialize(io)

          Code.new code, scope
        end
      end

      class Integer64
        def self.__bson_load__(io)
          io.read(8).unpack(INT64_PACK)[0]
        end
      end

      MAP = {}
      MAP[1] = Float
      MAP[2] = String
      MAP[3] = Hash
      MAP[4] = Array
      MAP[5] = Binary
      # MAP[6] = undefined - deprecated
      MAP[7] = ObjectId
      MAP[8] = TrueClass
      MAP[9] = Time
      MAP[10] = NilClass
      MAP[11] = Regexp
      # MAP[12] = db pointer - deprecated
      MAP[13] = Code
      MAP[14] = Symbol
      MAP[15] = CodeWithScope
      MAP[16] = Integer
      MAP[17] = Timestamp
      MAP[18] = Integer64
      MAP[255] = MinKey
      MAP[127] = MaxKey

      FLOAT           = 1.chr.freeze
      STRING          = 2.chr.freeze
      HASH            = 3.chr.freeze
      ARRAY           = 4.chr.freeze
      BINARY          = 5.chr.freeze
      OBJECT_ID       = 7.chr.freeze
      BOOLEAN         = 8.chr.freeze
      TIME            = 9.chr.freeze
      NULL            = 10.chr.freeze
      REGEX           = 11.chr.freeze
      CODE            = 13.chr.freeze
      SYMBOL          = 14.chr.freeze
      CODE_WITH_SCOPE = 15.chr.freeze
      INT32           = 16.chr.freeze
      INT64           = 18.chr.freeze
      MAX_KEY         = 127.chr.freeze
      MIN_KEY         = 255.chr.freeze

      TRUE = 1.chr.freeze

    end
  end
end
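The MAP hash and the single-byte constants are two views of the same table: MAP keys a deserializer class by the integer type code found in an incoming document, while the constants are the frozen one-byte strings written when serializing. A small sketch of the correspondence, assuming the gem is loaded:

require "moped"

Moped::BSON::Types::OBJECT_ID.ord                           # => 7
Moped::BSON::Types::MAP[Moped::BSON::Types::OBJECT_ID.ord]  # => Moped::BSON::ObjectId
Moped::BSON::Types::MAP[18]                                 # => Moped::BSON::Types::Integer64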
data/lib/moped/cluster.rb
@@ -0,0 +1,193 @@
module Moped

  # @api private
  #
  # The internal class managing connections to both a single node and replica
  # sets.
  #
  # @note Though the socket class itself *is* threadsafe, the cluster presently
  # is not. This means that in the course of normal operations sessions can be
  # shared across threads, but in failure modes (when a resync is required),
  # things can possibly go wrong.
  class Cluster

    # @return [Array] the user supplied seeds
    attr_reader :seeds

    # @return [Boolean] whether this is a direct connection
    attr_reader :direct

    # @return [Array] all available nodes
    attr_reader :servers

    # @return [Array] seeds gathered from cluster discovery
    attr_reader :dynamic_seeds

    # @param [Array] seeds an array of host:port pairs
    # @param [Boolean] direct (false) whether to connect directly to the hosts
    # provided or to find additional available nodes.
    def initialize(seeds, direct = false)
      @seeds = seeds
      @direct = direct

      @servers = []
      @dynamic_seeds = []
    end

    # @return [Array] available secondary nodes
    def secondaries
      servers.select(&:secondary?)
    end

    # @return [Array] available primary nodes
    def primaries
      servers.select(&:primary?)
    end

    # @return [Array] all known addresses from user supplied seeds, dynamically
    # discovered seeds, and active servers.
    def known_addresses
      [].tap do |addresses|
        addresses.concat seeds
        addresses.concat dynamic_seeds
        addresses.concat servers.map { |server| server.address }
      end.uniq
    end

    def remove(server)
      servers.delete(server)
    end

    def reconnect
      @servers = servers.map { |server| Server.new(server.address) }
    end

    def sync
      known = known_addresses.shuffle
      seen = {}

      sync_seed = ->(seed) do
        server = Server.new seed

        unless seen[server.resolved_address]
          seen[server.resolved_address] = true

          hosts = sync_server(server)

          hosts.each do |host|
            sync_seed[host]
          end
        end
      end

      known.each do |seed|
        sync_seed[seed]
      end

      unless servers.empty?
        @dynamic_seeds = servers.map(&:address)
      end

      true
    end

    def sync_server(server)
      [].tap do |hosts|
        socket = server.socket

        if socket.connect
          info = socket.simple_query Protocol::Command.new(:admin, ismaster: 1)

          if info["ismaster"]
            server.primary = true
          end

          if info["secondary"]
            server.secondary = true
          end

          if info["primary"]
            hosts.push info["primary"]
          end

          if info["hosts"]
            hosts.concat info["hosts"]
          end

          if info["passives"]
            hosts.concat info["passives"]
          end

          merge(server)
        end
      end.uniq
    end

    def merge(server)
      previous = servers.find { |other| other == server }
      primary = server.primary?
      secondary = server.secondary?

      if previous
        previous.merge(server)
      else
        servers << server
      end
    end

    # @param [:read, :write] mode the type of socket to return
    # @return [Socket] a socket valid for +mode+ operations
    def socket_for(mode)
      sync unless primaries.any? || (secondaries.any? && mode == :read)

      server = nil
      while primaries.any? || (secondaries.any? && mode == :read)
        if mode == :write || secondaries.empty?
          server = primaries.sample
        else
          server = secondaries.sample
        end

        if server
          socket = server.socket
          socket.connect unless socket.connection

          if socket.alive?
            break server
          else
            remove server
          end
        end
      end

      unless server
        raise Errors::ConnectionFailure.new("Could not connect to any primary or secondary servers")
      end

      socket = server.socket
      socket.apply_auth auth
      socket
    end

    # @return [Hash] the cached authentication credentials for this cluster.
    def auth
      @auth ||= {}
    end

    # Log in to +database+ with +username+ and +password+. Does not perform the
    # actual log in, but saves the credentials for later authentication on a
    # socket.
    def login(database, username, password)
      auth[database.to_s] = [username, password]
    end

    # Log out of +database+. Does not perform the actual log out, but will log
    # out when the socket is used next.
    def logout(database)
      auth.delete(database.to_s)
    end

  end

end
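Cluster is marked @api private, so a Session normally constructs and drives it, but the methods above can be exercised directly. A minimal sketch, assuming a mongod is reachable on localhost:27017; the database name and credentials are placeholders:

require "moped"

cluster = Moped::Cluster.new(["127.0.0.1:27017"], false)
cluster.login("admin", "user", "secret")   # only caches the credentials
socket  = cluster.socket_for(:write)       # syncs the cluster, picks a primary, applies auth
cluster.logout("admin")                    # takes effect the next time the socket is used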