mongo 1.0.7 → 1.0.8
This diff shows the changes between two publicly released versions of the package, as they appear in their public registries. It is provided for informational purposes only.
- data/HISTORY +6 -0
- data/lib/mongo.rb +1 -1
- data/lib/mongo/collection.rb +13 -9
- data/lib/mongo/connection.rb +35 -4
- data/lib/mongo/cursor.rb +21 -13
- data/lib/mongo/db.rb +1 -1
- data/lib/mongo/gridfs/grid.rb +2 -2
- data/lib/mongo/gridfs/grid_ext.rb +1 -1
- data/lib/mongo/gridfs/grid_file_system.rb +1 -1
- data/lib/mongo/gridfs/grid_io.rb +4 -4
- data/mongo.gemspec +1 -1
- data/test/collection_test.rb +3 -3
- data/test/cursor_test.rb +32 -17
- data/test/db_api_test.rb +7 -26
- data/test/db_test.rb +2 -2
- data/test/replica_sets/node_type_test.rb +42 -0
- data/test/unit/connection_test.rb +3 -3
- metadata +48 -6
data/HISTORY
CHANGED
data/lib/mongo.rb
CHANGED
data/lib/mongo/collection.rb
CHANGED
@@ -58,7 +58,7 @@ module Mongo

       @db, @name = db, name
       @connection = @db.connection
-      @pk_factory = pk_factory || BSON::
+      @pk_factory = pk_factory || BSON::ObjectId
       @hint = nil
     end

@@ -145,15 +145,19 @@ module Mongo
       hint = opts.delete(:hint)
       snapshot = opts.delete(:snapshot)
       batch_size = opts.delete(:batch_size)
+
       if opts[:timeout] == false && !block_given?
         raise ArgumentError, "Timeout can be set to false only when #find is invoked with a block."
+      else
+        timeout = opts.delete(:timeout) || false
       end
-
+
       if hint
         hint = normalize_hint_fields(hint)
       else
         hint = @hint # assumed to be normalized already
       end
+
       raise RuntimeError, "Unknown options [#{opts.inspect}]" unless opts.empty?

       cursor = Cursor.new(self, :selector => selector, :fields => fields, :skip => skip, :limit => limit,
@@ -173,9 +177,9 @@ module Mongo
     # @return [OrderedHash, Nil]
     # a single document or nil if no result is found.
     #
-    # @param [Hash,
+    # @param [Hash, ObjectId, Nil] spec_or_object_id a hash specifying elements
     # which must be present for a document to be included in the result set or an
-    # instance of
+    # instance of ObjectId to be used as the value for an _id query.
     # If nil, an empty selector, {}, will be used.
     #
     # @option opts [Hash]
@@ -187,12 +191,12 @@ module Mongo
       spec = case spec_or_object_id
              when nil
                {}
-             when BSON::
+             when BSON::ObjectId
                {:_id => spec_or_object_id}
              when Hash
                spec_or_object_id
              else
-               raise TypeError, "spec_or_object_id must be an instance of
+               raise TypeError, "spec_or_object_id must be an instance of ObjectId or Hash, or nil"
              end
       find(spec, opts.merge(:limit => -1)).next_document
     end
@@ -204,7 +208,7 @@ module Mongo
     # then an update (upsert) operation will be performed, and any existing
     # document with that _id is overwritten. Otherwise an insert operation is performed.
     #
-    # @return [
+    # @return [ObjectId] the _id of the saved document.
     #
     # @option opts [Boolean, Hash] :safe (+false+)
     # run the operation in safe mode, which run a getlasterror command on the
@@ -230,7 +234,7 @@ module Mongo
     # @param [Hash, Array] doc_or_docs
     # a document (as a hash) or array of documents to be inserted.
     #
-    # @return [
+    # @return [ObjectId, Array]
     # the _id of the inserted document or a list of _ids of all inserted documents.
     # Note: the object may have been modified by the database's PK factory, if it has one.
     #
@@ -471,7 +475,7 @@ module Mongo
     # @option opts [String] :out (nil) the name of the output collection. If specified, the collection will not be treated as temporary.
     # @option opts [Boolean] :keeptemp (false) if true, the generated collection will be persisted. default is false.
     # @option opts [Boolean ] :verbose (false) if true, provides statistics on job execution time.
-    # @
+    # @option opts [Boolean] :raw (false) if true, return the raw result object from the map_reduce command, and not
     # the instantiated collection that's returned by default.
     #
     # @return [Collection] a collection containing the results of the operation.
data/lib/mongo/connection.rb
CHANGED
@@ -24,6 +24,9 @@ module Mongo

   # Instantiates and manages connections to MongoDB.
   class Connection
+    TCPSocket = ::TCPSocket
+    Mutex = ::Mutex
+    ConditionVariable = ::ConditionVariable

     # Abort connections if a ConnectionError is raised.
     Thread.abort_on_exception = true
@@ -35,7 +38,7 @@ module Mongo
     MONGODB_URI_MATCHER = /(([-_.\w\d]+):([-_\w\d]+)@)?([-.\w\d]+)(:([\w\d]+))?(\/([-\d\w]+))?/
     MONGODB_URI_SPEC = "mongodb://[username:password@]host1[:port1][,host2[:port2],...[,hostN[:portN]]][/database]"

-    attr_reader :logger, :size, :host, :port, :nodes, :auths, :sockets, :checked_out
+    attr_reader :logger, :size, :host, :port, :nodes, :auths, :sockets, :checked_out, :primary, :secondaries, :arbiters

     # Counter for generating unique request ids.
     @@current_request_id = 0
@@ -50,6 +53,11 @@ module Mongo
     # replica set, you can use Connection.new, as any other host known to the set will be
     # cached.
     #
+    # Once connected to a replica set, you can find out which nodes are primary, secondary, and
+    # arbiters with the corresponding accessors: Connection#primary, Connection#secondaries, and
+    # Connection#arbiters. This is useful if your application needs to connect manually to nodes other
+    # than the primary.
+    #
     # @param [String, Hash] host.
     # @param [Integer] port specify a port number here if only one host is being specified.
     #
@@ -116,6 +124,12 @@ module Mongo
         @slave_ok = options[:slave_ok]
       end

+      # Cache the various node types
+      # when connecting to a replica set.
+      @primary = nil
+      @secondaries = []
+      @arbiters = []
+
       @logger = options[:logger] || nil
       @options = options

@@ -463,9 +477,11 @@ module Mongo

       while !connected? && !(nodes_to_try = @nodes - @nodes_tried).empty?
         nodes_to_try.each do |node|
-
+          config = check_is_master(node)
+          if is_primary?(config)
             set_primary(node)
-
+          else
+            set_auxillary(node, config)
           end
         end
       end
@@ -578,6 +594,9 @@ module Mongo
       close
       @host = nil
       @port = nil
+      @primary = nil
+      @secondaries = []
+      @arbiters = []
       @nodes_tried = []
     end

@@ -592,7 +611,6 @@ module Mongo
       config && (config['ismaster'] == 1 || config['ismaster'] == true) || @slave_ok
     end

-    # @return
     def check_is_master(node)
       begin
         host, port = *node
@@ -622,9 +640,22 @@ module Mongo
     # apply any saved authentication credentials.
     def set_primary(node)
       @host, @port = *node
+      @primary = [@host, @port]
       apply_saved_authentication
     end

+    # Determines what kind of node we have and caches its host
+    # and port so that users can easily connect manually.
+    def set_auxillary(node, config)
+      if config
+        if config['secondary']
+          @secondaries << node unless @secondaries.include?(node)
+        elsif config['arbiterOnly']
+          @arbiters << node unless @arbiters.include?(node)
+        end
+      end
+    end
+
     # Update the list of known nodes. Only applies to replica sets,
     # where the response to the ismaster command will return a list
     # of known hosts.
data/lib/mongo/cursor.rb
CHANGED
@@ -81,6 +81,18 @@ module Mongo
       doc
     end

+    # Reset this cursor on the server. Cursor options, such as the
+    # query string and the values for skip and limit, are preserved.
+    def rewind!
+      close
+      @cache.clear
+      @cursor_id = nil
+      @closed = false
+      @query_run = false
+      @n_received = nil
+      true
+    end
+
     # Determine whether this cursor has any remaining results.
     #
     # @return [Boolean]
@@ -187,22 +199,18 @@ module Mongo

     # Receive all the documents from this cursor as an array of hashes.
     #
-    #
+    # Notes:
+    #
+    # If you've already started iterating over the cursor, the array returned
+    # by this method contains only the remaining documents. See Cursor#rewind! if you
+    # need to reset the cursor.
+    #
+    # Use of this method is discouraged - in most cases, it's much more
     # efficient to retrieve documents as you need them by iterating over the cursor.
     #
     # @return [Array] an array of documents.
-    #
-    # @raise [InvalidOperation] if this cursor has already been used or if
-    # this method has already been called on the cursor.
     def to_a
-
-      rows = []
-      num_returned = 0
-      while has_next? && (@limit <= 0 || num_returned < @limit)
-        rows << next_document
-        num_returned += 1
-      end
-      rows
+      super
     end

     # Get the explain plan for this cursor.
@@ -229,7 +237,7 @@ module Mongo
     #
     # @return [True]
     def close
-      if @cursor_id
+      if @cursor_id && @cursor_id != 0
         message = BSON::ByteBuffer.new([0, 0, 0, 0])
         message.put_int(1)
         message.put_long(@cursor_id)
data/lib/mongo/db.rb
CHANGED
@@ -60,7 +60,7 @@ module Mongo
   # @option options [Boolean] :strict (False) If true, collections must exist to be accessed and must
   # not exist to be created. See DB#collection and DB#create_collection.
   #
-  # @option options [Object, #create_pk(doc)] :pk (Mongo::
+  # @option options [Object, #create_pk(doc)] :pk (Mongo::ObjectId) A primary key factory object,
   # which should take a hash and return a hash which merges the original hash with any primary key
   # fields the factory wishes to inject. (NOTE: if the object already has a primary key,
   # the factory should not inject a new key).
data/lib/mongo/gridfs/grid.rb
CHANGED
@@ -51,7 +51,7 @@ module Mongo
     #
     # @option opts [String] :filename (nil) a name for the file.
     # @option opts [Hash] :metadata ({}) any additional data to store with the file.
-    # @option opts [
+    # @option opts [ObjectId] :_id (ObjectId) a unique id for
     # the file to be use in lieu of an automatically generated one.
     # @option opts [String] :content_type ('binary/octet-stream') If no content type is specified,
     # the content type will may be inferred from the filename extension if the mime-types gem can be
@@ -60,7 +60,7 @@ module Mongo
     # @option opts [Boolean] :safe (false) When safe mode is enabled, the chunks sent to the server
     # will be validated using an md5 hash. If validation fails, an exception will be raised.
     #
-    # @return [Mongo::
+    # @return [Mongo::ObjectId] the file's id.
     def put(data, opts={})
       filename = opts[:filename]
       opts.merge!(default_grid_io_opts)
data/lib/mongo/gridfs/grid_ext.rb
CHANGED
@@ -42,7 +42,7 @@ module Mongo
     #
     #   # Check for existence by _id
     #   @grid = Grid.new(@db)
-    #   @grid.exist?(:_id => BSON::
+    #   @grid.exist?(:_id => BSON::ObjectId.from_string('4bddcd24beffd95a7db9b8c8'))
     #
     #   # Check for existence by an arbitrary attribute.
     #   @grid = Grid.new(@db)
data/lib/mongo/gridfs/grid_file_system.rb
CHANGED
@@ -55,7 +55,7 @@ module Mongo
     # @param [Hash] opts see GridIO#new
     #
     # @option opts [Hash] :metadata ({}) any additional data to store with the file.
-    # @option opts [
+    # @option opts [ObjectId] :_id (ObjectId) a unique id for
     # the file to be use in lieu of an automatically generated one.
     # @option opts [String] :content_type ('binary/octet-stream') If no content type is specified,
     # the content type will may be inferred from the filename extension if the mime-types gem can be
data/lib/mongo/gridfs/grid_io.rb
CHANGED
@@ -47,7 +47,7 @@ module Mongo
     # @option opts [String] :fs_name the file system prefix.
     # @option opts [Integer] (262144) :chunk_size size of file chunks in bytes.
     # @option opts [Hash] :metadata ({}) any additional data to store with the file.
-    # @option opts [
+    # @option opts [ObjectId] :_id (ObjectId) a unique id for
     # the file to be use in lieu of an automatically generated one.
     # @option opts [String] :content_type ('binary/octet-stream') If no content type is specified,
     # the content type will may be inferred from the filename extension if the mime-types gem can be
@@ -180,7 +180,7 @@ module Mongo
     # This method will be invoked automatically when
     # on GridIO#open is passed a block. Otherwise, it must be called manually.
     #
-    # @return [BSON::
+    # @return [BSON::ObjectId]
     def close
       if @mode[0] == ?w
         if @current_chunk['n'].zero? && @chunk_position.zero?
@@ -200,7 +200,7 @@ module Mongo

     def create_chunk(n)
       chunk = BSON::OrderedHash.new
-      chunk['_id'] = BSON::
+      chunk['_id'] = BSON::ObjectId.new
       chunk['n'] = n
       chunk['files_id'] = @files_id
       chunk['data'] = ''
@@ -308,7 +308,7 @@ module Mongo

     # Initialize the class for writing a file.
     def init_write(opts)
-      @files_id = opts.delete(:_id) || BSON::
+      @files_id = opts.delete(:_id) || BSON::ObjectId.new
       @content_type = opts.delete(:content_type) || (defined? MIME) && get_content_type || DEFAULT_CONTENT_TYPE
       @chunk_size = opts.delete(:chunk_size) || DEFAULT_CHUNK_SIZE
       @metadata = opts.delete(:metadata) if opts[:metadata]
data/mongo.gemspec
CHANGED
data/test/collection_test.rb
CHANGED
@@ -12,9 +12,9 @@ class TestCollection < Test::Unit::TestCase

   def test_optional_pk_factory
     @coll_default_pk = @@db.collection('stuff')
-    assert_equal BSON::
+    assert_equal BSON::ObjectId, @coll_default_pk.pk_factory
     @coll_default_pk = @@db.create_collection('more-stuff')
-    assert_equal BSON::
+    assert_equal BSON::ObjectId, @coll_default_pk.pk_factory

     # Create a db with a pk_factory.
     @db = Connection.new(ENV['MONGO_RUBY_DRIVER_HOST'] || 'localhost',
@@ -334,7 +334,7 @@ class TestCollection < Test::Unit::TestCase

     assert_equal nil, @@test.find_one("hello" => "foo")
     assert_equal nil, @@test.find_one(BSON::OrderedHash["hello", "foo"])
-    assert_equal nil, @@test.find_one(
+    assert_equal nil, @@test.find_one(ObjectId.new)

     assert_raise TypeError do
       @@test.find_one(6)
data/test/cursor_test.rb
CHANGED
@@ -300,7 +300,6 @@ class CursorTest < Test::Unit::TestCase
     @@coll.drop

     client_cursors = @@db.command("cursorInfo" => 1)["clientCursors_size"]
-    by_location = @@db.command("cursorInfo" => 1)["byLocation_size"]

     10000.times do |i|
       @@coll.insert("i" => i)
@@ -308,8 +307,6 @@ class CursorTest < Test::Unit::TestCase

     assert_equal(client_cursors,
                  @@db.command("cursorInfo" => 1)["clientCursors_size"])
-    assert_equal(by_location,
-                 @@db.command("cursorInfo" => 1)["byLocation_size"])

     10.times do |i|
       @@coll.find_one()
@@ -317,8 +314,6 @@ class CursorTest < Test::Unit::TestCase

     assert_equal(client_cursors,
                  @@db.command("cursorInfo" => 1)["clientCursors_size"])
-    assert_equal(by_location,
-                 @@db.command("cursorInfo" => 1)["byLocation_size"])

     10.times do |i|
       a = @@coll.find()
@@ -328,30 +323,22 @@ class CursorTest < Test::Unit::TestCase

     assert_equal(client_cursors,
                  @@db.command("cursorInfo" => 1)["clientCursors_size"])
-    assert_equal(by_location,
-                 @@db.command("cursorInfo" => 1)["byLocation_size"])

     a = @@coll.find()
     a.next_document

     assert_not_equal(client_cursors,
                      @@db.command("cursorInfo" => 1)["clientCursors_size"])
-    assert_not_equal(by_location,
-                     @@db.command("cursorInfo" => 1)["byLocation_size"])

     a.close()

     assert_equal(client_cursors,
                  @@db.command("cursorInfo" => 1)["clientCursors_size"])
-    assert_equal(by_location,
-                 @@db.command("cursorInfo" => 1)["byLocation_size"])

     a = @@coll.find({}, :limit => 10).next_document

     assert_equal(client_cursors,
                  @@db.command("cursorInfo" => 1)["clientCursors_size"])
-    assert_equal(by_location,
-                 @@db.command("cursorInfo" => 1)["byLocation_size"])

     @@coll.find() do |cursor|
       cursor.next_document
@@ -359,8 +346,6 @@ class CursorTest < Test::Unit::TestCase

     assert_equal(client_cursors,
                  @@db.command("cursorInfo" => 1)["clientCursors_size"])
-    assert_equal(by_location,
-                 @@db.command("cursorInfo" => 1)["byLocation_size"])

     @@coll.find() { |cursor|
       cursor.next_document
@@ -368,8 +353,6 @@ class CursorTest < Test::Unit::TestCase

     assert_equal(client_cursors,
                  @@db.command("cursorInfo" => 1)["clientCursors_size"])
-    assert_equal(by_location,
-                 @@db.command("cursorInfo" => 1)["byLocation_size"])
   end

   def test_count_with_fields
@@ -413,4 +396,36 @@ class CursorTest < Test::Unit::TestCase
     end
   end

+  def test_enumberables
+    @@coll.remove
+    100.times do |n|
+      @@coll.insert({:a => n})
+    end
+
+    assert_equal 100, @@coll.find.to_a.length
+    assert_equal 100, @@coll.find.to_set.length
+
+    cursor = @@coll.find
+    50.times { |n| cursor.next_document }
+    assert_equal 50, cursor.to_a.length
+  end
+
+  def test_rewind
+    @@coll.remove
+    100.times do |n|
+      @@coll.insert({:a => n})
+    end
+
+    cursor = @@coll.find
+    cursor.to_a
+    assert_equal [], cursor.map {|doc| doc }
+
+    cursor.rewind!
+    assert_equal 100, cursor.map {|doc| doc }.length
+
+    cursor.rewind!
+    5.times { cursor.next_document }
+    cursor.rewind!
+    assert_equal 100, cursor.map {|doc| doc }.length
+  end
 end
data/test/db_api_test.rb
CHANGED
@@ -29,8 +29,8 @@ class DBAPITest < Test::Unit::TestCase
   end

   def test_insert
-    assert_kind_of BSON::
-    assert_kind_of BSON::
+    assert_kind_of BSON::ObjectId, @@coll.insert('a' => 2)
+    assert_kind_of BSON::ObjectId, @@coll.insert('b' => 3)

     assert_equal 3, @@coll.count
     docs = @@coll.find().to_a
@@ -62,7 +62,7 @@ class DBAPITest < Test::Unit::TestCase
     ids = @@coll.insert([{'a' => 2}, {'b' => 3}])

     ids.each do |i|
-      assert_kind_of BSON::
+      assert_kind_of BSON::ObjectId, i
     end

     assert_equal 3, @@coll.count
@@ -445,25 +445,6 @@ class DBAPITest < Test::Unit::TestCase
     @@db.drop_collection('foobar')
   end

-  def test_to_a
-    cursor = @@coll.find()
-    rows = cursor.to_a
-
-    assert_raise InvalidOperation do
-      cursor.to_a
-    end
-
-    cursor.each { |doc| fail "should be no docs in each now" }
-  end
-
-  def test_to_a_after_each
-    cursor = @@coll.find
-    cursor.each { |row| row }
-    assert_raise InvalidOperation do
-      cursor.to_a
-    end
-  end
-
   def test_where
     @@coll.insert('a' => 2)
     @@coll.insert('a' => 3)
@@ -575,7 +556,7 @@ class DBAPITest < Test::Unit::TestCase
   def test_deref
     @@coll.remove

-    assert_equal nil, @@db.dereference(DBRef.new("test",
+    assert_equal nil, @@db.dereference(DBRef.new("test", ObjectId.new))
     @@coll.insert({"x" => "hello"})
     key = @@coll.find_one()["_id"]
     assert_equal "hello", @@db.dereference(DBRef.new("test", key))["x"]
@@ -596,7 +577,7 @@ class DBAPITest < Test::Unit::TestCase
     a = {"hello" => "world"}

     id = @@coll.save(a)
-    assert_kind_of
+    assert_kind_of ObjectId, id
     assert_equal 1, @@coll.count

     assert_equal id, @@coll.save(a)
@@ -625,14 +606,14 @@ class DBAPITest < Test::Unit::TestCase

     @@coll.save("hello" => "mike")
     id = @@coll.save("hello" => "world")
-    assert_kind_of
+    assert_kind_of ObjectId, id

     assert_equal "world", @@coll.find_one(:_id => id)["hello"]
     @@coll.find(:_id => id).to_a.each do |doc|
       assert_equal "world", doc["hello"]
     end

-    id =
+    id = ObjectId.from_string(id.to_s)
     assert_equal "world", @@coll.find_one(:_id => id)["hello"]
   end

data/test/db_test.rb
CHANGED
@@ -6,7 +6,7 @@ require 'logger'

 class TestPKFactory
   def create_pk(row)
-    row['_id'] ||= BSON::
+    row['_id'] ||= BSON::ObjectId.new
     row
   end
 end
@@ -102,7 +102,7 @@ class DBTest < Test::Unit::TestCase
     assert_not_nil oid
     assert_equal insert_id, oid

-    oid = BSON::
+    oid = BSON::ObjectId.new
     data = {'_id' => oid, 'name' => 'Barney', 'age' => 41}
     coll.insert(data)
     row = coll.find_one({'name' => data['name']})
data/test/replica_sets/node_type_test.rb
ADDED
@@ -0,0 +1,42 @@
+$:.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
+require 'mongo'
+require 'test/unit'
+require 'test/test_helper'
+
+# NOTE: This test expects a replica set of three nodes, one of which is an arbiter, to be running
+# on the local host.
+class ReplicaSetNodeTypeTest < Test::Unit::TestCase
+  include Mongo
+
+  def setup
+    @conn = Mongo::Connection.multi([['localhost', 27017], ['localhost', 27018], ['localhost', 27019]])
+    @db = @conn.db(MONGO_TEST_DB)
+    @db.drop_collection("test-sets")
+    @coll = @db.collection("test-sets")
+  end
+
+  def test_correct_node_types
+    p @conn.primary
+    p @conn.secondaries
+    p @conn.arbiters
+    assert_equal 1, @conn.secondaries.length
+    assert_equal 1, @conn.arbiters.length
+
+    old_secondary = @conn.secondaries.first
+    old_primary = @conn.primary
+
+    puts "Please disconnect the current primary and reconnect so that it becomes secondary."
+    gets
+
+    # Insert something to rescue the connection failure.
+    rescue_connection_failure do
+      @coll.insert({:a => 30}, :safe => true)
+    end
+
+    assert_equal 1, @conn.secondaries.length
+    assert_equal 1, @conn.arbiters.length
+    assert_equal old_primary, @conn.secondaries.first
+    assert_equal old_secondary, @conn.primary
+  end
+
+end
data/test/unit/connection_test.rb
CHANGED
@@ -7,7 +7,7 @@ class ConnectionTest < Test::Unit::TestCase
   def new_mock_socket
     socket = Object.new
     socket.stubs(:setsockopt).with(Socket::IPPROTO_TCP, Socket::TCP_NODELAY, 1)
-    socket.
+    socket.stubs(:close)
     socket
   end

@@ -69,8 +69,8 @@ class ConnectionTest < Test::Unit::TestCase

       admin_db = new_mock_db
       @hosts = ['localhost:27017', 'localhost:27018', 'localhost:27019']
-      admin_db.
-      @conn.
+      admin_db.stubs(:command).returns({'ok' => 1, 'ismaster' => 1, 'hosts' => @hosts})
+      @conn.stubs(:[]).with('admin').returns(admin_db)
       @conn.connect
     end

metadata
CHANGED
@@ -1,12 +1,13 @@
 --- !ruby/object:Gem::Specification
 name: mongo
 version: !ruby/object:Gem::Version
+  hash: 7
   prerelease: false
   segments:
   - 1
   - 0
-  -
-  version: 1.0.
+  - 8
+  version: 1.0.8
 platform: ruby
 authors:
 - Jim Menard
@@ -16,21 +17,23 @@ autorequire:
 bindir: bin
 cert_chain: []

-date: 2010-08-
+date: 2010-08-27 00:00:00 -04:00
 default_executable:
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bson
   prerelease: false
   requirement: &id001 !ruby/object:Gem::Requirement
+    none: false
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
+        hash: 29
         segments:
         - 1
         - 0
-        -
-        version: 1.0.
+        - 5
+        version: 1.0.5
   type: :runtime
   version_requirements: *id001
 description: A Ruby driver for MongoDB. For more information about Mongo, see http://www.mongodb.org.
@@ -73,6 +76,40 @@ files:
 - examples/types.rb
 - bin/bson_benchmark.rb
 - bin/fail_if_no_c.rb
+- test/auxillary/1.4_features.rb
+- test/auxillary/authentication_test.rb
+- test/auxillary/autoreconnect_test.rb
+- test/auxillary/slave_connection_test.rb
+- test/collection_test.rb
+- test/connection_test.rb
+- test/conversions_test.rb
+- test/cursor_fail_test.rb
+- test/cursor_message_test.rb
+- test/cursor_test.rb
+- test/db_api_test.rb
+- test/db_connection_test.rb
+- test/db_test.rb
+- test/grid_file_system_test.rb
+- test/grid_io_test.rb
+- test/grid_test.rb
+- test/replica_pairs/count_test.rb
+- test/replica_pairs/insert_test.rb
+- test/replica_pairs/pooled_insert_test.rb
+- test/replica_pairs/query_test.rb
+- test/replica_sets/count_test.rb
+- test/replica_sets/insert_test.rb
+- test/replica_sets/node_type_test.rb
+- test/replica_sets/pooled_insert_test.rb
+- test/replica_sets/query_test.rb
+- test/replica_sets/replication_ack_test.rb
+- test/support_test.rb
+- test/test_helper.rb
+- test/threading/test_threading_large_pool.rb
+- test/threading_test.rb
+- test/unit/collection_test.rb
+- test/unit/connection_test.rb
+- test/unit/cursor_test.rb
+- test/unit/db_test.rb
 has_rdoc: true
 homepage: http://www.mongodb.org
 licenses: []
@@ -85,23 +122,27 @@ rdoc_options:
 require_paths:
 - lib
 required_ruby_version: !ruby/object:Gem::Requirement
+  none: false
   requirements:
   - - ">="
     - !ruby/object:Gem::Version
+      hash: 3
      segments:
      - 0
      version: "0"
 required_rubygems_version: !ruby/object:Gem::Requirement
+  none: false
   requirements:
   - - ">="
     - !ruby/object:Gem::Version
+      hash: 3
      segments:
      - 0
      version: "0"
 requirements: []

 rubyforge_project:
-rubygems_version: 1.3.
+rubygems_version: 1.3.7
 signing_key:
 specification_version: 3
 summary: Ruby driver for the MongoDB
@@ -128,6 +169,7 @@ test_files:
 - test/replica_pairs/query_test.rb
 - test/replica_sets/count_test.rb
 - test/replica_sets/insert_test.rb
+- test/replica_sets/node_type_test.rb
 - test/replica_sets/pooled_insert_test.rb
 - test/replica_sets/query_test.rb
 - test/replica_sets/replication_ack_test.rb