mongo 0.16 → 0.17
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/README.rdoc +3 -0
- data/lib/mongo.rb +8 -1
- data/lib/mongo/collection.rb +58 -26
- data/lib/mongo/connection.rb +12 -1
- data/lib/mongo/cursor.rb +34 -95
- data/lib/mongo/db.rb +172 -39
- data/lib/mongo/errors.rb +8 -2
- data/lib/mongo/types/objectid.rb +22 -12
- data/lib/mongo/util/bson.rb +12 -0
- data/lib/mongo/util/byte_buffer.rb +8 -4
- data/test/test_collection.rb +29 -0
- data/test/test_connection.rb +10 -3
- data/test/test_db.rb +7 -0
- data/test/test_objectid.rb +12 -1
- data/test/test_threading.rb +61 -11
- data/test/unit/collection_test.rb +54 -0
- data/test/unit/db_test.rb +82 -0
- metadata +4 -2
data/README.rdoc
CHANGED
data/lib/mongo.rb
CHANGED
@@ -18,9 +18,16 @@ require 'mongo/cursor'
 require 'mongo/collection'
 require 'mongo/admin'

+begin
+  require 'mongo_ext/cbson'
+  BSON_SERIALIZER = CBson
+rescue LoadError
+  BSON_SERIALIZER = BSON
+end
+
 module Mongo
   ASCENDING = 1
   DESCENDING = -1

-  VERSION = "0.16"
+  VERSION = "0.17"
 end
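
The hunk above introduces a serializer fallback: when the optional mongo_ext C extension is installed, the top-level BSON_SERIALIZER constant points at CBson; otherwise it falls back to the pure-Ruby BSON class. A minimal illustrative sketch (not part of the package) of how an application could confirm which serializer is active:

    require 'mongo'

    puts Mongo::VERSION     # => "0.17"
    # CBson when mongo_ext is installed, BSON otherwise.
    puts BSON_SERIALIZER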
data/lib/mongo/collection.rb
CHANGED
@@ -202,13 +202,7 @@ module Mongo
     def insert(doc_or_docs, options={})
       doc_or_docs = [doc_or_docs] unless doc_or_docs.is_a?(Array)
       doc_or_docs.collect! { |doc| @pk_factory.create_pk(doc) }
-      result = insert_documents(doc_or_docs)
-      if options.delete(:safe)
-        error = @db.error
-        if error
-          raise OperationFailure, error
-        end
-      end
+      result = insert_documents(doc_or_docs, @name, true, options[:safe])
       result.size > 1 ? result : result.first
     end
     alias_method :<<, :insert

@@ -227,8 +221,9 @@ module Mongo
       message.put_int(0)
       BSON.serialize_cstr(message, "#{@db.name}.#{@name}")
       message.put_int(0)
-      message.put_array(
-      @db.send_message_with_operation(Mongo::Constants::OP_DELETE, message
+      message.put_array(BSON_SERIALIZER.serialize(selector, false).unpack("C*"))
+      @db.send_message_with_operation(Mongo::Constants::OP_DELETE, message,
+        "db.#{@db.name}.remove(#{selector.inspect})")
     end

     # Remove all records.

@@ -240,28 +235,37 @@ module Mongo

     # Update a single document in this collection.
     #
-    # :
-    #
+    # :selector :: a hash specifying elements which must be present for a document to be updated. Note:
+    #              the update command currently updates only the first document matching the
+    #              given selector. If you want all matching documents to be updated, be sure
+    #              to specify :multi => true.
     # :document :: a hash specifying the fields to be changed in the
-    #
-    #
+    #              selected document, or (in the case of an upsert) the document to
+    #              be inserted
     #
     # Options:
     # :upsert :: if true, perform an upsert operation
+    # :multi :: update all documents matching the selector, as opposed to
+    #           just the first matching document. Note: only works in 1.1.3 or later.
     # :safe :: if true, check that the update succeeded. OperationFailure
-    #
-    #
-    def update(
+    #          will be raised on an error. Checking for safety requires an extra
+    #          round-trip to the database
+    def update(selector, document, options={})
       message = ByteBuffer.new
       message.put_int(0)
       BSON.serialize_cstr(message, "#{@db.name}.#{@name}")
-
-
-
-
-
-
-
+      update_options = 0
+      update_options += 1 if options[:upsert]
+      update_options += 2 if options[:multi]
+      message.put_int(update_options)
+      message.put_array(BSON_SERIALIZER.serialize(selector, false).unpack("C*"))
+      message.put_array(BSON_SERIALIZER.serialize(document, false).unpack("C*"))
+      if options[:safe]
+        @db.send_message_with_safe_check(Mongo::Constants::OP_UPDATE, message,
+          "db.#{@name}.update(#{selector.inspect}, #{document.inspect})")
+      else
+        @db.send_message_with_operation(Mongo::Constants::OP_UPDATE, message,
+          "db.#{@name}.update(#{selector.inspect}, #{document.inspect})")
       end
     end

@@ -383,6 +387,28 @@ EOS
       return @db.eval(Code.new(group_function, scope))["result"]
     end

+    # Returns a list of distinct values for +key+ across all
+    # documents in the collection. The key may use dot notation
+    # to reach into an embedded object.
+    #   @collection.save({:zip => 10010, :name => {:age => 27}})
+    #   @collection.save({:zip => 94108, :name => {:age => 24}})
+    #   @collection.save({:zip => 10010, :name => {:age => 27}})
+    #   @collection.save({:zip => 99701, :name => {:age => 24}})
+    #   @collection.save({:zip => 94108, :name => {:age => 27}})
+    #
+    #   @collection.distinct(:zip)
+    #     [10010, 94108, 99701]
+    #   @collection.distinct("name.age")
+    #     [27, 24]
+    def distinct(key)
+      raise MongoArgumentError unless [String, Symbol].include?(key.class)
+      command = OrderedHash.new
+      command[:distinct] = @name
+      command[:key] = key.to_s
+
+      @db.db_command(command)["values"]
+    end
+
     # Rename this collection.
     #
     # If operating in auth mode, client must be authorized as an admin to

@@ -456,12 +482,18 @@ EOS
     # Sends an Mongo::Constants::OP_INSERT message to the database.
     # Takes an array of +documents+, an optional +collection_name+, and a
     # +check_keys+ setting.
-    def insert_documents(documents, collection_name=@name, check_keys=true)
+    def insert_documents(documents, collection_name=@name, check_keys=true, safe=false)
       message = ByteBuffer.new
       message.put_int(0)
       BSON.serialize_cstr(message, "#{@db.name}.#{collection_name}")
-      documents.each { |doc| message.put_array(
-
+      documents.each { |doc| message.put_array(BSON_SERIALIZER.serialize(doc, check_keys).unpack("C*")) }
+      if safe
+        @db.send_message_with_safe_check(Mongo::Constants::OP_INSERT, message,
+          "db.#{collection_name}.insert(#{documents.inspect})")
+      else
+        @db.send_message_with_operation(Mongo::Constants::OP_INSERT, message,
+          "db.#{collection_name}.insert(#{documents.inspect})")
+      end
       documents.collect { |o| o[:_id] || o['_id'] }
     end
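
Taken together, the Collection changes above add per-call safe checks on insert and update, a :multi option for updates, and a new distinct method. An illustrative sketch of the resulting API (database, collection, and field names are placeholders; distinct needs server 1.1+ and :multi needs 1.1.3+, as the driver's own tests note):

    require 'mongo'

    coll = Mongo::Connection.new.db('ruby-mongo-test').collection('people')

    # Safe insert: raises Mongo::OperationFailure if the server reports an error.
    coll.insert({"name" => "fred", "age" => 27}, :safe => true)

    # Update every matching document, checking the result.
    coll.update({"age" => 27}, {"$set" => {"age" => 28}}, :multi => true, :safe => true)

    # Distinct values for a key; dotted keys reach into embedded objects.
    coll.distinct(:age)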
data/lib/mongo/connection.rb
CHANGED
@@ -120,9 +120,20 @@ module Mongo
       single_db_command(name, :dropDatabase => 1)
     end

+    # Copies the database +from+ on the local server to +to+ on the specified +host+.
+    # +host+ defaults to 'localhost' if no value is provided.
+    def copy_database(from, to, host="localhost")
+      oh = OrderedHash.new
+      oh[:copydb] = 1
+      oh[:fromhost] = host
+      oh[:fromdb] = from
+      oh[:todb] = to
+      single_db_command('admin', oh)
+    end
+
     # Return the build information for the current connection.
     def server_info
-      db("admin").
+      db("admin").command({:buildinfo => 1}, {:admin => true, :check_response => true})
     end

     # Returns the build version of the current server, using
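
The new copy_database helper wraps the server's copydb command. An illustrative call mirroring the driver's own test_copy_database (database names are placeholders):

    con = Mongo::Connection.new
    con.db('old').collection('copy-test').insert('a' => 1)
    con.copy_database('old', 'new')   # host defaults to 'localhost'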
data/lib/mongo/cursor.rb
CHANGED
@@ -20,11 +20,8 @@ module Mongo
   # A cursor over query results. Returned objects are hashes.
   class Cursor
     include Mongo::Conversions
-
     include Enumerable

-    RESPONSE_HEADER_SIZE = 20
-
     attr_reader :collection, :selector, :admin, :fields,
       :order, :hint, :snapshot, :timeout,
       :full_collection_name

@@ -199,7 +196,7 @@ module Mongo
         message.put_int(0)
         message.put_int(1)
         message.put_long(@cursor_id)
-        @db.send_message_with_operation(Mongo::Constants::OP_KILL_CURSORS, message)
+        @db.send_message_with_operation(Mongo::Constants::OP_KILL_CURSORS, message, "cursor.close()")
       end
       @cursor_id = 0
       @closed = true

@@ -250,14 +247,14 @@ module Mongo
     # the selector will be used in a $where clause.
     # See http://www.mongodb.org/display/DOCS/Server-side+Code+Execution
     def convert_selector_for_query(selector)
-
-
-
-
-
-
+      case selector
+      when Hash
+        selector
+      when nil
+        {}
+      when String
         {"$where" => Code.new(selector)}
-
+      when Code
         {"$where" => selector}
       end
     end

@@ -267,47 +264,6 @@ module Mongo
       @order || @explain || @hint || @snapshot
     end

-    def read_all
-      read_message_header
-      read_response_header
-      read_objects_off_wire
-    end
-
-    def read_objects_off_wire
-      while doc = next_object_on_wire
-        @cache << doc
-      end
-    end
-
-    def read_message_header
-      message = ByteBuffer.new
-      message.put_array(@db.receive_full(16).unpack("C*"))
-      unless message.size == 16 #HEADER_SIZE
-        raise "Short read for DB response header: expected #{16} bytes, saw #{message.size}"
-      end
-      message.rewind
-      size = message.get_int
-      request_id = message.get_int
-      response_to = message.get_int
-      op = message.get_int
-    end
-
-    def read_response_header
-      header_buf = ByteBuffer.new
-      header_buf.put_array(@db.receive_full(RESPONSE_HEADER_SIZE).unpack("C*"))
-      raise "Short read for DB response header; expected #{RESPONSE_HEADER_SIZE} bytes, saw #{header_buf.length}" unless header_buf.length == RESPONSE_HEADER_SIZE
-      header_buf.rewind
-      @result_flags = header_buf.get_int
-      @cursor_id = header_buf.get_long
-      @starting_from = header_buf.get_int
-      @n_remaining = header_buf.get_int
-      if @n_received
-        @n_received += @n_remaining
-      else
-        @n_received = @n_remaining
-      end
-    end
-
     def num_remaining
       refill_via_get_more if @cache.length == 0
       @cache.length

@@ -320,64 +276,42 @@ module Mongo
       num_remaining > 0
     end

-    def next_object_on_wire
-      # if @n_remaining is 0 but we have a non-zero cursor, there are more
-      # to fetch, so do a GetMore operation, but don't do it here - do it
-      # when someone pulls an object out of the cache and it's empty
-      return nil if @n_remaining == 0
-      object_from_stream
-    end
-
     def refill_via_get_more
       return if send_query_if_needed || @cursor_id.zero?
-
-
-
-      message.put_int(0)
+      message = ByteBuffer.new
+      # Reserved.
+      message.put_int(0)

-
-
-
+      # DB name.
+      db_name = @admin ? 'admin' : @db.name
+      BSON.serialize_cstr(message, "#{db_name}.#{@collection.name}")

-
-
+      # Number of results to return; db decides for now.
+      message.put_int(0)

-
-
-
-
-      }
+      # Cursor id.
+      message.put_long(@cursor_id)
+      results, @n_received, @cursor_id = @db.receive_message_with_operation(Mongo::Constants::OP_GET_MORE, message, "cursor.get_more()")
+      @cache += results
       close_cursor_if_query_complete
     end

-
-      buf = ByteBuffer.new
-      buf.put_array(@db.receive_full(4).unpack("C*"))
-      buf.rewind
-      size = buf.get_int
-      buf.put_array(@db.receive_full(size - 4).unpack("C*"), 4)
-      @n_remaining -= 1
-      buf.rewind
-      BSON.new.deserialize(buf)
-    end
-
+    # Run query first time we request an object from the wire
     def send_query_if_needed
-      # Run query first time we request an object from the wire
       if @query_run
         false
       else
-        message = construct_query_message
-        @db.
-
-
-
-        }
+        message = construct_query_message
+        results, @n_received, @cursor_id = @db.receive_message_with_operation(Mongo::Constants::OP_QUERY, message,
+          (query_log_message if @db.logger))
+        @cache += results
+        @query_run = true
        close_cursor_if_query_complete
        true
      end
    end

-    def construct_query_message
+    def construct_query_message
      message = ByteBuffer.new
      message.put_int(query_opts)
      db_name = @admin ? 'admin' : @db.name

@@ -388,11 +322,16 @@ module Mongo
      if query_contains_special_fields?
        selector = selector_with_special_query_fields
      end
-      message.put_array(
-      message.put_array(
+      message.put_array(BSON_SERIALIZER.serialize(selector, false).unpack("C*"))
+      message.put_array(BSON_SERIALIZER.serialize(@fields, false).unpack("C*")) if @fields
      message
    end

+    def query_log_message
+      "#{@admin ? 'admin' : @db.name}.#{@collection.name}.find(#{@selector.inspect}, #{@fields ? @fields.inspect : '{}'})" +
+      "#{@skip != 0 ? ('.skip(' + @skip.to_s + ')') : ''}#{@limit != 0 ? ('.limit(' + @limit.to_s + ')') : ''}"
+    end
+
    def selector_with_special_query_fields
      sel = OrderedHash.new
      sel['query'] = @selector
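
Cursor now builds a human-readable log line (query_log_message) for each query when the DB has a logger, instead of reading raw response headers itself. A sketch of how that might be observed, based on the :logger option already exercised in test_connection.rb (output format is approximate):

    require 'mongo'
    require 'logger'

    logger = Logger.new(STDOUT)
    logger.level = Logger::DEBUG
    db = Mongo::Connection.new('localhost', 27017, :logger => logger).db('ruby-mongo-test')

    db.collection('books').find({"title" => "Moby Dick"}).to_a
    # Debug output includes lines along the lines of:
    #   MONGODB ruby-mongo-test.books.find({"title"=>"Moby Dick"}, {})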
data/lib/mongo/db.rb
CHANGED
@@ -26,6 +26,8 @@ module Mongo
   # A Mongo database.
   class DB

+    STANDARD_HEADER_SIZE = 16
+    RESPONSE_HEADER_SIZE = 20
     SYSTEM_NAMESPACE_COLLECTION = "system.namespaces"
     SYSTEM_INDEX_COLLECTION = "system.indexes"
     SYSTEM_PROFILE_COLLECTION = "system.profile"

@@ -49,6 +51,8 @@ module Mongo
     # The name of the database.
     attr_reader :name

+    attr_reader :connection
+
     # Host to which we are currently connected.
     attr_reader :host
     # Port to which we are currently connected.

@@ -133,6 +137,8 @@ module Mongo
       raise InvalidName, "database name cannot be the empty string"
     end

+    @connection = options[:connection]
+
     @name, @nodes = db_name, nodes
     @strict = options[:strict]
     @pk_factory = options[:pk]

@@ -431,54 +437,49 @@ module Mongo
       self.collection(collection_name).create_index(field_or_spec, unique)
     end

-
-
-
-
-
-
-
-
-
+    # Sends a message to MongoDB.
+    #
+    # Takes a MongoDB opcode, +operation+, a message of class ByteBuffer,
+    # +message+, and an optional formatted +log_message+.
+    # Sends the message to the databse, adding the necessary headers.
+    def send_message_with_operation(operation, message, log_message=nil)
+      message_with_headers = add_message_headers(operation, message).to_s
+      @logger.debug(" MONGODB #{log_message || message}") if @logger
+      @semaphore.synchronize do
+        send_message_on_socket(message_with_headers)
       end
     end

-
-
-
-    # +message+, and sends the message to the databse, adding the necessary headers.
-    def send_message_with_operation(operation, message)
+    def send_message_with_operation_raw(operation, message, log_message=nil)
+      message_with_headers = add_message_headers_raw(operation, message)
+      @logger.debug(" MONGODB #{log_message || message}") if @logger
       @semaphore.synchronize do
-
-        begin
-          message_with_headers = add_message_headers(operation, message)
-          @logger.debug(" MONGODB #{message}") if @logger
-          @socket.print(message_with_headers.to_s)
-          @socket.flush
-        rescue => ex
-          close
-          raise ex
-        end
+        send_message_on_socket(message_with_headers)
       end
     end

-
-
-
-
-
-
-
-
-
-
+    # Sends a message to the database, waits for a response, and raises
+    # and exception if the operation has failed.
+    def send_message_with_safe_check(operation, message, log_message=nil)
+      message_with_headers = add_message_headers(operation, message)
+      message_with_check = last_error_message
+      @logger.debug(" MONGODB #{log_message || message}") if @logger
+      @semaphore.synchronize do
+        send_message_on_socket(message_with_headers.append!(message_with_check).to_s)
+        docs, num_received, cursor_id = receive
+        if num_received == 1 && error = docs[0]['err']
+          raise Mongo::OperationFailure, error
+        end
       end
     end

-
+    # Send a message to the database and waits for the response.
+    def receive_message_with_operation(operation, message, log_message=nil)
+      message_with_headers = add_message_headers(operation, message).to_s
+      @logger.debug(" MONGODB #{log_message || message}") if @logger
       @semaphore.synchronize do
-
-
+        send_message_on_socket(message_with_headers)
+        receive
       end
     end

@@ -501,9 +502,34 @@ module Mongo
       cursor = Cursor.new(Collection.new(self, SYSTEM_COMMAND_COLLECTION), :admin => use_admin_db, :limit => -1, :selector => selector)
       cursor.next_object
     end
+
+    # Sends a command to the database.
+    #
+    # :selector (required) :: An OrderedHash, or a standard Hash with just one
+    #                         key, specifying the command to be performed.
+    #
+    # :admin (optional) :: If true, the command will be executed on the admin
+    #                      collection.
+    #
+    # :check_response (optional) :: If true, will raise an exception if the
+    #                               command fails.
+    #
+    # Note: DB commands must start with the "command" key. For this reason,
+    # any selector containing more than one key must be an OrderedHash.
+    def command(selector, admin=false, check_response=false)
+      raise MongoArgumentError, "command must be given a selector" unless selector.is_a?(Hash) && !selector.empty?
+      if selector.class.eql?(Hash) && selector.keys.length > 1
+        raise MongoArgumentError, "DB#command requires an OrderedHash when hash contains multiple keys"
+      end

-
-
+      result = Cursor.new(system_command_collection, :admin => admin,
+        :limit => -1, :selector => selector).next_object
+
+      if check_response && !ok?(result)
+        raise OperationFailure, "Database command '#{selector.keys.first}' failed."
+      else
+        result
+      end
     end

     def full_collection_name(collection_name)

@@ -512,6 +538,80 @@ module Mongo

     private

+    def receive
+      receive_header
+      number_received, cursor_id = receive_response_header
+      read_documents(number_received, cursor_id)
+    end
+
+    def receive_header
+      header = ByteBuffer.new
+      header.put_array(receive_data_on_socket(16).unpack("C*"))
+      unless header.size == STANDARD_HEADER_SIZE
+        raise "Short read for DB response header: " +
+          "expected #{STANDARD_HEADER_SIZE} bytes, saw #{header.size}"
+      end
+      header.rewind
+      size = header.get_int
+      request_id = header.get_int
+      response_to = header.get_int
+      op = header.get_int
+    end
+
+    def receive_response_header
+      header_buf = ByteBuffer.new
+      header_buf.put_array(receive_data_on_socket(RESPONSE_HEADER_SIZE).unpack("C*"))
+      if header_buf.length != RESPONSE_HEADER_SIZE
+        raise "Short read for DB response header; " +
+          "expected #{RESPONSE_HEADER_SIZE} bytes, saw #{header_buf.length}"
+      end
+      header_buf.rewind
+      result_flags = header_buf.get_int
+      cursor_id = header_buf.get_long
+      starting_from = header_buf.get_int
+      number_remaining = header_buf.get_int
+      [number_remaining, cursor_id]
+    end
+
+    def read_documents(number_received, cursor_id)
+      docs = []
+      number_remaining = number_received
+      while number_remaining > 0 do
+        buf = ByteBuffer.new
+        buf.put_array(receive_data_on_socket(4).unpack("C*"))
+        buf.rewind
+        size = buf.get_int
+        buf.put_array(receive_data_on_socket(size - 4).unpack("C*"), 4)
+        number_remaining -= 1
+        buf.rewind
+        docs << BSON.new.deserialize(buf)
+      end
+      [docs, number_received, cursor_id]
+    end
+
+    # Sending a message on socket.
+    def send_message_on_socket(packed_message)
+      connect_to_master if !connected? && @auto_reconnect
+      begin
+        @socket.print(packed_message)
+        @socket.flush
+      rescue => ex
+        close
+        raise ex
+      end
+    end
+
+    # Receive data of specified length on socket.
+    def receive_data_on_socket(length)
+      message = ""
+      while message.length < length do
+        chunk = @socket.recv(length - message.length)
+        raise "connection closed" unless chunk.length > 0
+        message += chunk
+      end
+      message
+    end
+
     # Prepares a message for transmission to MongoDB by
     # constructing a valid message header.
     def add_message_headers(operation, message)

@@ -538,8 +638,41 @@ module Mongo
       @@current_request_id
     end

+    # Creates a getlasterror message.
+    def last_error_message
+      generate_last_error_message
+    end
+
+    def generate_last_error_message
+      message = ByteBuffer.new
+      message.put_int(0)
+      BSON.serialize_cstr(message, "#{@name}.$cmd")
+      message.put_int(0)
+      message.put_int(-1)
+      message.put_array(BSON_SERIALIZER.serialize({:getlasterror => 1}, false).unpack("C*"))
+      add_message_headers(Mongo::Constants::OP_QUERY, message)
+    end
+
+    def reset_error_message
+      @@reset_error_message ||= generate_reset_error_message
+    end
+
+    def generate_reset_error_message
+      message = ByteBuffer.new
+      message.put_int(0)
+      BSON.serialize_cstr(message, "#{@name}.$cmd")
+      message.put_int(0)
+      message.put_int(-1)
+      message.put_array(BSON_SERIALIZER.serialize({:reseterror => 1}, false).unpack("C*"))
+      add_message_headers(Mongo::Constants::OP_QUERY, message)
+    end
+
     def hash_password(username, plaintext)
       Digest::MD5.hexdigest("#{username}:mongo:#{plaintext}")
     end
+
+    def system_command_collection
+      Collection.new(self, SYSTEM_COMMAND_COLLECTION)
+    end
   end
 end
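
DB#command is the new front door for database commands: a single-key Hash, or an OrderedHash when key order matters, plus optional admin and check_response flags. A hedged sketch of typical use (buildinfo is the command used in the driver's own tests; the distinct selector below uses placeholder collection and key names):

    db = Mongo::Connection.new.db('ruby-mongo-test')

    # Single-key command against the admin database; raises
    # Mongo::OperationFailure if the server does not report ok.
    info = db.command({:buildinfo => 1}, true, true)

    # Multi-key commands must preserve key order, so use OrderedHash.
    cmd = OrderedHash.new
    cmd[:distinct] = 'people'
    cmd[:key] = 'age'
    db.command(cmd)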
data/lib/mongo/errors.rb
CHANGED
@@ -18,14 +18,20 @@ module Mongo
 # Generic Mongo Ruby Driver exception class.
 class MongoRubyError < StandardError; end

+# Raised when MongoDB itself has returned an error.
+class MongoDBError < RuntimeError; end
+
 # Raised when configuration options cause connections, queries, etc., to fail.
 class ConfigurationError < MongoRubyError; end

+# Raised when invalid arguments are sent to Mongo Ruby methods.
+class MongoArgumentError < MongoRubyError; end
+
 # Raised when a database operation fails.
-class OperationFailure <
+class OperationFailure < MongoDBError; end

 # Raised when a client attempts to perform an invalid operation.
-class InvalidOperation <
+class InvalidOperation < MongoDBError; end

 # Raised when an invalid name is used.
 class InvalidName < RuntimeError; end
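
With the new MongoDBError and MongoArgumentError classes, server-side failures and bad arguments surface as distinct exceptions. A small illustrative example of rescuing a safe-mode failure (collection and field names are placeholders; the unique-index setup mirrors test_threading.rb):

    coll = Mongo::Connection.new.db('ruby-mongo-test').collection('unique-test')
    coll.create_index('code', true)          # unique index
    coll.insert({'code' => 1}, :safe => true)

    begin
      coll.insert({'code' => 1}, :safe => true)   # violates the unique index
    rescue Mongo::OperationFailure => ex
      warn "insert failed: #{ex.message}"
    end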
data/lib/mongo/types/objectid.rb
CHANGED
@@ -57,6 +57,12 @@ module Mongo
     end
     alias_method :==, :eql?

+    # Returns a unique hashcode for the object.
+    # This is required since we've defined an #eql? method.
+    def hash
+      @data.hash
+    end
+
     def to_a
       @data.dup
     end

@@ -120,23 +126,27 @@ module Mongo

     private

-
-
+    begin
+      require 'mongo_ext/cbson'
+    rescue LoadError
+      def generate
+        oid = ''

-
-
-
+        # 4 bytes current time
+        time = Time.new.to_i
+        oid += [time].pack("N")

-
-
+        # 3 bytes machine
+        oid += Digest::MD5.digest(Socket.gethostname)[0, 3]

-
-
+        # 2 bytes pid
+        oid += [Process.pid % 0xFFFF].pack("n")

-
-
+        # 3 bytes inc
+        oid += [get_inc].pack("N")[1, 3]

-
+        oid.unpack("C12")
+      end
     end

     def get_inc
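
Defining ObjectID#hash alongside the existing #eql? means two ObjectIDs built from the same data now collapse in Array#uniq and behave consistently as Hash keys, which the new test_array_uniq_for_equilavent_ids exercises. A brief sketch mirroring that test:

    a = Mongo::ObjectID.new('123')
    b = Mongo::ObjectID.new('123')

    a == b             # => true
    a.hash == b.hash   # => true (new in 0.17)
    [a, b].uniq.size   # => 1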
data/lib/mongo/util/bson.rb
CHANGED
@@ -71,6 +71,12 @@ class BSON
     @buf.to_a
   end

+  # Serializes an object.
+  # Implemented to ensure an API compatible with BSON extension.
+  def self.serialize(obj, check_keys)
+    new.serialize(obj, check_keys)
+  end
+
   begin
     require 'mongo_ext/cbson'
     def serialize(obj, check_keys=false)

@@ -99,6 +105,12 @@ class BSON
     end
   end

+  # Returns the array stored in the buffer.
+  # Implemented to ensure an API compatible with BSON extension.
+  def unpack(arg)
+    @buf.to_a
+  end
+
   def serialize_key_value(k, v, check_keys)
     k = k.to_s
     if check_keys
data/lib/mongo/util/byte_buffer.rb
CHANGED
@@ -20,9 +20,11 @@ class ByteBuffer
   attr_reader :order

   def initialize(initial_data=[])
-    @buf
-    @cursor =
-
+    @buf = initial_data
+    @cursor = @buf.length
+    @order = :little_endian
+    @int_pack_order = 'V'
+    @double_pack_order = 'E'
   end

   # +endianness+ should be :little_endian or :big_endian. Default is :little_endian

@@ -159,7 +161,9 @@ class ByteBuffer
   end

   def to_s
-    if @buf.respond_to?
+    if @buf.respond_to? :fast_pack
+      @buf.fast_pack
+    elsif @buf.respond_to? "pack"
       @buf.pack("C*")
     else
       @buf
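
The pure-Ruby BSON class gains a class-level serialize and an unpack shim so it can be swapped interchangeably with the C extension's CBson, which is exactly how the driver now builds wire messages. An illustrative round-trip using whichever serializer is active (a sketch, not part of the package):

    doc = {'x' => 1}
    bytes = BSON_SERIALIZER.serialize(doc, false).unpack("C*")   # array of byte values

    buf = ByteBuffer.new
    buf.put_array(bytes)
    buf.rewind
    BSON.new.deserialize(buf)   # => an ordered hash equal to {"x" => 1}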
data/test/test_collection.rb
CHANGED
@@ -59,6 +59,21 @@ class TestCollection < Test::Unit::TestCase
     assert_equal 5, @@db.collection("test.foo").find_one()["x"]
   end

+  if @@version > "1.1"
+    def test_distinct
+      @@test.remove
+      @@test.insert([{:a => 0, :b => {:c => "a"}},
+                     {:a => 1, :b => {:c => "b"}},
+                     {:a => 1, :b => {:c => "c"}},
+                     {:a => 2, :b => {:c => "a"}},
+                     {:a => 3},
+                     {:a => 3}])
+
+      assert_equal [0, 1, 2, 3], @@test.distinct(:a).sort
+      assert_equal ["a", "b", "c"], @@test.distinct("b.c").sort
+    end
+  end
+
   def test_safe_insert
     a = {"hello" => "world"}
     @@test.insert(a)

@@ -82,6 +97,20 @@ class TestCollection < Test::Unit::TestCase
     assert_equal 1, @@test.find_one(:_id => id2)["x"]
   end

+  if @@version >= "1.1.3"
+    def test_multi_update
+      @@test.save("num" => 10)
+      @@test.save("num" => 10)
+      @@test.save("num" => 10)
+      assert_equal 3, @@test.count
+
+      @@test.update({"num" => 10}, {"$set" => {"num" => 100}}, :multi => true)
+      @@test.find.each do |doc|
+        assert_equal 100, doc["num"]
+      end
+    end
+  end
+
   def test_upsert
     @@test.update({"page" => "/"}, {"$inc" => {"count" => 1}}, :upsert => true)
     @@test.update({"page" => "/"}, {"$inc" => {"count" => 1}}, :upsert => true)
data/test/test_connection.rb
CHANGED
@@ -21,8 +21,7 @@ class TestConnection < Test::Unit::TestCase

   def test_server_info
     server_info = @mongo.server_info
-    assert server_info.keys.include?
-    assert server_info.keys.include? "bits"
+    assert server_info.keys.include?("version")
     assert_equal 1.0, server_info["ok"]
   end

@@ -54,6 +53,14 @@ class TestConnection < Test::Unit::TestCase
     @mongo.drop_database('ruby-mongo-info-test')
   end

+  def test_copy_database
+    @mongo.db('old').collection('copy-test').insert('a' => 1)
+    @mongo.copy_database('old', 'new')
+    old_object = @mongo.db('old').collection('copy-test').find.next_object
+    new_object = @mongo.db('new').collection('copy-test').find.next_object
+    assert_equal old_object, new_object
+  end
+
   def test_database_names
     @mongo.drop_database('ruby-mongo-info-test')
     @mongo.db('ruby-mongo-info-test').collection('info-test').insert('a' => 1)

@@ -73,7 +80,7 @@ class TestConnection < Test::Unit::TestCase
     logger.level = Logger::DEBUG
     db = Connection.new(@host, @port, :logger => logger).db('ruby-mongo-test')

-    assert output.string.include?("
+    assert output.string.include?("$cmd.find")
   end

   def test_connection_logger
data/test/test_db.rb
CHANGED
@@ -189,6 +189,13 @@ class DBTest < Test::Unit::TestCase
     assert_nil @@db.previous_error
   end

+  def test_check_command_response
+    command = {:forceerror => 1}
+    assert_raise OperationFailure do
+      @@db.command(command, false, true)
+    end
+  end
+
   def test_last_status
     @@db['test'].remove
     @@db['test'].save("i" => 1)
data/test/test_objectid.rb
CHANGED
@@ -7,7 +7,18 @@ class ObjectIDTest < Test::Unit::TestCase
   include Mongo

   def setup
-    @o = ObjectID.new
+    @o = ObjectID.new
+  end
+
+  def test_hashcode
+    assert_equal @o.instance_variable_get(:@data).hash, @o.hash
+  end
+
+  def test_array_uniq_for_equilavent_ids
+    a = ObjectID.new('123')
+    b = ObjectID.new('123')
+    assert_equal a, b
+    assert_equal 1, [a, b].uniq.size
   end

   def test_create_pk_method
data/test/test_threading.rb
CHANGED
@@ -1,18 +1,68 @@
-
-require 'mongo'
-require 'test/unit'
+require 'test/test_helper'

 class TestThreading < Test::Unit::TestCase

   include Mongo

-  @@
-  @@port = ENV['MONGO_RUBY_DRIVER_PORT'] || Connection::DEFAULT_PORT
-  @@db = Connection.new(@@host, @@port).db('ruby-mongo-test')
+  @@db = Connection.new.db('ruby-mongo-test')
   @@coll = @@db.collection('thread-test-collection')

+  def set_up_safe_data
+    @@db.drop_collection('duplicate')
+    @@db.drop_collection('unique')
+    @duplicate = @@db.collection('duplicate')
+    @unique = @@db.collection('unique')
+
+    @duplicate.insert("test" => "insert")
+    @duplicate.insert("test" => "update")
+    @unique.insert("test" => "insert")
+    @unique.insert("test" => "update")
+    @unique.create_index("test", true)
+  end
+
+  def test_safe_update
+    set_up_safe_data
+    threads = []
+    100.times do |i|
+      threads[i] = Thread.new do
+        if i % 2 == 0
+          assert_raise Mongo::OperationFailure do
+            @unique.update({"test" => "insert"}, {"$set" => {"test" => "update"}}, :safe => true)
+          end
+        else
+          @duplicate.update({"test" => "insert"}, {"$set" => {"test" => "update"}}, :safe => true)
+        end
+      end
+    end
+
+    100.times do |i|
+      threads[i].join
+    end
+  end
+
+  def test_safe_insert
+    set_up_safe_data
+    threads = []
+    100.times do |i|
+      threads[i] = Thread.new do
+        if i % 2 == 0
+          assert_raise Mongo::OperationFailure do
+            @unique.insert({"test" => "insert"}, :safe => true)
+          end
+        else
+          @duplicate.insert({"test" => "insert"}, :safe => true)
+        end
+      end
+    end
+
+    100.times do |i|
+      threads[i].join
+    end
+  end
+
   def test_threading
-    @@coll.
+    @@coll.drop
+    @@coll = @@db.collection('thread-test-collection')

     1000.times do |i|
       @@coll.insert("x" => i)

@@ -21,13 +71,13 @@ class TestThreading < Test::Unit::TestCase
     threads = []

     10.times do |i|
-      threads[i] = Thread.new
+      threads[i] = Thread.new do
         sum = 0
-        @@coll.find().each
+        @@coll.find().each do |document|
           sum += document["x"]
-
+        end
         assert_equal 499500, sum
-
+      end
     end

     10.times do |i|
data/test/unit/collection_test.rb
ADDED
@@ -0,0 +1,54 @@
+require 'test/test_helper'
+
+class CollectionTest < Test::Unit::TestCase
+
+  class MockDB < DB
+    def connect_to_master
+      true
+    end
+  end
+
+  context "Basic operations: " do
+    setup do
+      @logger = mock()
+    end
+
+    should "send update message" do
+      @db = MockDB.new("testing", ['localhost', 27017], :logger => @logger)
+      @coll = @db.collection('books')
+      @db.expects(:send_message_with_operation).with do |op, msg, log|
+        op == 2001 && log.include?("db.books.update")
+      end
+      @coll.update({}, {:title => 'Moby Dick'})
+    end
+
+    should "send insert message" do
+      @db = MockDB.new("testing", ['localhost', 27017], :logger => @logger)
+      @coll = @db.collection('books')
+      @db.expects(:send_message_with_operation).with do |op, msg, log|
+        op == 2002 && log.include?("db.books.insert")
+      end
+      @coll.insert({:title => 'Moby Dick'})
+    end
+
+    should "send safe update message" do
+      @db = MockDB.new("testing", ['localhost', 27017], :logger => @logger)
+      @coll = @db.collection('books')
+      @db.expects(:send_message_with_safe_check).with do |op, msg, log|
+        op == 2001 && log.include?("db.books.update")
+      end
+      @coll.update({}, {:title => 'Moby Dick'}, :safe => true)
+    end
+
+    should "send safe insert message" do
+      @db = MockDB.new("testing", ['localhost', 27017], :logger => @logger)
+      @coll = @db.collection('books')
+      @db.expects(:send_message_with_safe_check).with do |op, msg, log|
+        op == 2001 && log.include?("db.books.update")
+      end
+      @coll.update({}, {:title => 'Moby Dick'}, :safe => true)
+    end
+  end
+end
+
+
data/test/unit/db_test.rb
ADDED
@@ -0,0 +1,82 @@
+require 'test/test_helper'
+
+class DBTest < Test::Unit::TestCase
+
+  class MockDB < DB
+    attr_accessor :socket
+
+    def connect_to_master
+      true
+    end
+
+    public :add_message_headers
+  end
+
+  def insert_message(db, documents)
+    documents = [documents] unless documents.is_a?(Array)
+    message = ByteBuffer.new
+    message.put_int(0)
+    BSON.serialize_cstr(message, "#{db.name}.test")
+    documents.each { |doc| message.put_array(BSON.new.serialize(doc, true).to_a) }
+    message = db.add_message_headers(Mongo::Constants::OP_INSERT, message)
+  end
+
+  context "DB commands" do
+    setup do
+      @db = MockDB.new("testing", ['localhost', 27017])
+      @collection = mock()
+      @db.stubs(:system_command_collection).returns(@collection)
+    end
+
+    should "raise an error if given a hash with more than one key" do
+      assert_raise MongoArgumentError do
+        @db.command(:buildinfo => 1, :somekey => 1)
+      end
+    end
+
+    should "raise an error if the selector is omitted" do
+      assert_raise MongoArgumentError do
+        @db.command({}, true)
+      end
+    end
+
+    should "create the proper cursor" do
+      @cursor = mock(:next_object => {"ok" => 1})
+      Cursor.expects(:new).with(@collection, :admin => true,
+        :limit => -1, :selector => {:buildinfo => 1}).returns(@cursor)
+      command = {:buildinfo => 1}
+      @db.command(command, true)
+    end
+
+    should "raise an error when the command fails" do
+      @cursor = mock(:next_object => {"ok" => 0})
+      Cursor.expects(:new).with(@collection, :admin => true,
+        :limit => -1, :selector => {:buildinfo => 1}).returns(@cursor)
+      assert_raise OperationFailure do
+        command = {:buildinfo => 1}
+        @db.command(command, true, true)
+      end
+    end
+  end
+
+  context "safe messages" do
+    setup do
+      @db = MockDB.new("testing", ['localhost', 27017])
+      @collection = mock()
+      @db.stubs(:system_command_collection).returns(@collection)
+    end
+
+    should "receive getlasterror message" do
+      @socket = mock()
+      @socket.stubs(:close)
+      @socket.expects(:flush)
+      @socket.expects(:print).with { |message| message.include?('getlasterror') }
+      @db.socket = @socket
+      @db.stubs(:receive)
+      message = insert_message(@db, {:a => 1})
+      @db.send_message_with_safe_check(Mongo::Constants::OP_QUERY, message)
+    end
+  end
+end
+
+
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: mongo
 version: !ruby/object:Gem::Version
-  version: "0.16"
+  version: "0.17"
 platform: ruby
 authors:
 - Jim Menard

@@ -10,7 +10,7 @@ autorequire:
 bindir: bin
 cert_chain: []

-date: 2009-
+date: 2009-11-16 00:00:00 -05:00
 default_executable:
 dependencies: []

@@ -115,4 +115,6 @@ test_files:
 - test/test_round_trip.rb
 - test/test_slave_connection.rb
 - test/test_threading.rb
+- test/unit/collection_test.rb
 - test/unit/cursor_test.rb
+- test/unit/db_test.rb