mongo 0.19.3 → 0.20
This diff shows the changes between two publicly released versions of this package, as they appear in its public registry. It is provided for informational purposes only.
- data/README.rdoc +7 -3
- data/Rakefile +16 -6
- data/bin/bson_benchmark.rb +2 -2
- data/examples/gridfs.rb +3 -2
- data/lib/mongo.rb +3 -26
- data/lib/mongo/collection.rb +69 -50
- data/lib/mongo/connection.rb +16 -41
- data/lib/mongo/cursor.rb +22 -32
- data/lib/mongo/db.rb +13 -5
- data/lib/mongo/exceptions.rb +11 -15
- data/lib/mongo/gridfs/grid.rb +14 -3
- data/lib/mongo/gridfs/grid_file_system.rb +28 -5
- data/lib/mongo/gridfs/grid_io.rb +42 -24
- data/lib/mongo/util/support.rb +13 -2
- data/mongo-ruby-driver.gemspec +3 -1
- data/test/collection_test.rb +62 -9
- data/test/connection_test.rb +21 -32
- data/test/conversions_test.rb +1 -1
- data/test/cursor_test.rb +2 -2
- data/test/db_api_test.rb +28 -27
- data/test/db_connection_test.rb +1 -1
- data/test/db_test.rb +23 -13
- data/test/grid_file_system_test.rb +30 -4
- data/test/grid_io_test.rb +14 -1
- data/test/grid_test.rb +59 -3
- data/test/test_helper.rb +4 -1
- data/test/threading/test_threading_large_pool.rb +1 -1
- data/test/threading_test.rb +1 -1
- data/test/unit/collection_test.rb +2 -2
- data/test/unit/cursor_test.rb +7 -0
- data/test/unit/db_test.rb +8 -8
- metadata +6 -46
- data/bin/gr.rb +0 -14
- data/lib/bson.rb +0 -46
- data/lib/bson/bson_c.rb +0 -20
- data/lib/bson/bson_ruby.rb +0 -601
- data/lib/bson/byte_buffer.rb +0 -224
- data/lib/bson/exceptions.rb +0 -39
- data/lib/bson/ordered_hash.rb +0 -140
- data/lib/bson/types/binary.rb +0 -54
- data/lib/bson/types/code.rb +0 -36
- data/lib/bson/types/dbref.rb +0 -40
- data/lib/bson/types/min_max_keys.rb +0 -58
- data/lib/bson/types/objectid.rb +0 -180
- data/lib/bson/types/regexp_of_holding.rb +0 -45
- data/lib/mongo/gridfs.rb +0 -29
- data/lib/mongo/gridfs/chunk.rb +0 -91
- data/lib/mongo/gridfs/grid_store.rb +0 -580
- data/lib/mongo/types/binary.rb +0 -52
- data/lib/mongo/types/code.rb +0 -36
- data/lib/mongo/types/dbref.rb +0 -40
- data/lib/mongo/types/min_max_keys.rb +0 -58
- data/lib/mongo/types/objectid.rb +0 -180
- data/lib/mongo/types/regexp_of_holding.rb +0 -45
- data/lib/mongo/util/bson_c.rb +0 -18
- data/lib/mongo/util/bson_ruby.rb +0 -606
- data/lib/mongo/util/byte_buffer.rb +0 -222
- data/lib/mongo/util/ordered_hash.rb +0 -140
- data/test/binary_test.rb +0 -15
- data/test/bson_test.rb +0 -459
- data/test/byte_buffer_test.rb +0 -81
- data/test/chunk_test.rb +0 -82
- data/test/grid_store_test.rb +0 -337
- data/test/objectid_test.rb +0 -125
- data/test/ordered_hash_test.rb +0 -172
data/lib/mongo/util/byte_buffer.rb
DELETED
@@ -1,222 +0,0 @@
-# --
-# Copyright (C) 2008-2010 10gen Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ++
-
-# A byte buffer.
-class ByteBuffer
-
-  # Commonly-used integers.
-  INT_LOOKUP = {
-    0 => [0, 0, 0, 0],
-    1 => [1, 0, 0, 0],
-    2 => [2, 0, 0, 0],
-    3 => [3, 0, 0, 0],
-    4 => [4, 0, 0, 0],
-    2001 => [209, 7, 0, 0],
-    2002 => [210, 7, 0, 0],
-    2004 => [212, 7, 0, 0],
-    2005 => [213, 7, 0, 0],
-    2006 => [214, 7, 0, 0]
-  }
-
-  attr_reader :order
-
-  def initialize(initial_data=[])
-    @buf = initial_data
-    @cursor = @buf.length
-    @order = :little_endian
-    @int_pack_order = 'V'
-    @double_pack_order = 'E'
-  end
-
-  if RUBY_VERSION >= '1.9'
-    def self.to_utf8(str)
-      str.encode("utf-8")
-    end
-  else
-    def self.to_utf8(str)
-      begin
-        str.unpack("U*")
-      rescue => ex
-        raise InvalidStringEncoding, "String not valid utf-8: #{str}"
-      end
-      str
-    end
-  end
-
-  def self.serialize_cstr(buf, val)
-    buf.put_array(to_utf8(val.to_s).unpack("C*") + [0])
-  end
-
-  # +endianness+ should be :little_endian or :big_endian. Default is :little_endian
-  def order=(endianness)
-    @order = endianness
-    @int_pack_order = endianness == :little_endian ? 'V' : 'N'
-    @double_pack_order = endianness == :little_endian ? 'E' : 'G'
-  end
-
-  def rewind
-    @cursor = 0
-  end
-
-  def position
-    @cursor
-  end
-
-  def position=(val)
-    @cursor = val
-  end
-
-  def clear
-    @buf = []
-    rewind
-  end
-
-  def size
-    @buf.size
-  end
-  alias_method :length, :size
-
-  # Appends a second ByteBuffer object, +buffer+, to the current buffer.
-  def append!(buffer)
-    @buf = @buf + buffer.to_a
-    self
-  end
-
-  # Prepends a second ByteBuffer object, +buffer+, to the current buffer.
-  def prepend!(buffer)
-    @buf = buffer.to_a + @buf
-    self
-  end
-
-  def put(byte, offset=nil)
-    @cursor = offset if offset
-    @buf[@cursor] = byte
-    @cursor += 1
-  end
-
-  def put_array(array, offset=nil)
-    @cursor = offset if offset
-    @buf[@cursor, array.length] = array
-    @cursor += array.length
-  end
-
-  def put_int(i, offset=nil)
-    unless a = INT_LOOKUP[i]
-      a = []
-      [i].pack(@int_pack_order).each_byte { |b| a << b }
-    end
-    put_array(a, offset)
-  end
-
-  def put_long(i, offset=nil)
-    offset = @cursor unless offset
-    if @int_pack_order == 'N'
-      put_int(i >> 32, offset)
-      put_int(i & 0xffffffff, offset + 4)
-    else
-      put_int(i & 0xffffffff, offset)
-      put_int(i >> 32, offset + 4)
-    end
-  end
-
-  def put_double(d, offset=nil)
-    a = []
-    [d].pack(@double_pack_order).each_byte { |b| a << b }
-    put_array(a, offset)
-  end
-
-  # If +size+ == nil, returns one byte. Else returns array of bytes of length
-  # # +size+.
-  def get(len=nil)
-    one_byte = len.nil?
-    len ||= 1
-    check_read_length(len)
-    start = @cursor
-    @cursor += len
-    if one_byte
-      @buf[start]
-    else
-      if @buf.respond_to? "unpack"
-        @buf[start, len].unpack("C*")
-      else
-        @buf[start, len]
-      end
-    end
-  end
-
-  def get_int
-    check_read_length(4)
-    vals = ""
-    (@cursor..@cursor+3).each { |i| vals << @buf[i].chr }
-    @cursor += 4
-    vals.unpack(@int_pack_order)[0]
-  end
-
-  def get_long
-    i1 = get_int
-    i2 = get_int
-    if @int_pack_order == 'N'
-      (i1 << 32) + i2
-    else
-      (i2 << 32) + i1
-    end
-  end
-
-  def get_double
-    check_read_length(8)
-    vals = ""
-    (@cursor..@cursor+7).each { |i| vals << @buf[i].chr }
-    @cursor += 8
-    vals.unpack(@double_pack_order)[0]
-  end
-
-  def more?
-    @cursor < @buf.size
-  end
-
-  def to_a
-    if @buf.respond_to? "unpack"
-      @buf.unpack("C*")
-    else
-      @buf
-    end
-  end
-
-  def unpack(args)
-    to_a
-  end
-
-  def to_s
-    if @buf.respond_to? :fast_pack
-      @buf.fast_pack
-    elsif @buf.respond_to? "pack"
-      @buf.pack("C*")
-    else
-      @buf
-    end
-  end
-
-  def dump
-    @buf.each_with_index { |c, i| $stderr.puts "#{'%04d' % i}: #{'%02x' % c} #{'%03o' % c} #{'%s' % c.chr} #{'%3d' % c}" }
-  end
-
-  private
-
-  def check_read_length(len)
-    raise "attempt to read past end of buffer" if @cursor + len > @buf.length
-  end
-
-end
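Note on the removed class: ByteBuffer defaults to little-endian packing via Ruby's 'V' (32-bit int) and 'E' (double) pack codes, and INT_LOOKUP simply caches the byte arrays for a handful of common wire-protocol opcodes. A minimal standalone sketch (plain Ruby, not part of the driver's API) showing that equivalence:

    # Each cached INT_LOOKUP entry equals the little-endian bytes Array#pack('V') produces.
    [0, 1, 2001, 2004, 2006].each do |i|
      bytes = [i].pack('V').unpack('C*')
      puts "#{i} => #{bytes.inspect}"   # e.g. 2001 => [209, 7, 0, 0]
    end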
data/lib/mongo/util/ordered_hash.rb
DELETED
@@ -1,140 +0,0 @@
-# --
-# Copyright (C) 2008-2010 10gen Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ++
-
-# A hash in which the order of keys are preserved.
-#
-# Under Ruby 1.9 and greater, this class has no added methods because Ruby's
-# Hash already keeps its keys ordered by order of insertion.
-class OrderedHash < Hash
-
-  def ==(other)
-    begin
-      !other.nil? &&
-        keys == other.keys &&
-        values == other.values
-    rescue
-      false
-    end
-  end
-
-  # We only need the body of this class if the RUBY_VERSION is before 1.9
-  if RUBY_VERSION < '1.9'
-    attr_accessor :ordered_keys
-
-    def self.[] *args
-      oh = OrderedHash.new
-      if Hash === args[0]
-        oh.merge! args[0]
-      elsif (args.size % 2) != 0
-        raise ArgumentError, "odd number of elements for Hash"
-      else
-        0.step(args.size - 1, 2) do |key|
-          value = key + 1
-          oh[args[key]] = args[value]
-        end
-      end
-      oh
-    end
-
-    def initialize(*a, &b)
-      super
-      @ordered_keys = []
-    end
-
-    def keys
-      @ordered_keys || []
-    end
-
-    def []=(key, value)
-      @ordered_keys ||= []
-      @ordered_keys << key unless @ordered_keys.include?(key)
-      super(key, value)
-    end
-
-    def each
-      @ordered_keys ||= []
-      @ordered_keys.each { |k| yield k, self[k] }
-      self
-    end
-    alias :each_pair :each
-
-    def to_a
-      @ordered_keys ||= []
-      @ordered_keys.map { |k| [k, self[k]] }
-    end
-
-    def values
-      collect { |k, v| v }
-    end
-
-    def merge(other)
-      oh = self.dup
-      oh.merge!(other)
-      oh
-    end
-
-    def merge!(other)
-      @ordered_keys ||= []
-      @ordered_keys += other.keys # unordered if not an OrderedHash
-      @ordered_keys.uniq!
-      super(other)
-    end
-
-    alias :update :merge!
-
-    def inspect
-      str = '{'
-      str << (@ordered_keys || []).collect { |k| "\"#{k}\"=>#{self.[](k).inspect}" }.join(", ")
-      str << '}'
-    end
-
-    def delete(key, &block)
-      @ordered_keys.delete(key) if @ordered_keys
-      super
-    end
-
-    def delete_if(&block)
-      self.each { |k,v|
-        if yield k, v
-          delete(k)
-        end
-      }
-    end
-
-    def clear
-      super
-      @ordered_keys = []
-    end
-
-    def hash
-      code = 17
-      each_pair do |key, value|
-        code = 37 * code + key.hash
-        code = 37 * code + value.hash
-      end
-      code & 0x7fffffff
-    end
-
-    def eql?(o)
-      if o.instance_of? OrderedHash
-        self.hash == o.hash
-      else
-        false
-      end
-    end
-
-  end
-end
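Note on the removed class: the body of OrderedHash is only defined when RUBY_VERSION < '1.9', since 1.9+ Hashes already iterate in insertion order. A minimal sketch of the behavior it guaranteed on 1.8, using only methods visible in the deleted code above:

    # Keys come back in insertion order, not sort order.
    oh = OrderedHash.new
    oh['b'] = 1
    oh['a'] = 2
    oh['c'] = 3
    oh.keys         # => ["b", "a", "c"]
    oh.to_a         # => [["b", 1], ["a", 2], ["c", 3]]
    oh.merge!('d' => 4)
    oh.keys.last    # => "d"  (merged keys are appended)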
data/test/binary_test.rb
DELETED
@@ -1,15 +0,0 @@
-# encoding:utf-8
-require 'test/test_helper'
-
-class BinaryTest < Test::Unit::TestCase
-  context "Inspecting" do
-    setup do
-      @data = ("THIS IS BINARY " * 50).unpack("c*")
-    end
-
-    should "not display actual data" do
-      binary = Mongo::Binary.new(@data)
-      assert_equal "<Mongo::Binary:#{binary.object_id}>", binary.inspect
-    end
-  end
-end
data/test/bson_test.rb
DELETED
@@ -1,459 +0,0 @@
-# encoding:utf-8
-require 'test/test_helper'
-require 'complex'
-require 'bigdecimal'
-require 'rational'
-
-begin
-  require 'active_support/core_ext'
-  require 'active_support/hash_with_indifferent_access'
-  Time.zone = "Pacific Time (US & Canada)"
-  Zone = Time.zone.now
-rescue LoadError
-  warn 'Could not test BSON with HashWithIndifferentAccess.'
-  module ActiveSupport
-    class TimeWithZone
-    end
-  end
-  Zone = ActiveSupport::TimeWithZone.new
-end
-
-class BSONTest < Test::Unit::TestCase
-
-  include Mongo
-
-  def test_string
-    doc = {'doc' => 'hello, world'}
-    bson = bson = BSON.serialize(doc)
-    assert_equal doc, BSON.deserialize(bson)
-  end
-
-  def test_valid_utf8_string
-    doc = {'doc' => 'aé'}
-    bson = bson = BSON.serialize(doc)
-    assert_equal doc, BSON.deserialize(bson)
-  end
-
-  def test_valid_utf8_key
-    doc = {'aé' => 'hello'}
-    bson = bson = BSON.serialize(doc)
-    assert_equal doc, BSON.deserialize(bson)
-  end
-
-  def test_document_length
-    doc = {'name' => 'a' * 5 * 1024 * 1024}
-    assert_raise InvalidDocument do
-      assert BSON.serialize(doc)
-    end
-  end
-
-  # In 1.8 we test that other string encodings raise an exception.
-  # In 1.9 we test that they get auto-converted.
-  if RUBY_VERSION < '1.9'
-    require 'iconv'
-    def test_invalid_string
-      string = Iconv.conv('iso-8859-1', 'utf-8', 'aé')
-      doc = {'doc' => string}
-      assert_raise InvalidStringEncoding do
-        BSON.serialize(doc)
-      end
-    end
-
-    def test_invalid_key
-      key = Iconv.conv('iso-8859-1', 'utf-8', 'aé')
-      doc = {key => 'hello'}
-      assert_raise InvalidStringEncoding do
-        BSON.serialize(doc)
-      end
-    end
-  else
-    def test_non_utf8_string
-      bson = BSON.serialize({'str' => 'aé'.encode('iso-8859-1')})
-      result = BSON.deserialize(bson)['str']
-      assert_equal 'aé', result
-      assert_equal 'UTF-8', result.encoding.name
-    end
-
-    def test_non_utf8_key
-      bson = BSON.serialize({'aé'.encode('iso-8859-1') => 'hello'})
-      assert_equal 'hello', BSON.deserialize(bson)['aé']
-    end
-  end
-
-  def test_code
-    doc = {'$where' => Code.new('this.a.b < this.b')}
-    bson = BSON.serialize(doc)
-    assert_equal doc, BSON.deserialize(bson)
-  end
-
-  def test_number
-    doc = {'doc' => 41.99}
-    bson = BSON.serialize(doc)
-    assert_equal doc, BSON.deserialize(bson)
-  end
-
-  def test_int
-    doc = {'doc' => 42}
-    bson = BSON.serialize(doc)
-    assert_equal doc, BSON.deserialize(bson)
-
-    doc = {"doc" => -5600}
-    bson = BSON.serialize(doc)
-    assert_equal doc, BSON.deserialize(bson)
-
-    doc = {"doc" => 2147483647}
-    bson = BSON.serialize(doc)
-    assert_equal doc, BSON.deserialize(bson)
-
-    doc = {"doc" => -2147483648}
-    bson = BSON.serialize(doc)
-    assert_equal doc, BSON.deserialize(bson)
-  end
-
-  def test_ordered_hash
-    doc = OrderedHash.new
-    doc["b"] = 1
-    doc["a"] = 2
-    doc["c"] = 3
-    doc["d"] = 4
-    bson = BSON.serialize(doc)
-    assert_equal doc, BSON.deserialize(bson)
-  end
-
-  def test_object
-    doc = {'doc' => {'age' => 42, 'name' => 'Spongebob', 'shoe_size' => 9.5}}
-    bson = BSON.serialize(doc)
-    assert_equal doc, BSON.deserialize(bson)
-  end
-
-  def test_oid
-    doc = {'doc' => ObjectID.new}
-    bson = BSON.serialize(doc)
-    assert_equal doc, BSON.deserialize(bson)
-  end
-
-  def test_array
-    doc = {'doc' => [1, 2, 'a', 'b']}
-    bson = BSON.serialize(doc)
-    assert_equal doc, BSON.deserialize(bson)
-  end
-
-  def test_regex
-    doc = {'doc' => /foobar/i}
-    bson = BSON.serialize(doc)
-    doc2 = BSON.deserialize(bson)
-    assert_equal doc, doc2
-
-    r = doc2['doc']
-    assert_kind_of Regexp, r
-
-    r = RegexpOfHolding.new('st', 0, 'zywcab')
-    assert_equal 'zywcab', r.extra_options_str
-
-    doc = {'doc' => r}
-    bson_doc = BSON.serialize(doc)
-    doc2 = nil
-    doc2 = BSON.deserialize(bson_doc)
-    assert_equal doc, doc2
-
-    r = doc2['doc']
-    assert_kind_of RegexpOfHolding, r
-    assert_equal 'abcwyz', r.extra_options_str # must be sorted
-  end
-
-  def test_boolean
-    doc = {'doc' => true}
-    bson = BSON.serialize(doc)
-    assert_equal doc, BSON.deserialize(bson)
-  end
-
-  def test_date
-    doc = {'date' => Time.now}
-    bson = BSON.serialize(doc)
-    doc2 = BSON.deserialize(bson)
-    # Mongo only stores up to the millisecond
-    assert_in_delta doc['date'], doc2['date'], 0.001
-  end
-
-  def test_date_returns_as_utc
-    doc = {'date' => Time.now}
-    bson = BSON.serialize(doc)
-    doc2 = BSON.deserialize(bson)
-    assert doc2['date'].utc?
-  end
-
-  def test_date_before_epoch
-    begin
-      doc = {'date' => Time.utc(1600)}
-      bson = BSON.serialize(doc)
-      doc2 = BSON.deserialize(bson)
-      # Mongo only stores up to the millisecond
-      assert_in_delta doc['date'], doc2['date'], 0.001
-    rescue ArgumentError
-      # some versions of Ruby won't let you create pre-epoch Time instances
-      #
-      # TODO figure out how that will work if somebady has saved data
-      # w/ early dates already and is just querying for it.
-    end
-  end
-
-  def test_exeption_on_using_unsupported_date_class
-    [DateTime.now, Date.today, Zone].each do |invalid_date|
-      doc = {:date => invalid_date}
-      begin
-        bson = BSON.serialize(doc)
-      rescue => e
-      ensure
-        assert_equal InvalidDocument, e.class
-        assert_match /UTC Time/, e.message
-      end
-    end
-  end
-
-  def test_dbref
-    oid = ObjectID.new
-    doc = {}
-    doc['dbref'] = DBRef.new('namespace', oid)
-    bson = BSON.serialize(doc)
-    doc2 = BSON.deserialize(bson)
-    assert_equal 'namespace', doc2['dbref'].namespace
-    assert_equal oid, doc2['dbref'].object_id
-  end
-
-  def test_symbol
-    doc = {'sym' => :foo}
-    bson = BSON.serialize(doc)
-    doc2 = BSON.deserialize(bson)
-    assert_equal :foo, doc2['sym']
-  end
-
-  def test_binary
-    bin = Binary.new
-    'binstring'.each_byte { |b| bin.put(b) }
-
-    doc = {'bin' => bin}
-    bson = BSON.serialize(doc)
-    doc2 = BSON.deserialize(bson)
-    bin2 = doc2['bin']
-    assert_kind_of Binary, bin2
-    assert_equal 'binstring', bin2.to_s
-    assert_equal Binary::SUBTYPE_BYTES, bin2.subtype
-  end
-
-  def test_binary_type
-    bin = Binary.new([1, 2, 3, 4, 5], Binary::SUBTYPE_USER_DEFINED)
-
-    doc = {'bin' => bin}
-    bson = BSON.serialize(doc)
-    doc2 = BSON.deserialize(bson)
-    bin2 = doc2['bin']
-    assert_kind_of Binary, bin2
-    assert_equal [1, 2, 3, 4, 5], bin2.to_a
-    assert_equal Binary::SUBTYPE_USER_DEFINED, bin2.subtype
-  end
-
-  def test_binary_byte_buffer
-    bb = ByteBuffer.new
-    5.times { |i| bb.put(i + 1) }
-
-    doc = {'bin' => bb}
-    bson = BSON.serialize(doc)
-    doc2 = BSON.deserialize(bson)
-    bin2 = doc2['bin']
-    assert_kind_of Binary, bin2
-    assert_equal [1, 2, 3, 4, 5], bin2.to_a
-    assert_equal Binary::SUBTYPE_BYTES, bin2.subtype
-  end
-
-  def test_put_id_first
-    val = OrderedHash.new
-    val['not_id'] = 1
-    val['_id'] = 2
-    roundtrip = BSON.deserialize(BSON.serialize(val, false, true).to_a)
-    assert_kind_of OrderedHash, roundtrip
-    assert_equal '_id', roundtrip.keys.first
-
-    val = {'a' => 'foo', 'b' => 'bar', :_id => 42, 'z' => 'hello'}
-    roundtrip = BSON.deserialize(BSON.serialize(val, false, true).to_a)
-    assert_kind_of OrderedHash, roundtrip
-    assert_equal '_id', roundtrip.keys.first
-  end
-
-  def test_nil_id
-    doc = {"_id" => nil}
-    assert_equal doc, BSON.deserialize(bson = BSON.serialize(doc, false, true).to_a)
-  end
-
-  def test_timestamp
-    val = {"test" => [4, 20]}
-    assert_equal val, BSON.deserialize([0x13, 0x00, 0x00, 0x00,
-                                        0x11, 0x74, 0x65, 0x73,
-                                        0x74, 0x00, 0x04, 0x00,
-                                        0x00, 0x00, 0x14, 0x00,
-                                        0x00, 0x00, 0x00])
-  end
-
-  def test_overflow
-    doc = {"x" => 2**75}
-    assert_raise RangeError do
-      bson = BSON.serialize(doc)
-    end
-
-    doc = {"x" => 9223372036854775}
-    assert_equal doc, BSON.deserialize(BSON.serialize(doc).to_a)
-
-    doc = {"x" => 9223372036854775807}
-    assert_equal doc, BSON.deserialize(BSON.serialize(doc).to_a)
-
-    doc["x"] = doc["x"] + 1
-    assert_raise RangeError do
-      bson = BSON.serialize(doc)
-    end
-
-    doc = {"x" => -9223372036854775}
-    assert_equal doc, BSON.deserialize(BSON.serialize(doc).to_a)
-
-    doc = {"x" => -9223372036854775808}
-    assert_equal doc, BSON.deserialize(BSON.serialize(doc).to_a)
-
-    doc["x"] = doc["x"] - 1
-    assert_raise RangeError do
-      bson = BSON.serialize(doc)
-    end
-  end
-
-  def test_invalid_numeric_types
-    [BigDecimal.new("1.0"), Complex(0, 1), Rational(2, 3)].each do |type|
-      doc = {"x" => type}
-      begin
-        BSON.serialize(doc)
-      rescue => e
-      ensure
-        assert_equal InvalidDocument, e.class
-        assert_match /Cannot serialize/, e.message
-      end
-    end
-  end
-
-  def test_do_not_change_original_object
-    val = OrderedHash.new
-    val['not_id'] = 1
-    val['_id'] = 2
-    assert val.keys.include?('_id')
-    BSON.serialize(val)
-    assert val.keys.include?('_id')
-
-    val = {'a' => 'foo', 'b' => 'bar', :_id => 42, 'z' => 'hello'}
-    assert val.keys.include?(:_id)
-    BSON.serialize(val)
-    assert val.keys.include?(:_id)
-  end
-
-  # note we only test for _id here because in the general case we will
-  # write duplicates for :key and "key". _id is a special case because
-  # we call has_key? to check for it's existance rather than just iterating
-  # over it like we do for the rest of the keys. thus, things like
-  # HashWithIndifferentAccess can cause problems for _id but not for other
-  # keys. rather than require rails to test with HWIA directly, we do this
-  # somewhat hacky test.
-  def test_no_duplicate_id
-    dup = {"_id" => "foo", :_id => "foo"}
-    one = {"_id" => "foo"}
-
-    assert_equal BSON.serialize(one).to_a, BSON.serialize(dup).to_a
-  end
-
-  def test_no_duplicate_id_when_moving_id
-    dup = {"_id" => "foo", :_id => "foo"}
-    one = {:_id => "foo"}
-
-    assert_equal BSON.serialize(one, false, true).to_s, BSON.serialize(dup, false, true).to_s
-  end
-
-  def test_null_character
-    doc = {"a" => "\x00"}
-
-    assert_equal doc, BSON.deserialize(BSON.serialize(doc).to_a)
-
-    assert_raise InvalidDocument do
-      BSON.serialize({"\x00" => "a"})
-    end
-
-    assert_raise InvalidDocument do
-      BSON.serialize({"a" => (Regexp.compile "ab\x00c")})
-    end
-  end
-
-  def test_max_key
-    doc = {"a" => MaxKey.new}
-
-    assert_equal doc, BSON.deserialize(BSON.serialize(doc).to_a)
-  end
-
-  def test_min_key
-    doc = {"a" => MinKey.new}
-
-    assert_equal doc, BSON.deserialize(BSON.serialize(doc).to_a)
-  end
-
-  def test_invalid_object
-    o = Object.new
-    assert_raise InvalidDocument do
-      BSON.serialize({:foo => o})
-    end
-
-    assert_raise InvalidDocument do
-      BSON.serialize({:foo => Date.today})
-    end
-  end
-
-  def test_move_id
-    a = OrderedHash.new
-    a['text'] = 'abc'
-    a['key'] = 'abc'
-    a['_id'] = 1
-
-
-    assert_equal ")\000\000\000\020_id\000\001\000\000\000\002text" +
-                 "\000\004\000\000\000abc\000\002key\000\004\000\000\000abc\000\000",
-                 BSON.serialize(a, false, true).to_s
-    assert_equal ")\000\000\000\002text\000\004\000\000\000abc\000\002key" +
-                 "\000\004\000\000\000abc\000\020_id\000\001\000\000\000\000",
-                 BSON.serialize(a, false, false).to_s
-  end
-
-  def test_move_id_with_nested_doc
-    b = OrderedHash.new
-    b['text'] = 'abc'
-    b['_id'] = 2
-    c = OrderedHash.new
-    c['text'] = 'abc'
-    c['hash'] = b
-    c['_id'] = 3
-    assert_equal ">\000\000\000\020_id\000\003\000\000\000\002text" +
-                 "\000\004\000\000\000abc\000\003hash\000\034\000\000" +
-                 "\000\002text\000\004\000\000\000abc\000\020_id\000\002\000\000\000\000\000",
-                 BSON.serialize(c, false, true).to_s
-    assert_equal ">\000\000\000\002text\000\004\000\000\000abc\000\003hash" +
-                 "\000\034\000\000\000\002text\000\004\000\000\000abc\000\020_id" +
-                 "\000\002\000\000\000\000\020_id\000\003\000\000\000\000",
-                 BSON.serialize(c, false, false).to_s
-  end
-
-  if defined?(HashWithIndifferentAccess)
-    def test_keep_id_with_hash_with_indifferent_access
-      doc = HashWithIndifferentAccess.new
-      embedded = HashWithIndifferentAccess.new
-      embedded['_id'] = ObjectID.new
-      doc['_id'] = ObjectID.new
-      doc['embedded'] = [embedded]
-      BSON.serialize(doc, false, true).to_a
-      assert doc.has_key?("_id")
-      assert doc['embedded'][0].has_key?("_id")
-
-      doc['_id'] = ObjectID.new
-      BSON.serialize(doc, false, true).to_a
-      assert doc.has_key?("_id")
-    end
-  end
-end
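Note on the removed tests: most of the deleted assertions reduce to a serialize/deserialize round trip through the driver's BSON class, with the extra boolean arguments seen in the _id tests (e.g. BSON.serialize(doc, false, true)) requesting that _id be moved to the front of the document, as test_put_id_first and test_move_id exercise. A minimal sketch of that round-trip pattern, taken from the deleted test_string:

    doc  = {'doc' => 'hello, world'}
    bson = BSON.serialize(doc)
    BSON.deserialize(bson) == doc   # => true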