kbaum-mongo 0.18.3p
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
- data/LICENSE.txt +202 -0
- data/README.rdoc +339 -0
- data/Rakefile +138 -0
- data/bin/bson_benchmark.rb +59 -0
- data/bin/fail_if_no_c.rb +11 -0
- data/examples/admin.rb +42 -0
- data/examples/capped.rb +22 -0
- data/examples/cursor.rb +48 -0
- data/examples/gridfs.rb +88 -0
- data/examples/index_test.rb +126 -0
- data/examples/info.rb +31 -0
- data/examples/queries.rb +70 -0
- data/examples/simple.rb +24 -0
- data/examples/strict.rb +35 -0
- data/examples/types.rb +36 -0
- data/lib/mongo/collection.rb +609 -0
- data/lib/mongo/connection.rb +672 -0
- data/lib/mongo/cursor.rb +403 -0
- data/lib/mongo/db.rb +555 -0
- data/lib/mongo/exceptions.rb +66 -0
- data/lib/mongo/gridfs/chunk.rb +91 -0
- data/lib/mongo/gridfs/grid.rb +79 -0
- data/lib/mongo/gridfs/grid_file_system.rb +101 -0
- data/lib/mongo/gridfs/grid_io.rb +338 -0
- data/lib/mongo/gridfs/grid_store.rb +580 -0
- data/lib/mongo/gridfs.rb +25 -0
- data/lib/mongo/types/binary.rb +52 -0
- data/lib/mongo/types/code.rb +36 -0
- data/lib/mongo/types/dbref.rb +40 -0
- data/lib/mongo/types/min_max_keys.rb +58 -0
- data/lib/mongo/types/objectid.rb +180 -0
- data/lib/mongo/types/regexp_of_holding.rb +45 -0
- data/lib/mongo/util/bson_c.rb +18 -0
- data/lib/mongo/util/bson_ruby.rb +606 -0
- data/lib/mongo/util/byte_buffer.rb +222 -0
- data/lib/mongo/util/conversions.rb +87 -0
- data/lib/mongo/util/ordered_hash.rb +140 -0
- data/lib/mongo/util/server_version.rb +69 -0
- data/lib/mongo/util/support.rb +26 -0
- data/lib/mongo.rb +63 -0
- data/mongo-ruby-driver.gemspec +28 -0
- data/test/auxillary/autoreconnect_test.rb +42 -0
- data/test/binary_test.rb +15 -0
- data/test/bson_test.rb +427 -0
- data/test/byte_buffer_test.rb +81 -0
- data/test/chunk_test.rb +82 -0
- data/test/collection_test.rb +515 -0
- data/test/connection_test.rb +160 -0
- data/test/conversions_test.rb +120 -0
- data/test/cursor_test.rb +379 -0
- data/test/db_api_test.rb +780 -0
- data/test/db_connection_test.rb +16 -0
- data/test/db_test.rb +272 -0
- data/test/grid_file_system_test.rb +210 -0
- data/test/grid_io_test.rb +78 -0
- data/test/grid_store_test.rb +334 -0
- data/test/grid_test.rb +87 -0
- data/test/objectid_test.rb +125 -0
- data/test/ordered_hash_test.rb +172 -0
- data/test/replica/count_test.rb +34 -0
- data/test/replica/insert_test.rb +50 -0
- data/test/replica/pooled_insert_test.rb +54 -0
- data/test/replica/query_test.rb +39 -0
- data/test/slave_connection_test.rb +36 -0
- data/test/test_helper.rb +42 -0
- data/test/threading/test_threading_large_pool.rb +90 -0
- data/test/threading_test.rb +87 -0
- data/test/unit/collection_test.rb +61 -0
- data/test/unit/connection_test.rb +117 -0
- data/test/unit/cursor_test.rb +93 -0
- data/test/unit/db_test.rb +98 -0
- metadata +127 -0
data/test/grid_store_test.rb
ADDED
@@ -0,0 +1,334 @@
require 'test/test_helper'
require 'mongo/gridfs'

class GridStoreTest < Test::Unit::TestCase

  include Mongo
  include GridFS

  @@db = Connection.new(ENV['MONGO_RUBY_DRIVER_HOST'] || 'localhost',
                        ENV['MONGO_RUBY_DRIVER_PORT'] || Connection::DEFAULT_PORT).db('ruby-mongo-test')
  @@files = @@db.collection('fs.files')
  @@chunks = @@db.collection('fs.chunks')

  def setup
    @@chunks.remove
    @@files.remove
    GridStore.open(@@db, 'foobar', 'w') { |f| f.write("hello, world!") }
  end

  def teardown
    @@chunks.remove
    @@files.remove
    @@db.error
  end

  def test_exist
    assert GridStore.exist?(@@db, 'foobar')
    assert !GridStore.exist?(@@db, 'does_not_exist')
    assert !GridStore.exist?(@@db, 'foobar', 'another_root')
  end

  def test_list
    assert_equal ['foobar'], GridStore.list(@@db)
    assert_equal ['foobar'], GridStore.list(@@db, 'fs')
    assert_equal [], GridStore.list(@@db, 'my_fs')

    GridStore.open(@@db, 'test', 'w') { |f| f.write("my file") }

    assert_equal ['foobar', 'test'], GridStore.list(@@db)
  end

  def test_small_write
    rows = @@files.find({'filename' => 'foobar'}).to_a
    assert_not_nil rows
    assert_equal 1, rows.length
    row = rows[0]
    assert_not_nil row

    file_id = row['_id']
    assert_kind_of ObjectID, file_id
    rows = @@chunks.find({'files_id' => file_id}).to_a
    assert_not_nil rows
    assert_equal 1, rows.length
  end

  def test_small_file
    rows = @@files.find({'filename' => 'foobar'}).to_a
    assert_not_nil rows
    assert_equal 1, rows.length
    row = rows[0]
    assert_not_nil row
    assert_equal "hello, world!", GridStore.read(@@db, 'foobar')
  end

  def test_overwrite
    GridStore.open(@@db, 'foobar', 'w') { |f| f.write("overwrite") }
    assert_equal "overwrite", GridStore.read(@@db, 'foobar')
  end

  def test_read_length
    assert_equal "hello", GridStore.read(@@db, 'foobar', 5)
  end

  def test_read_with_and_without_length
    GridStore.open(@@db, 'read-types', 'w') do |f|
      f.write('hello, there')
    end

    GridStore.open(@@db, 'read-types', 'r') do |f|
      assert_equal 'hello, ', f.read(7)
      assert_equal 'there', f.read
    end
  end

  def test_access_length
    assert_equal 13, GridStore.new(@@db, 'foobar').length
  end

  # Also tests seek
  def test_read_with_offset
    assert_equal "world!", GridStore.read(@@db, 'foobar', nil, 7)
  end

  def test_seek
    GridStore.open(@@db, 'foobar', 'r') { |f|
      f.seek(0)
      assert_equal 'h', f.getc.chr
      f.seek(7)
      assert_equal 'w', f.getc.chr
      f.seek(4)
      assert_equal 'o', f.getc.chr

      f.seek(-1, IO::SEEK_END)
      assert_equal '!', f.getc.chr
      f.seek(-6, IO::SEEK_END)
      assert_equal 'w', f.getc.chr

      f.seek(0)
      f.seek(7, IO::SEEK_CUR)
      assert_equal 'w', f.getc.chr
      f.seek(-1, IO::SEEK_CUR)
      assert_equal 'w', f.getc.chr
      f.seek(-4, IO::SEEK_CUR)
      assert_equal 'o', f.getc.chr
      f.seek(3, IO::SEEK_CUR)
      assert_equal 'o', f.getc.chr
    }
  end

  def test_multi_chunk
    @@chunks.remove
    @@files.remove

    size = 512
    GridStore.open(@@db, 'biggie', 'w') { |f|
      f.chunk_size = size
      f.write('x' * size)
      f.write('y' * size)
      f.write('z' * size)
    }

    assert_equal 3, @@chunks.count
    #assert_equal ('x' * size) + ('y' * size) + ('z' * size), GridStore.read(@@db, 'biggie')
  end

  def test_binary
    file = File.open(File.join(File.dirname(__FILE__), 'data', 'data.tar.gz'), 'r')
    GridStore.open(@@db, 'zip', 'w') do |f|
      f.write(file.read)
    end

    file.rewind
    GridStore.open(@@db, 'zip', 'r') do |f|
      assert_equal file.read.length, f.read.length
    end
  end

  def test_puts_and_readlines
    GridStore.open(@@db, 'multiline', 'w') { |f|
      f.puts "line one"
      f.puts "line two\n"
      f.puts "line three"
    }

    lines = GridStore.readlines(@@db, 'multiline')
    assert_equal ["line one\n", "line two\n", "line three\n"], lines
  end

  def test_unlink
    assert_equal 1, @@files.count
    assert_equal 1, @@chunks.count
    GridStore.unlink(@@db, 'foobar')
    assert_equal 0, @@files.count
    assert_equal 0, @@chunks.count
  end

  def test_unlink_alternate_root_collection
    GridStore.default_root_collection = 'gridfs'
    GridStore.open(@@db, 'foobar', 'w') do |f|
      f.puts "Hello"
    end
    assert GridStore.exist?(@@db, 'foobar')

    GridStore.default_root_collection = 'fs'
    GridStore.unlink(@@db, 'foobar')
    assert !GridStore.exist?(@@db, 'foobar')

    GridStore.default_root_collection = 'gridfs'
    GridStore.unlink(@@db, 'foobar')
    assert !GridStore.exist?(@@db, 'foobar')
  end

  def test_mv
    assert_equal 1, @@files.count
    assert_equal 1, @@chunks.count
    GridStore.mv(@@db, 'foobar', 'bazqux')
    assert_equal 1, @@files.count
    assert_equal 1, @@chunks.count
    assert !GridStore.exist?(@@db, 'foobar')
    assert GridStore.exist?(@@db, 'bazqux')
  end

  def test_append
    GridStore.open(@@db, 'foobar', 'w+') { |f| f.write(" how are you?") }
    assert_equal 1, @@chunks.count
    assert_equal "hello, world! how are you?", GridStore.read(@@db, 'foobar')
  end

  def test_rewind_and_truncate_on_write
    GridStore.open(@@db, 'foobar', 'w') { |f|
      f.write("some text is inserted here")
      f.rewind
      f.write("abc")
    }
    assert_equal "abc", GridStore.read(@@db, 'foobar')
  end

  def test_tell
    GridStore.open(@@db, 'foobar', 'r') { |f|
      f.read(5)
      assert_equal 5, f.tell
    }
  end

  def test_empty_block_ok
    GridStore.open(@@db, 'empty', 'w')
  end

  def test_save_empty_file
    @@chunks.remove
    @@files.remove
    GridStore.open(@@db, 'empty', 'w') {} # re-write with zero bytes
    assert_equal 1, @@files.count
    assert_equal 0, @@chunks.count
  end

  def test_empty_file_eof
    GridStore.open(@@db, 'empty', 'w')
    GridStore.open(@@db, 'empty', 'r') { |f|
      assert f.eof?
    }
  end

  def test_cannot_change_chunk_size_on_read
    begin
      GridStore.open(@@db, 'foobar', 'r') { |f| f.chunk_size = 42 }
      fail "should have seen error"
    rescue => ex
      assert_match /error: can only change chunk size/, ex.to_s
    end
  end

  def test_cannot_change_chunk_size_after_data_written
    begin
      GridStore.open(@@db, 'foobar', 'w') { |f|
        f.write("some text")
        f.chunk_size = 42
      }
      fail "should have seen error"
    rescue => ex
      assert_match /error: can only change chunk size/, ex.to_s
    end
  end

  def test_change_chunk_size
    GridStore.open(@@db, 'new-file', 'w') { |f|
      f.chunk_size = 42
      f.write("foo")
    }
    GridStore.open(@@db, 'new-file', 'r') { |f|
      assert f.chunk_size == 42
    }
  end

  def test_chunk_size_in_option
    GridStore.open(@@db, 'new-file', 'w', :chunk_size => 42) { |f| f.write("foo") }
    GridStore.open(@@db, 'new-file', 'r') { |f|
      assert f.chunk_size == 42
    }
  end

  def test_md5
    GridStore.open(@@db, 'new-file', 'w') { |f| f.write("hello world\n")}
    GridStore.open(@@db, 'new-file', 'r') { |f|
      assert f.md5 == '6f5902ac237024bdd0c176cb93063dc4'
      begin
        f.md5 = 'cant do this'
        fail "should have seen error"
      rescue => ex
        true
      end
    }
    GridStore.open(@@db, 'new-file', 'w') {}
    GridStore.open(@@db, 'new-file', 'r') { |f|
      assert f.md5 == 'd41d8cd98f00b204e9800998ecf8427e'
    }
  end

  def test_upload_date
    now = Time.now
    orig_file_upload_date = nil
    GridStore.open(@@db, 'foobar', 'r') { |f| orig_file_upload_date = f.upload_date }
    assert_not_nil orig_file_upload_date
    assert (orig_file_upload_date - now) < 5 # even a really slow system < 5 secs

    sleep(2)
    GridStore.open(@@db, 'foobar', 'w') { |f| f.write "new data" }
    file_upload_date = nil
    GridStore.open(@@db, 'foobar', 'r') { |f| file_upload_date = f.upload_date }
    assert_equal orig_file_upload_date, file_upload_date
  end

  def test_content_type
    ct = nil
    GridStore.open(@@db, 'foobar', 'r') { |f| ct = f.content_type }
    assert_equal GridStore::DEFAULT_CONTENT_TYPE, ct

    GridStore.open(@@db, 'foobar', 'w+') { |f| f.content_type = 'text/html' }
    ct2 = nil
    GridStore.open(@@db, 'foobar', 'r') { |f| ct2 = f.content_type }
    assert_equal 'text/html', ct2
  end

  def test_content_type_option
    GridStore.open(@@db, 'new-file', 'w', :content_type => 'image/jpg') { |f| f.write('foo') }
    ct = nil
    GridStore.open(@@db, 'new-file', 'r') { |f| ct = f.content_type }
    assert_equal 'image/jpg', ct
  end

  def test_unknown_mode
    GridStore.open(@@db, 'foobar', 'x')
    fail 'should have seen "illegal mode" error raised'
  rescue => ex
    assert_equal "error: illegal mode x", ex.to_s
  end

  def test_metadata
    GridStore.open(@@db, 'foobar', 'r') { |f| assert_nil f.metadata }
    GridStore.open(@@db, 'foobar', 'w+') { |f| f.metadata = {'a' => 1} }
    GridStore.open(@@db, 'foobar', 'r') { |f| assert_equal({'a' => 1}, f.metadata) }
  end

end
data/test/grid_test.rb
ADDED
@@ -0,0 +1,87 @@
require 'test/test_helper'
include Mongo

class GridTest < Test::Unit::TestCase
  context "Tests:" do
    setup do
      @db ||= Connection.new(ENV['MONGO_RUBY_DRIVER_HOST'] || 'localhost',
                             ENV['MONGO_RUBY_DRIVER_PORT'] || Connection::DEFAULT_PORT).db('ruby-mongo-test')
      @files = @db.collection('test-fs.files')
      @chunks = @db.collection('test-fs.chunks')
    end

    teardown do
      @files.remove
      @chunks.remove
    end

    context "A basic grid-stored file" do
      setup do
        @data = "GRIDDATA" * 50000
        @grid = Grid.new(@db, 'test-fs')
        @id = @grid.put(@data, 'sample', :metadata => {'app' => 'photos'})
      end

      should "retrieve the stored data" do
        data = @grid.get(@id).data
        assert_equal @data, data
      end

      should "store the filename" do
        file = @grid.get(@id)
        assert_equal 'sample', file.filename
      end

      should "store any relevant metadata" do
        file = @grid.get(@id)
        assert_equal 'photos', file.metadata['app']
      end

      should "delete the file and any chunks" do
        @grid.delete(@id)
        assert_raise GridError do
          @grid.get(@id)
        end
      end
    end

    context "Streaming: " do || {}
      setup do
        def read_and_write_stream(filename, read_length, opts={})
          io = File.open(File.join(File.dirname(__FILE__), 'data', filename), 'r')
          id = @grid.put(io, filename + read_length.to_s, opts)
          file = @grid.get(id)
          io.rewind
          data = io.read
          if data.respond_to?(:force_encoding)
            data.force_encoding(:binary)
          end
          read_data = ""
          while(chunk = file.read(read_length))
            read_data << chunk
          end
          assert_equal data.length, read_data.length
          assert_equal data, read_data, "Unequal!"
        end

        @grid = Grid.new(@db, 'test-fs')
      end

      should "put and get a small io object with a small chunk size" do
        read_and_write_stream('small_data.txt', 1, :chunk_size => 2)
      end

      should "put and get a small io object" do
        read_and_write_stream('small_data.txt', 1)
      end

      should "put and get a large io object when reading smaller than the chunk size" do
        read_and_write_stream('sample_file.pdf', 256 * 1024)
      end

      should "put and get a large io object when reading larger than the chunk size" do
        read_and_write_stream('sample_file.pdf', 300 * 1024)
      end
    end
  end
end
data/test/objectid_test.rb
ADDED
@@ -0,0 +1,125 @@
require 'test/test_helper'

class ObjectIDTest < Test::Unit::TestCase

  include Mongo

  def setup
    @o = ObjectID.new
  end

  def test_hashcode
    assert_equal @o.instance_variable_get(:@data).hash, @o.hash
  end

  def test_array_uniq_for_equilavent_ids
    a = ObjectID.new('123')
    b = ObjectID.new('123')
    assert_equal a, b
    assert_equal 1, [a, b].uniq.size
  end

  def test_create_pk_method
    doc = {:name => 'Mongo'}
    doc = ObjectID.create_pk(doc)
    assert doc[:_id]

    doc = {:name => 'Mongo', :_id => '12345'}
    doc = ObjectID.create_pk(doc)
    assert_equal '12345', doc[:_id]
  end

  def test_different
    a = ObjectID.new
    b = ObjectID.new
    assert_not_equal a.to_a, b.to_a
    assert_not_equal a, b
  end

  def test_eql?
    o2 = ObjectID.new(@o.to_a)
    assert_equal @o, o2
  end

  def test_to_s
    s = @o.to_s
    assert_equal 24, s.length
    s =~ /^([0-9a-f]+)$/
    assert_equal 24, $1.length
  end

  def test_inspect
    assert_equal "ObjectID('#{@o.to_s}')", @o.inspect
  end

  def test_save_and_restore
    host = ENV['MONGO_RUBY_DRIVER_HOST'] || 'localhost'
    port = ENV['MONGO_RUBY_DRIVER_PORT'] || Connection::DEFAULT_PORT
    db = Connection.new(host, port).db('ruby-mongo-test')
    coll = db.collection('test')

    coll.remove
    coll << {'a' => 1, '_id' => @o}

    row = coll.find().collect.first
    assert_equal 1, row['a']
    assert_equal @o, row['_id']
  end

  def test_from_string
    hex_str = @o.to_s
    o2 = ObjectID.from_string(hex_str)
    assert_equal hex_str, o2.to_s
    assert_equal @o, o2
    assert_equal @o.to_s, o2.to_s
  end

  def test_illegal_from_string
    assert_raise InvalidObjectID do
      ObjectID.from_string("")
    end
  end

  def test_legal
    assert !ObjectID.legal?(nil)
    assert !ObjectID.legal?("fred")
    assert !ObjectID.legal?("0000")
    assert !ObjectID.legal?('000102030405060708090A0')
    assert ObjectID.legal?('000102030405060708090A0B')
    assert ObjectID.legal?('abcdefABCDEF123456789012')
    assert !ObjectID.legal?('abcdefABCDEF12345678901x')
  end

  def test_from_string_leading_zeroes
    hex_str = '000000000000000000000000'
    o = ObjectID.from_string(hex_str)
    assert_equal hex_str, o.to_s
  end

  def test_byte_order
    hex_str = '000102030405060708090A0B'
    o = ObjectID.from_string(hex_str)
    assert_equal [0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b], o.to_a
  end

  def test_generation_time
    time = Time.now
    id = ObjectID.new
    generated_time = id.generation_time

    assert_in_delta time.to_i, generated_time.to_i, 2
    assert_equal "UTC", generated_time.zone
  end

  def test_from_time
    time = Time.now.utc
    id = ObjectID.from_time(time)

    assert_equal time.to_i, id.generation_time.to_i
  end

  def test_json
    id = ObjectID.new
    assert_equal "{\"$oid\": \"#{id}\"}", id.to_json
  end
end
data/test/ordered_hash_test.rb
ADDED
@@ -0,0 +1,172 @@
require 'test/test_helper'

class OrderedHashTest < Test::Unit::TestCase

  def setup
    @oh = OrderedHash.new
    @oh['c'] = 1
    @oh['a'] = 2
    @oh['z'] = 3
    @ordered_keys = %w(c a z)
  end

  def test_initialize
    a = OrderedHash.new
    a['x'] = 1
    a['y'] = 2

    b = OrderedHash['x' => 1, 'y' => 2]
    assert_equal a, b
  end

  def test_hash_code
    o = OrderedHash.new
    o['number'] = 50
    assert o.hash
  end

  def test_empty
    assert_equal [], OrderedHash.new.keys
  end

  def test_uniq
    list = []
    doc = OrderedHash.new
    doc['_id'] = 'ab12'
    doc['name'] = 'test'

    same_doc = OrderedHash.new
    same_doc['_id'] = 'ab12'
    same_doc['name'] = 'test'
    list << doc
    list << same_doc

    assert_equal 2, list.size
    assert_equal 1, list.uniq.size
  end

  def test_equality
    a = OrderedHash.new
    a['x'] = 1
    a['y'] = 2

    b = OrderedHash.new
    b['y'] = 2
    b['x'] = 1

    c = OrderedHash.new
    c['x'] = 1
    c['y'] = 2

    d = OrderedHash.new
    d['x'] = 2
    d['y'] = 3

    e = OrderedHash.new
    e['z'] = 1
    e['y'] = 2

    assert_equal a, c
    assert_not_equal a, b
    assert_not_equal a, d
    assert_not_equal a, e
  end

  def test_order_preserved
    assert_equal @ordered_keys, @oh.keys
  end

  def test_to_a_order_preserved
    assert_equal @ordered_keys, @oh.to_a.map {|m| m.first}
  end

  def test_order_preserved_after_replace
    @oh['a'] = 42
    assert_equal @ordered_keys, @oh.keys
    @oh['c'] = 'foobar'
    assert_equal @ordered_keys, @oh.keys
    @oh['z'] = /huh?/
    assert_equal @ordered_keys, @oh.keys
  end

  def test_each
    keys = []
    @oh.each { |k, v| keys << k }
    assert_equal keys, @oh.keys

    @oh['z'] = 42
    assert_equal keys, @oh.keys

    assert_equal @oh, @oh.each {|k,v|}
  end

  def test_values
    assert_equal [1, 2, 3], @oh.values
  end

  def test_merge
    other = OrderedHash.new
    other['f'] = 'foo'
    noob = @oh.merge(other)
    assert_equal @ordered_keys + ['f'], noob.keys
    assert_equal [1, 2, 3, 'foo'], noob.values
  end

  def test_merge_bang
    other = OrderedHash.new
    other['f'] = 'foo'
    @oh.merge!(other)
    assert_equal @ordered_keys + ['f'], @oh.keys
    assert_equal [1, 2, 3, 'foo'], @oh.values
  end

  def test_merge_bang_with_overlap
    other = OrderedHash.new
    other['a'] = 'apple'
    other['c'] = 'crab'
    other['f'] = 'foo'
    @oh.merge!(other)
    assert_equal @ordered_keys + ['f'], @oh.keys
    assert_equal ['crab', 'apple', 3, 'foo'], @oh.values
  end

  def test_merge_bang_with_hash_with_overlap
    other = Hash.new
    other['a'] = 'apple'
    other['c'] = 'crab'
    other['f'] = 'foo'
    @oh.merge!(other)
    assert_equal @ordered_keys + ['f'], @oh.keys
    assert_equal ['crab', 'apple', 3, 'foo'], @oh.values
  end

  def test_update
    other = OrderedHash.new
    other['f'] = 'foo'
    noob = @oh.update(other)
    assert_equal @ordered_keys + ['f'], noob.keys
    assert_equal [1, 2, 3, 'foo'], noob.values
  end

  def test_inspect_retains_order
    assert_equal '{"c"=>1, "a"=>2, "z"=>3}', @oh.inspect
  end

  def test_clear
    @oh.clear
    assert @oh.keys.empty?
  end

  def test_delete
    assert @oh.keys.include?('z')
    @oh.delete('z')
    assert !@oh.keys.include?('z')
  end

  def test_delete_if
    assert @oh.keys.include?('z')
    @oh.delete_if { |k,v| k == 'z' }
    assert !@oh.keys.include?('z')
  end

end