monga 0.0.2 → 0.0.3
This diff shows the content of publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- data/.gitignore +1 -0
- data/.travis.yml +1 -0
- data/README.md +59 -3
- data/lib/monga/client.rb +51 -6
- data/lib/monga/clients/master_slave_client.rb +0 -5
- data/lib/monga/clients/replica_set_client.rb +32 -71
- data/lib/monga/clients/single_instance_client.rb +53 -0
- data/lib/monga/collection.rb +102 -41
- data/lib/monga/connection.rb +38 -13
- data/lib/monga/connection_pool.rb +6 -17
- data/lib/monga/connections/buffer.rb +33 -0
- data/lib/monga/connections/em_connection.rb +25 -56
- data/lib/monga/connections/em_proxy_connection.rb +80 -0
- data/lib/monga/connections/fibered_connection.rb +26 -0
- data/lib/monga/connections/fibered_proxy_connection.rb +23 -0
- data/lib/monga/connections/proxy_connection.rb +4 -0
- data/lib/monga/connections/tcp_connection.rb +57 -0
- data/lib/monga/cursor.rb +197 -95
- data/lib/monga/database.rb +175 -60
- data/lib/monga/{requests → protocol}/delete.rb +1 -2
- data/lib/monga/{requests → protocol}/get_more.rb +1 -1
- data/lib/monga/{requests → protocol}/insert.rb +1 -2
- data/lib/monga/{requests → protocol}/kill_cursors.rb +1 -1
- data/lib/monga/{requests → protocol}/query.rb +3 -3
- data/lib/monga/{requests → protocol}/update.rb +1 -1
- data/lib/monga/request.rb +27 -23
- data/lib/monga/utils/constants.rb +5 -0
- data/lib/monga/utils/exceptions.rb +11 -0
- data/lib/monga.rb +19 -11
- data/monga.gemspec +2 -2
- data/spec/helpers/mongodb.rb +115 -38
- data/spec/monga/block/collection_spec.rb +172 -0
- data/spec/monga/block/cursor_spec.rb +160 -0
- data/spec/monga/block/database_spec.rb +80 -0
- data/spec/monga/block/single_instance_client_spec.rb +31 -0
- data/spec/monga/em/collection_spec.rb +308 -0
- data/spec/monga/em/cursor_spec.rb +256 -0
- data/spec/monga/em/database_spec.rb +140 -0
- data/spec/monga/em/replica_set_client_spec.rb +86 -0
- data/spec/monga/em/single_instance_client_spec.rb +28 -0
- data/spec/monga/sync/collection_spec.rb +247 -0
- data/spec/monga/sync/cursor_spec.rb +211 -0
- data/spec/monga/sync/database_spec.rb +110 -0
- data/spec/monga/sync/replica_set_client_spec.rb +54 -0
- data/spec/monga/sync/single_instance_client_spec.rb +25 -0
- data/spec/spec_helper.rb +2 -20
- metadata +50 -38
- data/lib/monga/clients/client.rb +0 -24
- data/lib/monga/connections/primary.rb +0 -46
- data/lib/monga/connections/secondary.rb +0 -13
- data/lib/monga/exceptions.rb +0 -9
- data/lib/monga/miner.rb +0 -72
- data/lib/monga/response.rb +0 -11
- data/spec/helpers/truncate.rb +0 -15
- data/spec/monga/collection_spec.rb +0 -448
- data/spec/monga/connection_pool_spec.rb +0 -50
- data/spec/monga/connection_spec.rb +0 -64
- data/spec/monga/cursor_spec.rb +0 -186
- data/spec/monga/database_spec.rb +0 -67
- data/spec/monga/replica_set_client_spec.rb +0 -46
- data/spec/monga/requests/delete_spec.rb +0 -0
- data/spec/monga/requests/insert_spec.rb +0 -0
- data/spec/monga/requests/query_spec.rb +0 -28
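
The hunks below reproduce the added and changed spec files. As a quick orientation before the raw diff, here is a minimal sketch of the synchronous (fiber-based) API that the new `sync` specs exercise. It is distilled from the spec code in this diff, not from release notes; the `require` lines, the localhost default, and the printed output are assumptions.

```ruby
require 'em-synchrony'   # assumed: provides EM.synchrony, used throughout the specs
require 'monga'

EM.synchrony do
  # Client options copied from the specs: a :sync client with a connection pool.
  client     = Monga::Client.new(type: :sync, pool_size: 10)
  db         = client["dbTest"]
  collection = db["testCollection"]

  # safe_* helpers confirm each write before returning (getLastError semantics).
  collection.safe_insert(artist: "Madonna", title: "Track 1")

  docs = collection.find(artist: "Madonna").all
  puts docs.size

  EM.stop
end
```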
data/spec/monga/sync/collection_spec.rb
ADDED
@@ -0,0 +1,247 @@
+require 'spec_helper'
+
+describe Monga::Collection do
+  before do
+    EM.synchrony do
+      @client = Monga::Client.new(type: :sync, pool_size: 10)
+      @db = @client["dbTest"]
+      @collection = @db["testCollection"]
+      @collection.safe_remove
+      docs = []
+      10.times do |i|
+        docs << { artist: "Madonna", title: "Track #{i+1}" }
+        docs << { artist: "Radiohead", title: "Track #{i+1}" }
+      end
+      @collection.safe_insert(docs)
+      EM.stop
+    end
+  end
+
+  # QUERY
+
+  describe "query" do
+    it "should fetch all documents" do
+      EM.synchrony do
+        docs = @collection.find.all
+        docs.size.must_equal 20
+        EM.stop
+      end
+    end
+
+    it "should fetch all docs with skip and limit" do
+      EM.synchrony do
+        docs = @collection.find.skip(10).limit(4).all
+        docs.size.must_equal 4
+        EM.stop
+      end
+    end
+
+    it "should fetch first" do
+      EM.synchrony do
+        doc = @collection.first
+        doc.keys.must_equal ["_id", "artist", "title"]
+        EM.stop
+      end
+    end
+  end
+
+  # INSERT
+
+  describe "insert" do
+    before do
+      EM.synchrony do
+        @collection.safe_ensure_index({ "personal_id" => 1 }, { unique: true, sparse: true })
+        EM.stop
+      end
+    end
+
+    after do
+      EM.synchrony do
+        @collection.drop_index( personal_id: 1 )
+        EM.stop
+      end
+    end
+
+    it "should insert single doc" do
+      EM.synchrony do
+        doc = { name: "Peter", age: 18 }
+        @collection.safe_insert(doc)
+        resp = @collection.find(name: "Peter").all
+        resp.size.must_equal 1
+        resp.first["age"].must_equal 18
+        EM.stop
+      end
+    end
+
+    it "should insert batch of docs" do
+      EM.synchrony do
+        docs = [{ name: "Peter", age: 18 }, {name: "Jhon", age: 18}]
+        @collection.safe_insert(docs)
+        resp = @collection.find(age: 18).all
+        resp.size.must_equal 2
+        EM.stop
+      end
+    end
+
+    it "should fail on uniq index" do
+      EM.synchrony do
+        docs = [{ name: "Peter", age: 18, personal_id: 20 }, {name: "Jhon", age: 18, personal_id: 20}, {name: "Rebeca", age: 21, personal_id: 5}]
+        proc{ @collection.safe_insert(docs) }.must_raise Monga::Exceptions::QueryFailure
+        @collection.count.must_equal 21
+        EM.stop
+      end
+    end
+
+    it "should continue_on_error" do
+      EM.synchrony do
+        docs = [{ name: "Peter", age: 18, personal_id: 20 }, {name: "Jhon", age: 18, personal_id: 20}, {name: "Rebeca", age: 21, personal_id: 5}]
+        proc{ @collection.safe_insert(docs, continue_on_error: true) }.must_raise Monga::Exceptions::QueryFailure
+        @collection.count.must_equal 22
+        EM.stop
+      end
+    end
+  end
+
+  # UPDATE
+
+  describe "update" do
+    it "should make simple update (first matching)" do
+      EM.synchrony do
+        @collection.safe_update({ artist: "Madonna" }, { "$set" => { country: "USA" } })
+        @collection.count( query: { artist: "Madonna", country: "USA" }).must_equal 1
+        EM.stop
+      end
+    end
+
+    it "should create non existing item (upsert)" do
+      EM.synchrony do
+        @collection.safe_update({ artist: "Bjork" }, { "$set" => { country: "Iceland" } }, { upsert: true })
+        @collection.count(query: { artist: "Bjork" }).must_equal 1
+        EM.stop
+      end
+    end
+
+    it "should update all matching data (multi_update)" do
+      EM.synchrony do
+        @collection.safe_update({ artist: "Madonna" }, { "$set" => { country: "USA" } }, {multi_update: true})
+        docs = @collection.find(artist: "Madonna").all
+        docs.each{ |d| d["country"].must_equal "USA" }
+        EM.stop
+      end
+    end
+  end
+
+  # REMOVE
+
+  describe "remove" do
+    it "should delete all matching docs" do
+      EM.synchrony do
+        @collection.safe_delete(artist: "Madonna")
+        @collection.count(query: { artist: "Madonna" }).must_equal 0
+        EM.stop
+      end
+    end
+
+    it "should delete first matching doc (single_remove)" do
+      EM.synchrony do
+        @collection.safe_delete({ artist: "Madonna" }, single_remove: true)
+        @collection.count(query: { artist: "Madonna" }).must_equal 9
+        EM.stop
+      end
+    end
+  end
+
+  # COUNT
+
+  describe "count" do
+    it "should count all docs" do
+      EM.synchrony do
+        @collection.count.must_equal 20
+        EM.stop
+      end
+    end
+
+    it "should count all docs with query" do
+      EM.synchrony do
+        @collection.count(query: { artist: "Madonna" }).must_equal 10
+        EM.stop
+      end
+    end
+
+    it "should count all docs with limit" do
+      EM.synchrony do
+        @collection.count(query: { artist: "Madonna" }, limit: 5).must_equal 5
+        EM.stop
+      end
+    end
+
+    it "should count all docs with limit and skip" do
+      EM.synchrony do
+        @collection.count(query: { artist: "Madonna" }, limit: 5, skip: 6).must_equal 4
+        EM.stop
+      end
+    end
+  end
+
+  # ENSURE/DROP INDEX
+
+  describe "ensure_index" do
+    before do
+      EM.synchrony do
+        @collection.drop_indexes
+        EM.stop
+      end
+    end
+
+    it "should create index" do
+      EM.synchrony do
+        @collection.safe_ensure_index(title: 1)
+        docs = @collection.get_indexes
+        docs.any?{ |doc| doc["key"] == {"title" => 1}}.must_equal true
+        EM.stop
+      end
+    end
+
+    it "should create sparse index" do
+      EM.synchrony do
+        @collection.safe_ensure_index({ title: 1 }, sparse: true)
+        docs = @collection.get_indexes
+        docs.any?{ |doc| doc["key"] == {"title" => 1} && doc["sparse"] == true }.must_equal true
+        EM.stop
+      end
+    end
+
+    it "should create unique index" do
+      EM.synchrony do
+        @collection.safe_ensure_index({ some_field: 1 }, unique: true, sparse: true)
+        docs = @collection.get_indexes
+        docs.any?{ |doc| doc["key"] == {"some_field" => 1} && doc["unique"] == true }.must_equal true
+        EM.stop
+      end
+    end
+
+    it "should drop single index" do
+      EM.synchrony do
+        @collection.safe_ensure_index(title: 1)
+        docs = @collection.get_indexes
+        docs.any?{ |doc| doc["key"] == {"title" => 1}}.must_equal true
+        @collection.drop_index(title: 1)
+        docs = @collection.get_indexes
+        docs.any?{ |doc| doc["key"] == {"title" => 1}}.must_equal false
+        EM.stop
+      end
+    end
+
+    it "should drop all indexes (except primary on _id)" do
+      EM.synchrony do
+        @collection.safe_ensure_index(title: 1)
+        docs = @collection.get_indexes
+        docs.any?{ |doc| doc["key"] == {"title" => 1}}.must_equal true
+        @collection.drop_indexes
+        docs = @collection.get_indexes
+        docs.select{ |d| d["ns"] == "dbTest.testCollection" }.size.must_equal 1
+        EM.stop
+      end
+    end
+  end
+end
data/spec/monga/sync/cursor_spec.rb
ADDED
@@ -0,0 +1,211 @@
+require 'spec_helper'
+
+describe Monga::Cursor do
+  before do
+    EM.synchrony do
+      @client = Monga::Client.new(type: :sync, pool_size: 10)
+      @db = @client["dbTest"]
+      @collection = @db["testCollection"]
+      @collection.safe_remove
+      docs = []
+      10.times do |i|
+        docs << { artist: "Madonna", title: "Track #{i+1}" }
+        docs << { artist: "Radiohead", title: "Track #{i+1}" }
+      end
+      @collection.safe_insert(docs)
+      EM.stop
+    end
+  end
+
+  # ALL
+
+  describe "all" do
+    it "should find all" do
+      EM.synchrony do
+        docs = @collection.find.all
+        docs.size.must_equal 20
+        EM.stop
+      end
+    end
+
+    it "should find all with query" do
+      EM.synchrony do
+        docs = @collection.find(artist: "Madonna").all
+        docs.size.must_equal 10
+        docs.each{ |d| d["artist"].must_equal "Madonna" }
+        EM.stop
+      end
+    end
+
+    it "should find all with limit" do
+      EM.synchrony do
+        docs = @collection.find.limit(5).all
+        docs.size.must_equal 5
+        EM.stop
+      end
+    end
+
+    it "should find all with batch size" do
+      EM.synchrony do
+        docs = @collection.find.batch_size(2).all
+        docs.size.must_equal 20
+        EM.stop
+      end
+    end
+
+    it "should find all with skip" do
+      EM.synchrony do
+        docs = @collection.find.skip(10).all
+        docs.size.must_equal 10
+        EM.stop
+      end
+    end
+  end
+
+  # FIRST
+
+  describe "first" do
+    it "should fetch first with sort" do
+      EM.synchrony do
+        doc = @collection.find.sort(title: 1).first
+        doc["title"].must_equal "Track 1"
+        EM.stop
+      end
+    end
+
+    it "should fetch first with sort and skip" do
+      EM.synchrony do
+        doc = @collection.find.sort(title: 1).skip(2).first
+        doc["title"].must_equal "Track 10"
+        EM.stop
+      end
+    end
+  end
+
+  # NEXT_BATCH
+
+  describe "next_batch" do
+    it "should fetch batches" do
+      EM.synchrony do
+        cursor = @collection.find.batch_size(2).limit(3)
+        batch, more = cursor.next_batch
+        batch.size.must_equal 2
+        more.must_equal true
+        batch, more = cursor.next_batch
+        batch.size.must_equal 1
+        more.must_equal false
+        EM.stop
+      end
+    end
+  end
+
+  # EACH_BATCH
+
+  describe "each_batch" do
+    it "should fetch 3 items by batches" do
+      EM.synchrony do
+        docs = []
+        @collection.find.batch_size(2).limit(3).each_batch do |batch|
+          docs += batch
+        end
+        docs.size.must_equal 3
+        EM.stop
+      end
+    end
+  end
+
+  # NEXT_DOC
+
+  describe "next_doc" do
+    it "should fetch doc by doc" do
+      EM.synchrony do
+        cursor = @collection.find.limit(3).batch_size(2)
+        doc, more = cursor.next_doc
+        more.must_equal true
+        doc, more = cursor.next_doc
+        doc, more = cursor.next_doc
+        more.must_equal false
+        EM.stop
+      end
+    end
+  end
+
+  # # EACH_DOC
+
+  describe "each_doc" do
+    it "should iterate over some docs" do
+      EM.synchrony do
+        docs = []
+        @collection.find.limit(100).skip(15).batch_size(3).each_doc do |doc|
+          docs << doc
+        end
+        docs.size.must_equal 5
+        EM.stop
+      end
+    end
+
+    it "should iterate over all docs" do
+      EM.synchrony do
+        docs = []
+        @collection.find.batch_size(3).each_doc do |doc|
+          docs << doc
+        end
+        docs.size.must_equal 20
+        EM.stop
+      end
+    end
+  end
+
+  # KILL CURSOR
+
+  describe "kill" do
+    it "should work with kill" do
+      EM.synchrony do
+        cursor = @collection.find
+        batch, more = cursor.next_batch
+        cursor.kill
+        proc{ cursor.next_batch }.must_raise Monga::Exceptions::ClosedCursor
+        EM.stop
+      end
+    end
+  end
+
+  # TAILABLE CURSOR
+
+  describe "tailable cursor" do
+    before do
+      EM.synchrony do
+        @db.create_collection("testCapped", capped: true, size: 4*1024)
+        @capped = @db["testCapped"]
+        @capped.safe_insert(title: "Test")
+        EM.stop
+      end
+    end
+
+    after do
+      EM.synchrony do
+        @db["testCapped"].drop
+        EM.stop
+      end
+    end
+
+    it "should be tailable" do
+      EM.synchrony do
+        tailable_cursor = @capped.find.flag(tailable_cursor: true)
+        docs = []
+        tailable_cursor.each_doc do |doc|
+          @capped.insert(title: "New!")
+          if doc
+            docs << doc
+            if docs.size == 2
+              docs.map{ |d| d["title"] }.must_equal ["Test", "New!"]
+              break
+            end
+          end
+        end
+        tailable_cursor.kill
+        EM.stop
+      end
+    end
+  end
+end
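
The cursor spec above shows the incremental API: `next_batch` returns a `[batch, more]` pair, where `more` indicates whether the server-side cursor is still open. A hedged sketch of consuming a cursor batch by batch, assuming a `collection` set up as in the spec's `before` block:

```ruby
EM.synchrony do
  cursor = collection.find.batch_size(2).limit(3)

  loop do
    batch, more = cursor.next_batch   # [Array of docs, Boolean]
    batch.each { |doc| puts doc["title"] }
    break unless more                 # cursor exhausted or killed
  end

  EM.stop
end
```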
data/spec/monga/sync/database_spec.rb
ADDED
@@ -0,0 +1,110 @@
+require 'spec_helper'
+
+describe Monga::Database do
+  before do
+    EM.synchrony do
+      @client = Monga::Client.new
+      @db = @client["dbTest"]
+      @collection = @db["testCollection"]
+      @collection.safe_remove
+      EM.stop
+    end
+  end
+
+  after do
+    EM.synchrony do
+      @collection.safe_remove
+      EM.stop
+    end
+  end
+
+  it "should create and drop collection" do
+    EM.synchrony do
+      @db.create_collection("cappedCollection")
+      @db.list_collections["retval"].must_include "cappedCollection"
+      @db.drop_collection("cappedCollection")
+      @db.list_collections["retval"].wont_include "cappedCollection"
+      EM.stop
+    end
+  end
+
+  it "should count in collection" do
+    EM.synchrony do
+      @collection.safe_insert([{ title: 1 }, { title: 2 }])
+      @db.count("testCollection").must_equal 2
+      EM.stop
+    end
+  end
+
+  it "should eval javascript" do
+    EM.synchrony do
+      @db.eval("1+1")["retval"].must_equal 2.0
+      EM.stop
+    end
+  end
+
+  # INDEXES
+
+  describe "indexes" do
+    before do
+      EM.synchrony do
+        @db.drop_indexes("testCollection", "*")
+        EM.stop
+      end
+    end
+
+    it "should drop index" do
+      EM.synchrony do
+        @collection.safe_ensure_index(title: 1)
+        @collection.get_indexes.select{ |i| i["ns"] == "dbTest.testCollection" }.size.must_equal 2
+        @db.drop_indexes("testCollection", title: 1)
+        @collection.get_indexes.select{ |i| i["ns"] == "dbTest.testCollection" }.size.must_equal 1
+        EM.stop
+      end
+    end
+  end
+
+  # GET LAST ERROR
+
+  describe "getLastError" do
+    before do
+      EM.synchrony do
+        @collection.drop_indexes
+        @collection.safe_ensure_index({ personal_id: 1 }, { unique: true, sparse: true })
+        EM.stop
+      end
+    end
+
+    it "should get last error" do
+      EM.synchrony do
+        req = @collection.insert(name: "Peter", personal_id: 10)
+        @db.get_last_error(req.connection)["ok"].must_equal 1.0
+        req = @collection.insert(name: "Peter", personal_id: 10)
+        @db.get_last_error(req.connection).class.must_equal Monga::Exceptions::QueryFailure
+        EM.stop
+      end
+    end
+
+    it "should getLastError with fsync" do
+      EM.synchrony do
+        req = @collection.insert(name: "Peter", personal_id: 10)
+        @db.get_last_error(req.connection, fsync: true)["ok"].must_equal 1.0
+        req = @collection.insert(name: "Peter", personal_id: 10)
+        @db.get_last_error(req.connection, fsync: true).class.must_equal Monga::Exceptions::QueryFailure
+        EM.stop
+      end
+    end
+  end
+
+  # AGGREGATION
+
+  describe "aggregation" do
+    it "should aggregate"
+  end
+
+  # MAP REDUCE
+
+  describe "map reduce" do
+    it "should run map reduce"
+  end
+end
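
The `getLastError` examples above pair an unacknowledged `insert` (which returns a request object) with an explicit `get_last_error` call on the same connection; per the spec, a failed write comes back as a `Monga::Exceptions::QueryFailure` instance rather than being raised. A sketch of that pattern, assuming `db` and `collection` are set up as in the spec:

```ruby
EM.synchrony do
  req  = collection.insert(name: "Peter", personal_id: 10)
  resp = db.get_last_error(req.connection, fsync: true)

  if resp.is_a?(Monga::Exceptions::QueryFailure)
    puts "write failed: #{resp}"
  else
    puts "write ok" if resp["ok"] == 1.0
  end

  EM.stop
end
```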
data/spec/monga/sync/replica_set_client_spec.rb
ADDED
@@ -0,0 +1,54 @@
+require 'spec_helper'
+
+describe Monga::Clients::ReplicaSetClient do
+  before do
+    EM.synchrony do
+      @replset = Fake::ReplicaSet.new([29000, 29100, 29200])
+      @client = Monga::Client.new servers: ['127.0.0.1:29000', '127.0.0.1:29100', '127.0.0.1:29200'], type: :sync, timeout: 1
+      @collection = @client["dbTest"]["myCollection"]
+      EM.stop
+    end
+  end
+
+  it "should fail on disconnect and reconnect when primary is up again" do
+    EM.synchrony do
+      @replset.start_all
+      @collection.safe_insert(name: "Peter")
+      @replset.primary.stop
+      proc{ @collection.safe_insert(name: "Peter") }.must_raise Monga::Exceptions::Disconnected
+      proc{ @collection.safe_insert(name: "Peter") }.must_raise Monga::Exceptions::Disconnected
+      proc{ @collection.safe_insert(name: "Peter") }.must_raise Monga::Exceptions::Disconnected
+      @replset.primary.start
+      @collection.safe_insert(name: "Madonna")
+      @collection.safe_insert(name: "Madonna")
+      @collection.safe_insert(name: "Madonna")
+      EM.stop
+    end
+  end
+
+  it "should work even if secondaries down" do
+    EM.synchrony do
+      @replset.start_all
+      @collection.safe_insert(name: "Peter")
+      @collection.safe_insert(name: "Peter")
+      @replset.secondaries.each(&:stop)
+      @collection.safe_insert(name: "Peter")
+      @collection.safe_insert(name: "Peter")
+      EM.stop
+    end
+  end
+
+  it "should find new primary if it is down" do
+    EM.synchrony do
+      @replset.start_all
+      @collection.safe_insert(name: "Peter")
+      @replset.primary.stop
+      proc{ @collection.safe_insert(name: "Peter") }.must_raise Monga::Exceptions::Disconnected
+      proc{ @collection.safe_insert(name: "Peter") }.must_raise Monga::Exceptions::Disconnected
+      proc{ @collection.safe_insert(name: "Peter") }.must_raise Monga::Exceptions::Disconnected
+      @replset.vote
+      @collection.safe_insert(name: "Madonna")
+      EM.stop
+    end
+  end
+end
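
The replica-set spec above shows the failover contract: while no primary is reachable, `safe_insert` raises `Monga::Exceptions::Disconnected`, and the same call succeeds once a primary is back or re-elected. A hedged retry sketch built on that behaviour; the helper name, attempt count, and the `EM::Synchrony.sleep` backoff are illustrative assumptions, not part of the gem:

```ruby
def insert_with_retry(collection, doc, attempts = 5)
  collection.safe_insert(doc)
rescue Monga::Exceptions::Disconnected
  attempts -= 1
  raise if attempts <= 0
  EM::Synchrony.sleep(0.5)  # give the replica set time to elect a new primary
  retry
end
```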
data/spec/monga/sync/single_instance_client_spec.rb
ADDED
@@ -0,0 +1,25 @@
+require 'spec_helper'
+
+describe Monga::Clients::SingleInstanceClient do
+  before do
+    EM.synchrony do
+      @instance = Fake::SingleInstance.new(29000)
+      @client = Monga::Client.new port: 29000, type: :sync
+      @collection = @client["dbTest"]["myCollection"]
+      EM.stop
+    end
+  end
+
+  it "should fail on disconnect and reconnect when instance is up again" do
+    EM.synchrony do
+      @instance.start
+      @collection.safe_insert(name: "Peter")
+      @instance.stop
+      proc{ @collection.safe_insert(name: "Peter") }.must_raise Monga::Exceptions::Disconnected
+      proc{ @collection.safe_insert(name: "Peter") }.must_raise Monga::Exceptions::Disconnected
+      @instance.start
+      @collection.safe_insert(name: "Madonna")
+      EM.stop
+    end
+  end
+end
data/spec/spec_helper.rb
CHANGED
@@ -1,29 +1,11 @@
 LIB_PATH = File.expand_path('../../lib/monga', __FILE__)
 
 require LIB_PATH
-require 'helpers/truncate'
-require 'helpers/mongodb'
 require 'minitest/spec'
 require 'minitest/autorun'
 require 'minitest/reporters'
 MiniTest::Reporters.use! MiniTest::Reporters::SpecReporter.new
 
-
-  CLIENT = Monga::Client.new(host: "localhost", port: 27017)
-  DB = CLIENT["dbTest"]
-  COLLECTION = DB["testCollection"]
-  EM.stop
-end
+Monga.logger.level = Logger::ERROR
 
-
-
-REPL_SET_PORTS = [{ port: 29100 }, { port: 29200 }, { port: 29300 }]
-EM.run do
-  REPL_SET = Mongodb::ReplicaSet.new(REPL_SET_PORTS)
-  RS_CLIENT = Monga::ReplicaSetClient.new(servers: REPL_SET_PORTS)
-  RS_DB = RS_CLIENT["dbTest"]
-  RS_COLLECTION = RS_DB["testCollection"]
-  EM.stop
-end
-
-# And welcome to callback Hell
+require 'helpers/mongodb'