document-store 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CHANGELOG.mdown +11 -0
- data/Gemfile +9 -0
- data/Gemfile.lock +47 -0
- data/Rakefile +10 -0
- data/app.gemspec +18 -0
- data/lib/store/cache.rb +98 -0
- data/lib/store/caches/in_memory.rb +23 -0
- data/lib/store/caches/memcached.rb +73 -0
- data/lib/store/memory.rb +269 -0
- data/lib/store/mongodb.rb +284 -0
- data/test/store_test.rb +346 -0
- data/test/test_helper.rb +14 -0
- metadata +88 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
+---
+SHA1:
+  metadata.gz: 0af94c22aff4e1c04e680ca2f71a7cb26b26b379
+  data.tar.gz: 623a7699d80a76cedf3c8b54f915eb89a307be3f
+SHA512:
+  metadata.gz: c16170ccad0fd2c63132527452491ec41ac3849ac4f651f1cf09490fecbaa5e229df7c50987f2aa9c5229d0616fb6906ad66088ac745ed8327c082e23b8671c1
+  data.tar.gz: 39da60cd1bc23817aab499a8c10aaf0eb181c63ad58bafc9091a5a83722dc9fed4c509f37441740b9644d11296335ef4598ea3cf3bdfcd2120fd4f99da6af455
data/CHANGELOG.mdown
ADDED
data/Gemfile
ADDED
data/Gemfile.lock
ADDED
@@ -0,0 +1,47 @@
+PATH
+  remote: .
+  specs:
+    document-store (1.0.0)
+      em-mongo (= 0.4.3)
+      em-synchrony
+
+GEM
+  remote: http://rubygems.org/
+  specs:
+    activesupport (3.2.7)
+      i18n (~> 0.6)
+      multi_json (~> 1.0)
+    ansi (1.4.3)
+    bson (1.9.2)
+    em-minitest (1.0.1)
+    em-mongo (0.4.3)
+      bson (>= 1.1.3)
+      eventmachine (>= 0.12.10)
+    em-synchrony (1.0.3)
+      eventmachine (>= 1.0.0.beta.1)
+    eventmachine (1.0.3)
+    i18n (0.6.0)
+    metaclass (0.0.1)
+    mocha (0.14.0)
+      metaclass (~> 0.0.1)
+    multi_json (1.3.6)
+    rake (10.1.0)
+    shoulda (3.1.1)
+      shoulda-context (~> 1.0)
+      shoulda-matchers (~> 1.2)
+    shoulda-context (1.0.0)
+    shoulda-matchers (1.2.0)
+      activesupport (>= 3.0.0)
+    turn (0.9.6)
+      ansi
+
+PLATFORMS
+  ruby
+
+DEPENDENCIES
+  document-store!
+  em-minitest
+  mocha
+  rake
+  shoulda
+  turn
data/Rakefile
ADDED
data/app.gemspec
ADDED
@@ -0,0 +1,18 @@
+# -*- encoding: utf-8 -*-
+
+Gem::Specification.new do |gem|
+  gem.authors       = ['Mikael Wikman']
+  gem.email         = ['mikael@wikman.me']
+  gem.description   = %q{This wrapper provides a minimalistic interface to document-based databases. It includes an in-memory store that can be easily used for writing tests, as well as an in-memory cached version of each implementation.}
+  gem.summary       = %q{A wrapper around document-based databases to provide a minimalistic interface that can be easily changed}
+  gem.homepage      = "https://github.com/mikaelwikman/document-store"
+
+  gem.files         = `git ls-files`.split($\)
+  gem.executables   = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
+  gem.test_files    = gem.files.grep(%r{^(test|features)/})
+  gem.name          = "document-store"
+  gem.require_paths = ["lib"]
+  gem.version       = '1.0.0'
+  gem.add_dependency 'em-synchrony'
+  gem.add_dependency 'em-mongo', '0.4.3'
+end
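
The gemspec pins em-mongo to exactly 0.4.3 while leaving em-synchrony unconstrained. A consuming project would pull the released gem in through its own Gemfile; a minimal, illustrative sketch (not part of this package):

    # Gemfile of a hypothetical consuming project
    source 'http://rubygems.org'

    gem 'document-store', '1.0.0'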
data/lib/store/cache.rb
ADDED
@@ -0,0 +1,98 @@
+require 'store/caches/in_memory'
+require 'store/caches/memcached'
+
+class Store
+  class Cache
+    attr_reader :backend
+
+    def initialize backend_store, args={}
+      @backend = backend_store
+      if args[:memcached]
+        @cache = Caches::Memcached.new
+      else
+        @cache = Caches::InMemory.new
+      end
+    end
+
+    def timestamper= ts
+      backend.timestamper = ts
+    end
+
+    def timestamper
+      backend.timestamper
+    end
+
+    def close
+      @cache.invalidate
+      backend.close
+    end
+
+    def create *args
+      @cache.invalidate
+      backend.create *args
+    end
+
+    def update *args
+      @cache.invalidate
+      backend.update *args
+    end
+
+    def all table
+      each(table).map{|i|i}
+    end
+
+    def count table
+      backend.count(table)
+    end
+
+    def each table
+      if data=@cache.load(table)
+        data
+      else
+        data = backend.all(table)
+        @cache.save table, data
+        data
+      end
+    end
+
+    def reset *args
+      @cache.invalidate
+      backend.reset(*args)
+    end
+
+    def find *args
+      key = Marshal.dump(args)
+      if data=@cache.load(key)
+        data
+      else
+        data = backend.find(*args)
+        @cache.save key, data
+        data
+      end
+    end
+
+    def collate *args
+      key = Marshal.dump(args)
+      if data=@cache.load(key)
+        data
+      else
+        data = backend.collate(*args)
+        @cache.save(key,data)
+        data
+      end
+    end
+
+    def create_equal_filter *args
+      backend.create_equal_filter(*args)
+    end
+    def create_lt_filter *args
+      backend.create_lt_filter(*args)
+    end
+    def create_gt_filter *args
+      backend.create_gt_filter(*args)
+    end
+    def create_gte_filter *args
+      backend.create_gte_filter(*args)
+    end
+  end
+end
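
Store::Cache delegates writes to the backend and invalidates the whole cache on every create, update, close, or reset, while find and collate memoize their results under a Marshal-dumped key of the call arguments. A minimal usage sketch, assuming lib/ is on the load path; the database, table, and field names here are made up for illustration:

    require 'store/memory'
    require 'store/cache'

    store = Store::Cache.new(Store::Memory.new('example_db'))

    store.create('animals', 'name' => 'duck')    # write: invalidates the cache
    filters = [store.create_equal_filter(:name, 'duck')]
    store.find('animals', filters)               # miss: hits the backend, caches the result
    store.find('animals', filters)               # hit: served from the cache
    store.update('animals', { 'name' => 'duck' }, 'name' => 'swan')  # invalidates again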
data/lib/store/caches/in_memory.rb
ADDED
@@ -0,0 +1,23 @@
+
+module Caches
+  class InMemory
+    def initialize
+      @cache = {}
+    end
+
+    def invalidate
+      @cache = {}
+    end
+
+    def load(key)
+      data = @cache[key]
+      if data
+        Marshal.load(data)
+      end
+    end
+
+    def save key, data
+      @cache[key] = Marshal.dump(data)
+    end
+  end
+end
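
Note that the in-memory cache stores Marshal dumps rather than object references, so each load hands back a deep copy and mutating a result cannot corrupt the cached value. A small sketch of the effect (illustrative key and values):

    cache = Caches::InMemory.new
    cache.save('k', ['a', 'b'])
    copy = cache.load('k')
    copy << 'c'
    cache.load('k')   # => ["a", "b"]  (the cached value is untouched)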
data/lib/store/caches/memcached.rb
ADDED
@@ -0,0 +1,73 @@
+#
+# THIS IS UNTESTED, AND UNUSED, DON'T USE!!
+#
+
+require 'em-synchrony/em-memcache'
+
+module Caches
+  class Memcached
+    @@max_size = 1000000
+    def initialize
+      @cache = EM::P::Memcache.connect
+    end
+
+    def invalidate
+      set 'invalidated_at', Time.new
+    end
+
+    def load(key)
+      key_date = "#{key}_date"
+
+      time_set = get(key_date)
+      invalidated_at = get('invalidated_at')
+
+      if time_set && (!invalidated_at || time_set > invalidated_at)
+        data = get(key)
+        data
+      end
+    end
+
+    def save key, data
+      key_date = "#{key}_date"
+
+      set(key, data)
+      set(key_date, Time.new)
+    end
+
+    private
+
+    def clean! key
+      key.gsub! /[^a-zA-Z_]/, ''
+    end
+
+    def get key
+      clean!(key)
+      data = ""
+      i = 0
+      puts "READ #{key}_#{i}"
+      while new_data=@cache.get("#{key}_#{i}")
+        data << new_data
+        puts "Got #{new_data.length} characters"
+        i+=1
+        puts "READ #{key}_#{i}"
+      end
+      if data.length > 0
+        Marshal.load(data)
+      end
+    end
+
+    def set key, data
+      clean!(key)
+      data = Marshal.dump(data)
+      i = 0
+      while i*@@max_size < data.length
+        tkey = "#{key}_#{i}"
+        minidata = data[i*@@max_size, @@max_size]
+        puts "Write #{tkey}, block of #{minidata.length}"
+        @cache.set(tkey, minidata)
+
+        i+=1
+      end
+    end
+  end
+end
data/lib/store/memory.rb
ADDED
@@ -0,0 +1,269 @@
+class Store
+  class Memory
+    attr_writer :timestamper
+    def timestamper
+      @timestamper || lambda {Time.new}
+    end
+
+    def initialize database_name
+      @collections = {}
+      @id = 0
+    end
+
+    def create table, entry
+      if !entry['_id']
+        entry['_id'] = @id += 1
+      end
+      entry.keys.each do |k|
+        entry[k.to_s] = entry.delete(k)
+      end
+      entry['updated_at'] = entry['created_at'] = timestamper.call
+      collection(table)[entry['_id']] = entry
+      entry['_id']
+    end
+
+    def all table
+      collection(table).values
+    end
+
+    def count table
+      collection(table).count
+    end
+
+    def each table, &block
+      collection(table).values.each &block
+    end
+
+    def reset table
+      @collections.delete(table)
+    end
+
+    def find table, filters, opts={}
+      values = collection(table).values
+
+      filters.each do |filter|
+        values = filter.filter(values)
+      end
+
+      if opts[:sort]
+        fields = opts[:sort].split(',')
+        fields.map! do |field|
+          sort = field.split('=')
+          [sort[0], (sort[1] || 1).to_i]
+        end
+
+        values.sort! do |e1,e2|
+          order = 0
+
+          fields.each do |field|
+            name = field[0]
+            asc = field[1]
+            f1 = e1[name]
+            f2 = e2[name]
+
+            f1 = 1 if f1 == true
+            f1 = 0 if f1 == false
+            f2 = 1 if f2 == true
+            f2 = 0 if f2 == false
+
+            order = asc * ((f1 <=> f2) || 0)
+            break if order != 0
+          end
+
+          order
+        end
+      end
+
+      if opts[:start]
+        opts[:start].times do |i|
+          values.shift
+        end
+      end
+
+      if opts[:limit]
+        values.pop while values.count > opts[:limit]
+      end
+
+      values
+    end
+
+    def collate table, filters, opts={}
+      # need to get all items, or else we can't calculate facets
+      start = opts.delete(:start)
+      limit = opts.delete(:limit)
+      facetlimit = opts.delete(:facetlimit)
+
+      result = {
+        items: find(table, filters, opts)
+      }
+
+      if opts[:facets]
+        result[:facets] = calculate_facets(opts[:facets], result[:items])
+
+        if facetlimit
+          result[:facets].each do |k,v|
+            v.pop while v.count > facetlimit
+          end
+        end
+      end
+
+      result[:count] = result[:items].count
+
+      if start
+        start.times do |i|
+          result[:items].shift
+        end
+      end
+
+      if limit
+        result[:items].pop while result[:items].count > limit
+      end
+
+      result
+    end
+
+    def update table, id, entry
+      old_entry=nil
+
+      if id.kind_of?(Hash)
+        collection(table).each do |orig_k,orig_v|
+          if id.all?{|k,v| orig_v[k.to_s] == v}
+            old_entry = orig_v
+            id = orig_k
+            break;
+          end
+        end
+      else
+        old_entry = collection(table)[id]
+      end
+
+      if not old_entry
+        create table, entry
+        return entry
+      end
+
+      entry.keys.each do |key|
+        entry[key.to_s] = entry.delete(key)
+      end
+
+      entry = old_entry.merge(entry)
+      entry['updated_at'] = timestamper.call
+
+      collection(table)[id] = entry
+      entry
+    end
+
+    # filters
+    def create_equal_filter field, value
+      EqualFilter.new(field,value)
+    end
+    def create_lt_filter field, value
+      LTFilter.new(field,value)
+    end
+    def create_gt_filter field, value
+      GTFilter.new(field,value)
+    end
+    def create_gte_filter field, value
+      GTEFilter.new(field,value)
+    end
+
+    private
+
+    def collection table
+      @collections[table] ||= {}
+    end
+
+    def calculate_facets facets, records
+      result = {}
+      facets.each do |facet|
+        facet = facet.to_s
+        temp = {}
+
+        records.each do |record|
+          record_value = record[facet] || 'unknown'
+
+          r = record_value.kind_of?(Array) ? record_value : [record_value]
+
+          r.each do |value|
+            value = value.to_s
+            value = 'unknown' if value.strip == ''
+
+            temp[value] ||= 0
+            temp[value] += 1
+          end
+        end
+
+        facet_entries = temp.map do |name, value|
+          [name.to_s, value]
+        end
+
+        facet_entries.sort! {|e1, e2| e2[1] <=> e1[1] }
+        result[facet.to_s] = facet_entries
+      end
+      result
+    end
+
+    class EqualFilter
+      def initialize field, value
+        @field = field.to_s
+        @value = value
+        @value = "" if value == 'unknown' || value == nil
+      end
+
+      def filter entries
+        entries.find_all do |entry|
+          value2 = entry[@field]
+          value2 = '' if value2 == nil
+          value2 == @value
+        end
+      end
+    end
+
+    class LTFilter
+      def initialize field, value
+        @field = field.to_s
+        @value = value
+        @value = "" if value == 'unknown' || value == nil
+      end
+
+      def filter entries
+        entries.find_all do |entry|
+          value2 = entry[@field]
+          value2 = '' if value2 == nil
+          value2 < @value
+        end
+      end
+    end
+
+    class GTFilter
+      def initialize field, value
+        @field = field.to_s
+        @value = value
+        @value = "" if value == 'unknown' || value == nil
+      end
+
+      def filter entries
+        entries.find_all do |entry|
+          value2 = entry[@field]
+          value2 = '' if value2 == nil
+          value2 > @value
+        end
+      end
+    end
+    class GTEFilter
+      def initialize field, value
+        @field = field.to_s
+        @value = value
+        @value = "" if value == 'unknown' || value == nil
+      end
+
+      def filter entries
+        entries.find_all do |entry|
+          value2 = entry[@field]
+          value2 = '' if value2 == nil
+          value2 >= @value
+        end
+      end
+    end
+  end
+end
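
The :sort option is a comma-separated string of field=direction pairs (1 ascending, -1 descending), the same format the test suite exercises; :start and :limit page the filtered result. A short sketch against the in-memory store, with made-up table and field names:

    require 'store/memory'

    store = Store::Memory.new('example_db')
    store.create('scores', 'player' => 'ann', 'points' => 3)
    store.create('scores', 'player' => 'bob', 'points' => 1)
    store.create('scores', 'player' => 'cid', 'points' => 2)

    top_two = store.find('scores', [], sort: 'points=-1', limit: 2)
    top_two.map { |e| e['player'] }   # => ["ann", "cid"]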
data/lib/store/mongodb.rb
ADDED
@@ -0,0 +1,284 @@
+require 'em-mongo'
+
+class Store
+  class Mongodb
+    attr_writer :timestamper
+    def timestamper
+      @timestamper ||= lambda { Time.new }
+    end
+
+    def initialize database_name
+      @database_name = database_name
+      @free_connections ||= []
+    end
+
+    def close
+      @db.close
+      @db=nil
+    end
+
+    def create table, entry
+      connect do |db|
+        entry['created_at'] = entry['updated_at'] = timestamper.call
+
+        resp = db.collection(table).safe_insert(entry)
+
+        f = Fiber.current
+        resp.callback{|doc| f.resume(doc)}
+        resp.errback{|err| f.resume(:err, err)}
+
+        result, error = Fiber.yield
+
+        if result == :err
+          raise error.inspect
+        else
+          result
+        end
+      end
+    end
+
+    def update table, id, entry
+      if entry.keys.any?{|key| key.kind_of?(Symbol) }
+        raise "MongoDb can't handle symbols, use only string keys!"
+      end
+      matcher = []
+      filter = id.kind_of?(Hash) ? id : { _id: id }
+
+      filter.each do |k,v|
+        matcher << create_equal_filter(k,v)
+      end
+
+      connect do |db|
+        old_entry = find(table, matcher).first
+
+        if old_entry
+          entry = old_entry.merge(entry)
+          entry['updated_at'] = timestamper.call
+
+          f = Fiber.current
+          resp = db.collection(table).safe_update(filter, entry)
+          resp.errback{|err| exit -1}
+          resp.callback{|doc| f.resume doc}
+          Fiber.yield
+          entry
+        else
+          id = create(table, entry)
+          find(table, matcher).first
+        end
+      end
+    end
+
+    def all table
+      find(table,{})
+    end
+
+    def count table
+      connect do |db|
+        resp = db.collection(table).count
+        f = Fiber.current
+        resp.callback {|count| f.resume count }
+        resp.errback {|err| raise err }
+
+        Fiber.yield
+      end
+    end
+
+    def reset table
+      connect do |db|
+        db.collection(table).remove()
+      end
+    end
+
+    def find table, filters, opts={}
+      real_filters = {}
+      filters.inject(real_filters) do |hash,f|
+        f.add_filter(hash)
+        hash
+      end
+
+      if opts[:sort]
+        fields = opts.delete(:sort).split(',')
+        opts[:sort] = []
+        fields.each do |field|
+          sort = field.split('=')
+          name = sort[0]
+          order = (sort[1] || '1') == '1' ? :asc : :desc
+          opts[:sort] << [name,order]
+        end
+      end
+
+      if opts[:start]
+        start = opts.delete(:start)
+        opts[:skip] = start
+      end
+
+      connect do |db|
+        f = Fiber.current
+        docs = []
+        resp = db.collection(table).find(real_filters, opts).each do |doc|
+          if doc
+            docs << doc
+          else
+            f.resume if f.alive?
+          end
+        end
+        Fiber.yield
+        docs
+      end
+    end
+
+    def collate table, filters, opts={}
+      # need to get all items, or else we can't calculate facets
+      start = opts.delete(:start)
+      limit = opts.delete(:limit)
+      facets = opts.delete(:facets)
+      facetlimit = opts.delete(:facetlimit)
+
+      result = {
+        items: find(table, filters, opts)
+      }
+
+      if facets
+        result[:facets] = calculate_facets(facets, result[:items])
+
+        if facetlimit
+          result[:facets].each do |k,v|
+            v.pop while v.count > facetlimit
+          end
+        end
+      end
+
+      result[:count] = result[:items].count
+
+      if start
+        start.times do |i|
+          result[:items].shift
+        end
+      end
+
+      if limit
+        result[:items].pop while result[:items].count > limit
+      end
+
+      result
+    end
+
+    # filter factories
+    def create_equal_filter field, name
+      EqualFilter.new(field, name)
+    end
+    def create_lt_filter field, name
+      LTFilter.new(field, name)
+    end
+    def create_gt_filter field, name
+      GTFilter.new(field, name)
+    end
+    def create_gte_filter field, name
+      GTEFilter.new(field, name)
+    end
+
+    private
+
+    def connect
+      # some simple connection pooling to avoid conflicts..
+
+      con = if @free_connections.length > 0
+        @free_connections.pop
+      else
+        EM::Mongo::Connection.new('localhost', 27017, nil, slave_ok: true).db(@database_name)
+      end
+
+      result = yield(con)
+      @free_connections << con
+      result
+    end
+
+    def calculate_facets facets, records
+      result = {}
+      facets.each do |facet|
+        facet = facet.to_s
+        temp = {}
+
+        records.each do |record|
+          record_value = record[facet] || 'unknown'
+
+          r = record_value.kind_of?(Array) ? record_value : [record_value]
+
+          r.each do |value|
+            value = value.to_s
+            value = 'unknown' if value.strip == ''
+
+            temp[value] ||= 0
+            temp[value] += 1
+          end
+        end
+
+        facet_entries = temp.map do |name, value|
+          [name.to_s, value]
+        end
+
+        facet_entries.sort! {|e1, e2| e2[1] <=> e1[1] }
+        result[facet] = facet_entries
+      end
+      result
+    end
+
+    class EqualFilter
+      def initialize(field, value)
+        @field = field; @value = value
+      end
+
+      def add_filter(hash)
+        if @value == 'unknown'
+          hash[@field] = nil
+        elsif @value.kind_of?(BSON::ObjectId)
+          hash[@field] = @value
+        else
+          hash[@field] = @value
+        end
+      end
+    end
+
+    class LTFilter
+      def initialize(field, value)
+        @field = field; @value = value
+      end
+
+      def add_filter(hash)
+        h = hash[@field] ||= {}
+
+        if @value != 'unknown'
+          h['$lt'] = @value
+        end
+      end
+    end
+
+    class GTFilter
+      def initialize(field, value)
+        @field = field; @value = value
+      end
+
+      def add_filter(hash)
+        h = hash[@field] ||= {}
+
+        if @value != 'unknown'
+          h['$gt'] = @value
+        end
+      end
+    end
+
+    class GTEFilter
+      def initialize(field, value)
+        @field = field; @value = value
+      end
+
+      def add_filter(hash)
+        h = hash[@field] ||= {}
+
+        if @value != 'unknown'
+          h['$gte'] = @value
+        end
+      end
+    end
+  end
+end
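
Because every Mongo call parks the current Fiber and resumes it from an EventMachine callback, this store only works inside a running reactor with a root fiber, which is what em-synchrony provides. A minimal sketch, assuming a mongod listening on localhost:27017 and made-up table and database names:

    require 'em-synchrony'
    require 'store/mongodb'

    EM.synchrony do
      store = Store::Mongodb.new('example_db')
      store.create('animals', 'name' => 'duck')   # string keys only; symbol keys raise on update
      puts store.count('animals')                 # => 1
      EM.stop
    end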
data/test/store_test.rb
ADDED
@@ -0,0 +1,346 @@
+require 'test_helper'
+require 'store/mongodb'
+require 'store/memory'
+require 'store/cache'
+require 'em-synchrony'
+
+# The cache store differs in initializer from the others, so we'll
+# create a fake one to initialize it properly
+class InMemoryCacheStore < Store::Cache
+  def initialize database
+    super(Store::Memory.new(database))
+  end
+end
+#class MemcachedStore < Store::Cache
+#  def initialize database
+#    super(Store::Memory.new(database), memcached: true)
+#  end
+#end
+
+[
+  Store::Mongodb,
+  Store::Memory,
+  InMemoryCacheStore
+].each do |store|
+  Class.new(TestCase).class_eval do
+
+    should store.name + ' use current Time as default time stamper' do
+      val = store.new('hubo').timestamper.call
+      assert val.kind_of?(Time)
+    end
+
+    should store.name + ' allow setting time stamper' do
+      s = store.new('hubo')
+      s.timestamper = lambda { 4 }
+      assert_equal 4, s.timestamper.call
+    end
+
+    context "Testing #{store.name}" do
+      setup do
+        @it = store.new('testDb')
+        @it.reset('test_table')
+        @it.reset('testosteron_table')
+        timestamp = 0
+        @it.timestamper = lambda { timestamp+=1 }
+      end
+
+      should '#count' do
+        id = @it.create('test_table', { duck: 'monkey' })
+        id = @it.create('test_table', { duck: 'monkey' })
+        id = @it.create('testosteron_table', { duck: 'monkey' })
+
+        assert_equal 2, @it.count('test_table')
+      end
+
+      should '#all aggregate all results' do
+        id = @it.create('test_table', { duck: 'monkey' })
+
+        result = @it.all('test_table')
+
+        assert_equal 1, result.count
+        assert_equal 'monkey', result[0]['duck']
+      end
+
+      should "create and retrieve entry" do
+        id = @it.create('test_table', { duck: 'monkey' })
+
+        assert id.kind_of?(BSON::ObjectId) || id.to_i > 0
+
+        result = @it.all('test_table')
+        assert_equal 1, result.count
+        assert_equal 'monkey', result.first['duck']
+        assert_equal 1, result.first['created_at']
+      end
+
+      context '#update' do
+        should 'handle many concurrent updates' do
+          id = @it.create('test_table', { duck: 'monkey' })
+
+          100.times do
+            f = Fiber.new do
+              entry = { 'duck' => 'history' }
+              @it.update('test_table', id, entry)
+            end
+
+            EM.next_tick { f.resume }
+          end
+
+          EM::Synchrony.sleep(0.1)
+        end
+
+        should 'update given fields entry' do
+          id = @it.create('test_table', { duck: 'monkey' })
+
+          entry = { 'duck' => 'history' }
+
+          @it.update('test_table', id, entry)
+
+          entries = @it.all('test_table')
+
+          assert_equal 1, entries.count
+          assert_equal 'history', entries.first['duck']
+          assert_equal 2, entries.first['updated_at']
+        end
+
+        should 'update entry by matcher' do
+          @it.create('test_table', { duck: 'monkey' })
+          @it.create('test_table', { duck: 'donkey' })
+          @it.create('test_table', { duck: 'congo' })
+
+          @it.update('test_table',
+            { 'duck' => 'donkey'},
+            { 'duck' => 'history'})
+
+          entries = @it.all('test_table')
+
+          assert_equal 3, entries.count
+          assert_equal 'monkey', entries[0]['duck']
+          assert_equal 'history', entries[1]['duck']
+          assert_equal 'congo', entries[2]['duck']
+        end
+
+        should 'update should create if not exist' do
+          r = @it.update('test_table', {'duck' => 'donkey'}, { 'duck' => 'donkey'})
+
+          entries = @it.all('test_table')
+          assert_equal 1, entries.count
+          assert_equal 'donkey', entries[0]['duck']
+          assert_equal 1, entries[0]['updated_at']
+          assert_equal 1, entries[0]['created_at']
+        end
+
+        should 'return the resulting entry while updating' do
+          id = @it.create('test_table', { duck: 'monkey', paid_taxes: true })
+          entry = @it.update('test_table', id, 'duck' => 'history')
+
+          assert_equal 'history', entry['duck']
+          assert_equal true, entry['paid_taxes']
+        end
+
+        should 'return the resulting entry after created' do
+          entry = @it.update('test_table', { 'duck' => 'history' }, 'duck' => 'history')
+
+          assert_equal 'history', entry['duck']
+          assert entry['_id'], 'ID should be set'
+        end
+
+        should 'make partial updates' do
+          entry = @it.create('test_table', { 'duck' => 'history', paid_taxes: true })
+          entry = @it.update('test_table', { 'duck' => 'history' }, 'duck' => 'monkey')
+
+          entries = @it.all('test_table')
+          assert_equal 1, entries.count
+          assert_equal 'monkey', entries[0]['duck']
+          assert_equal true, entries[0]['paid_taxes']
+        end
+      end
+
+      context '#find' do
+        setup do
+          @it.create('test_table', {
+            duck: 'horse',
+            has_duck: true,
+            number: 1
+          })
+          @it.create('test_table', {
+            duck: 'MoNkeY',
+            has_duck: true,
+            number: 2
+          })
+          @it.create('test_table', {
+            duck: 'donkey',
+            has_duck: true,
+            number: 3
+          })
+          @it.create('test_table', {
+            noduckie: 'here',
+            has_duck: false,
+            number: 4
+          })
+        end
+
+        should 'treat "unknown" as empty string as unexisting' do
+          @it.create('test_table', {
+            verify: true
+          })
+          filters = [@it.create_equal_filter(:duck, 'unknown')]
+          r = @it.find('test_table', filters)
+          assert_equal 2, r.count
+          assert_equal 'here', r[0]['noduckie']
+          assert_equal true, r[1]['verify']
+        end
+
+        should 'find entries case sensitive by filter' do
+          filters = [@it.create_equal_filter(:duck, 'monkey')]
+          result = @it.find('test_table', filters).map {|e| e}
+          assert_equal 0, result.count
+
+          filters = [@it.create_equal_filter(:duck, 'MoNkeY')]
+          result = @it.find('test_table', filters).map {|e| e}
+          assert_equal 1, result.count
+          assert_equal 'MoNkeY', result.first['duck']
+        end
+
+        context 'filters' do
+
+          should 'equal' do
+            filters = [@it.create_equal_filter(:has_duck, false)]
+            result = @it.find('test_table', filters).map {|e| e}
+            assert_equal 1, result.count
+            assert_equal 'here', result.first['noduckie']
+          end
+
+          should 'less-than' do
+            filters = [@it.create_lt_filter(:number, 3)]
+            result = @it.find('test_table', filters).map {|e| e}
+            assert_equal 2, result.count
+            assert_equal 1, result[0]['number']
+            assert_equal 2, result[1]['number']
+          end
+
+          should 'greater-than' do
+            filters = [@it.create_gt_filter(:number, 3)]
+            result = @it.find('test_table', filters).map {|e| e}
+            assert_equal 1, result.count
+            assert_equal 4, result[0]['number']
+          end
+
+          should 'greater-or-equal' do
+            filters = [@it.create_gte_filter(:number, 3)]
+            result = @it.find('test_table', filters).map {|e| e}
+            assert_equal 2, result.count
+            assert_equal 3, result[0]['number']
+            assert_equal 4, result[1]['number']
+          end
+        end
+
+        should 'limit response size' do
+          result = @it.find('test_table', [], limit: 1).map{|i|i}
+          assert_equal 1, result.count
+        end
+
+        should 'set zero-based start index' do
+          result = @it.find('test_table', [], start: 2).map{|i|i}
+          assert_equal 2, result.count
+          assert_equal 'donkey', result[0]['duck']
+          assert_equal 'here', result[1]['noduckie']
+        end
+
+        should 'treat \'unknown\' as nil or empty' do
+          filters = [@it.create_equal_filter(:duck, 'unknown')]
+          result = @it.find('test_table', filters).map {|e| e}
+          assert_equal 1, result.count
+          assert_equal 'here', result.first['noduckie']
+        end
+
+        should 'sort asc' do
+          result = @it.find('test_table', [], sort: 'has_duck=-1,duck=1').map {|e| e}
+          assert_equal 4, result.count
+          assert_equal 'MoNkeY', result[0]['duck']
+          assert_equal 'donkey', result[1]['duck']
+          assert_equal 'horse', result[2]['duck']
+          assert_equal 'here', result[3]['noduckie']
+        end
+
+        should 'sort desc' do
+          result = @it.find('test_table', [], sort: 'has_duck=-1,duck=-1').map {|e| e}
+          assert_equal 4, result.count
+          assert_equal 'horse', result[0]['duck']
+          assert_equal 'donkey', result[1]['duck']
+          assert_equal 'MoNkeY', result[2]['duck']
+          assert_equal 'here', result[3]['noduckie']
+        end
+      end
+
+      context '#collate' do
+        setup do
+          @it.create('test_table', { duck: 1990 })
+          @it.create('test_table', { duck: nil })
+          @it.create('test_table', { duck: "" })
+          @it.create('test_table', { duck: 'monkey' })
+          @it.create('test_table', { duck: 'donkey' })
+          @it.create('test_table', { duck: 'donkey' })
+        end
+
+        should 'find entries by filter' do
+          filters = [@it.create_equal_filter(:duck, 'monkey')]
+          result = @it.collate('test_table', filters)
+          assert_equal 1, result[:items].count
+          assert_equal 'monkey', result[:items].first['duck']
+        end
+
+        should 'limit response size' do
+          result = @it.collate('test_table', [], limit: 1)
+          assert_equal 1, result[:items].count
+        end
+
+        should 'set zero-based start index' do
+          result = @it.collate('test_table', [], start: 3, limit: 2)
+          assert_equal 2, result[:items].count
+          assert_equal 'monkey', result[:items][0]['duck']
+          assert_equal 'donkey', result[:items][1]['duck']
+        end
+
+
+        context 'total count' do
+          should 'not be affected by limit' do
+            result = @it.collate('test_table', [], limit: 1)
+            assert_equal 6, result[:count]
+          end
+
+          should 'not be affected by start' do
+            result = @it.collate('test_table', [], start: 3)
+            assert_equal 6, result[:count]
+          end
+        end
+
+        should 'include facets if given' do
+          @it.create('test_table', { duck: ['donkey', 'muppet'] })
+
+          result = @it.collate('test_table', [], facets: [:duck])
+          assert_equal 7, result[:items].count
+          assert_equal 1, result[:facets].count
+
+          entries = result[:facets]['duck']
+          assert entries, "Expected facets to include 'duck'"
+          assert_equal 5, entries.count
+          assert_equal [
+            ['donkey' , 3],
+            ['unknown' , 2],
+            ['monkey' , 1],
+            ['1990' , 1],
+            ['muppet' , 1],
+          ], entries
+        end
+
+        should 'limit facet entries count, cutting lesser important' do
+          result = @it.collate('test_table', [], facets: [:duck], facetlimit: 2)
+          entries = result[:facets]['duck']
+          assert_equal 2, entries.count
+          assert_equal(['donkey', 2], entries[0])
+        end
+      end
+
+    end
+  end
+end
data/test/test_helper.rb
ADDED
metadata
ADDED
@@ -0,0 +1,88 @@
+--- !ruby/object:Gem::Specification
+name: document-store
+version: !ruby/object:Gem::Version
+  version: 1.0.0
+platform: ruby
+authors:
+- Mikael Wikman
+autorequire:
+bindir: bin
+cert_chain: []
+date: 2013-10-07 00:00:00.000000000 Z
+dependencies:
+- !ruby/object:Gem::Dependency
+  name: em-synchrony
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+- !ruby/object:Gem::Dependency
+  name: em-mongo
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - '='
+      - !ruby/object:Gem::Version
+        version: 0.4.3
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - '='
+      - !ruby/object:Gem::Version
+        version: 0.4.3
+description: This wrapper provides a minimalistic interface to document-based databases.
+  It includes an in-memory store that can be easily used for writing tests, as well
+  as an in-memory cached version of each implementation.
+email:
+- mikael@wikman.me
+executables: []
+extensions: []
+extra_rdoc_files: []
+files:
+- CHANGELOG.mdown
+- Gemfile
+- Gemfile.lock
+- Rakefile
+- app.gemspec
+- lib/store/cache.rb
+- lib/store/caches/in_memory.rb
+- lib/store/caches/memcached.rb
+- lib/store/memory.rb
+- lib/store/mongodb.rb
+- test/store_test.rb
+- test/test_helper.rb
+homepage: https://github.com/mikaelwikman/document-store
+licenses: []
+metadata: {}
+post_install_message:
+rdoc_options: []
+require_paths:
+- lib
+required_ruby_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - '>='
+    - !ruby/object:Gem::Version
+      version: '0'
+required_rubygems_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - '>='
+    - !ruby/object:Gem::Version
+      version: '0'
+requirements: []
+rubyforge_project:
+rubygems_version: 2.0.3
+signing_key:
+specification_version: 4
+summary: A wrapper around document-based databases to provide a minimalistic interface
+  that can be easily changed
+test_files:
+- test/store_test.rb
+- test/test_helper.rb