mongoid_fulltext 0.6.1 → 0.7.0
- checksums.yaml +15 -0
- data/.gitignore +47 -0
- data/.rspec +1 -1
- data/.rubocop.yml +6 -0
- data/.rubocop_todo.yml +101 -0
- data/.travis.yml +11 -3
- data/CHANGELOG.md +9 -2
- data/Gemfile +19 -9
- data/LICENSE +1 -1
- data/README.md +12 -9
- data/Rakefile +9 -29
- data/lib/mongoid/full_text_search/version.rb +5 -0
- data/lib/mongoid/full_text_search.rb +372 -0
- data/lib/mongoid/indexable.rb +13 -0
- data/lib/mongoid/indexes.rb +13 -0
- data/lib/mongoid_fulltext.rb +1 -341
- data/mongoid_fulltext.gemspec +16 -82
- data/spec/models/accentless_artwork.rb +1 -1
- data/spec/models/advanced_artwork.rb +1 -1
- data/spec/models/basic_artwork.rb +0 -1
- data/spec/models/delayed_artwork.rb +1 -2
- data/spec/models/external_artist.rb +1 -2
- data/spec/models/external_artwork.rb +1 -2
- data/spec/models/external_artwork_no_fields_supplied.rb +2 -2
- data/spec/models/filtered_artist.rb +4 -4
- data/spec/models/filtered_artwork.rb +7 -7
- data/spec/models/filtered_other.rb +3 -3
- data/spec/models/hidden_dragon.rb +0 -1
- data/spec/models/multi_external_artwork.rb +3 -3
- data/spec/models/multi_field_artist.rb +1 -1
- data/spec/models/multi_field_artwork.rb +1 -1
- data/spec/models/partitioned_artist.rb +8 -9
- data/spec/models/russian_artwork.rb +2 -2
- data/spec/models/short_prefixes_artwork.rb +3 -4
- data/spec/models/stopwords_artwork.rb +3 -4
- data/spec/mongoid/full_text_search_spec.rb +752 -0
- data/spec/spec_helper.rb +11 -7
- metadata +27 -68
- data/VERSION +0 -1
- data/lib/mongoid_indexes.rb +0 -12
- data/spec/config/mongoid.yml +0 -6
- data/spec/mongoid/fulltext_spec.rb +0 -799
data/lib/mongoid/indexable.rb
ADDED
@@ -0,0 +1,13 @@
+# hook onto model index creation to create related FT indexes
+module Mongoid
+  module Indexable
+    module ClassMethods
+      alias_method :create_fulltext_indexes_hook, :create_indexes
+
+      def create_indexes
+        create_fulltext_indexes if respond_to?(:create_fulltext_indexes)
+        create_fulltext_indexes_hook
+      end
+    end
+  end
+end
data/lib/mongoid/indexes.rb
ADDED
@@ -0,0 +1,13 @@
+# hook onto model index creation to create related FT indexes
+module Mongoid
+  module Indexes
+    module ClassMethods
+      alias_method :create_fulltext_indexes_hook, :create_indexes
+
+      def create_indexes
+        create_fulltext_indexes if respond_to?(:create_fulltext_indexes)
+        create_fulltext_indexes_hook
+      end
+    end
+  end
+end
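Both additions apply the same hook to the index module Mongoid exposes (Mongoid::Indexes in Mongoid 3, Mongoid::Indexable in newer versions): create_indexes is aliased and wrapped so the gem's external full-text index collections are ensured alongside a model's regular indexes. A minimal sketch of the effect, assuming a hypothetical Artwork model (normally reached via rake db:mongoid:create_indexes):

class Artwork
  include Mongoid::Document
  include Mongoid::FullTextSearch

  field :title
  fulltext_search_in :title
end

# With the hook above loaded, this now ensures the 'fts_index' and
# 'document_id' indexes on the external n-gram collection, then calls
# the original implementation via create_fulltext_indexes_hook.
Artwork.create_indexes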
data/lib/mongoid_fulltext.rb
CHANGED
@@ -1,341 +1 @@
-require '
-require 'unicode_utils'
-require 'cgi'
-
-module Mongoid::FullTextSearch
-  extend ActiveSupport::Concern
-
-  included do
-    cattr_accessor :mongoid_fulltext_config
-  end
-
-  class UnspecifiedIndexError < StandardError; end
-  class UnknownFilterQueryOperator < StandardError; end
-
-  module ClassMethods
-
-    def fulltext_search_in(*args)
-      self.mongoid_fulltext_config = {} if self.mongoid_fulltext_config.nil?
-      options = args.last.is_a?(Hash) ? args.pop : {}
-      if options.has_key?(:index_name)
-        index_name = options[:index_name]
-      else
-        index_name = 'mongoid_fulltext.index_%s_%s' % [self.name.downcase, self.mongoid_fulltext_config.count]
-      end
-
-      config = {
-        :alphabet => 'abcdefghijklmnopqrstuvwxyz0123456789 ',
-        :word_separators => "-_ \n\t",
-        :ngram_width => 3,
-        :max_ngrams_to_search => 6,
-        :apply_prefix_scoring_to_all_words => true,
-        :index_full_words => true,
-        :index_short_prefixes => false,
-        :max_candidate_set_size => 1000,
-        :remove_accents => true,
-        :reindex_immediately => true,
-        :stop_words => Hash[['i', 'a', 's', 't', 'me', 'my', 'we', 'he', 'it', 'am', 'is', 'be', 'do', 'an', 'if',
-                             'or', 'as', 'of', 'at', 'by', 'to', 'up', 'in', 'on', 'no', 'so', 'our', 'you', 'him',
-                             'his', 'she', 'her', 'its', 'who', 'are', 'was', 'has', 'had', 'did', 'the', 'and',
-                             'but', 'for', 'out', 'off', 'why', 'how', 'all', 'any', 'few', 'nor', 'not', 'own',
-                             'too', 'can', 'don', 'now', 'ours', 'your', 'hers', 'they', 'them', 'what', 'whom',
-                             'this', 'that', 'were', 'been', 'have', 'does', 'with', 'into', 'from', 'down', 'over',
-                             'then', 'once', 'here', 'when', 'both', 'each', 'more', 'most', 'some', 'such', 'only',
-                             'same', 'than', 'very', 'will', 'just', 'yours', 'their', 'which', 'these', 'those',
-                             'being', 'doing', 'until', 'while', 'about', 'after', 'above', 'below', 'under',
-                             'again', 'there', 'where', 'other', 'myself', 'itself', 'theirs', 'having', 'during',
-                             'before', 'should', 'himself', 'herself', 'because', 'against', 'between', 'through',
-                             'further', 'yourself', 'ourselves', 'yourselves', 'themselves'].map{ |x| [x,true] }]
-      }
-
-      config.update(options)
-
-      args = [:to_s] if args.empty?
-      config[:ngram_fields] = args
-      config[:alphabet] = Hash[config[:alphabet].split('').map{ |ch| [ch,ch] }]
-      config[:word_separators] = Hash[config[:word_separators].split('').map{ |ch| [ch,ch] }]
-      self.mongoid_fulltext_config[index_name] = config
-
-      before_save(:update_ngram_index) if config[:reindex_immediately]
-      before_destroy :remove_from_ngram_index
-    end
-
-    def create_fulltext_indexes
-      return unless self.mongoid_fulltext_config
-      self.mongoid_fulltext_config.each_pair do |index_name, fulltext_config|
-        fulltext_search_ensure_indexes(index_name, fulltext_config)
-      end
-    end
-
-    def fulltext_search_ensure_indexes(index_name, config)
-      db = collection.database
-      coll = db[index_name]
-
-      # The order of filters matters when the same index is used from two or more collections.
-      filter_indexes = (config[:filters] || []).map do |key,value|
-        ["filter_values.#{key}", 1]
-      end.sort_by { |filter_index| filter_index[0] }
-
-      index_definition = [['ngram', 1], ['score', -1]].concat(filter_indexes)
-
-      # Since the definition of the index could have changed, we'll clean up by
-      # removing any indexes that aren't on the exact.
-      correct_keys = index_definition.map{ |field_def| field_def[0] }
-      all_filter_keys = filter_indexes.map{ |field_def| field_def[0] }
-      coll.indexes.each do |idef|
-        keys = idef['key'].keys
-        next if !keys.member?('ngram')
-        all_filter_keys |= keys.find_all{ |key| key.starts_with?('filter_values.') }
-        if keys & correct_keys != correct_keys
-          Mongoid.logger.info "Dropping #{idef['name']} [#{keys & correct_keys} <=> #{correct_keys}]" if Mongoid.logger
-          coll.indexes.drop(idef['key'])
-        end
-      end
-
-      if all_filter_keys.length > filter_indexes.length
-        filter_indexes = all_filter_keys.map {|key| [key, 1] }.sort_by { |filter_index| filter_index[0] }
-        index_definition = [['ngram', 1], ['score', -1]].concat(filter_indexes)
-      end
-
-      Mongoid.logger.info "Ensuring fts_index on #{coll.name}: #{index_definition}" if Mongoid.logger
-      coll.indexes.create(Hash[index_definition], { :name => 'fts_index' })
-
-      Mongoid.logger.info "Ensuring document_id index on #{coll.name}" if Mongoid.logger
-      coll.indexes.create('document_id' => 1) # to make removes fast
-    end
-
-    def fulltext_search(query_string, options={})
-      max_results = options.has_key?(:max_results) ? options.delete(:max_results) : 10
-      return_scores = options.has_key?(:return_scores) ? options.delete(:return_scores) : false
-      if self.mongoid_fulltext_config.count > 1 and !options.has_key?(:index)
-        error_message = '%s is indexed by multiple full-text indexes. You must specify one by passing an :index_name parameter'
-        raise UnspecifiedIndexError, error_message % self.name, caller
-      end
-      index_name = options.has_key?(:index) ? options.delete(:index) : self.mongoid_fulltext_config.keys.first
-
-      # Options hash should only contain filters after this point
-
-      ngrams = all_ngrams(query_string, self.mongoid_fulltext_config[index_name])
-      return [] if ngrams.empty?
-
-      # For each ngram, construct the query we'll use to pull index documents and
-      # get a count of the number of index documents containing that n-gram
-      ordering = {'score' => -1}
-      limit = self.mongoid_fulltext_config[index_name][:max_candidate_set_size]
-      coll = collection.database[index_name]
-      cursors = ngrams.map do |ngram|
-        query = {'ngram' => ngram[0]}
-        query.update(map_query_filters options)
-        count = coll.find(query).count
-        {:ngram => ngram, :count => count, :query => query}
-      end.sort!{ |record1, record2| record1[:count] <=> record2[:count] }
-
-      # Using the queries we just constructed and the n-gram frequency counts we
-      # just computed, pull in about *:max_candidate_set_size* candidates by
-      # considering the n-grams in order of increasing frequency. When we've
-      # spent all *:max_candidate_set_size* candidates, pull the top-scoring
-      # *max_results* candidates for each remaining n-gram.
-      results_so_far = 0
-      candidates_list = cursors.map do |doc|
-        next if doc[:count] == 0
-        query_result = coll.find(doc[:query])
-        if results_so_far >= limit
-          query_result = query_result.sort(ordering).limit(max_results)
-        elsif doc[:count] > limit - results_so_far
-          query_result = query_result.sort(ordering).limit(limit - results_so_far)
-        end
-        results_so_far += doc[:count]
-        ngram_score = ngrams[doc[:ngram][0]]
-        Hash[query_result.map do |candidate|
-          [candidate['document_id'],
-           {:clazz => candidate['class'], :score => candidate['score'] * ngram_score}]
-        end]
-      end.compact
-
-      # Finally, score all candidates by matching them up with other candidates that are
-      # associated with the same document. This is similar to how you might process a
-      # boolean AND query, except that with an AND query, you'd stop after considering
-      # the first candidate list and matching its candidates up with candidates from other
-      # lists, whereas here we want the search to be a little fuzzier so we'll run through
-      # all candidate lists, removing candidates as we match them up.
-      all_scores = []
-      while !candidates_list.empty?
-        candidates = candidates_list.pop
-        scores = candidates.map do |candidate_id, data|
-          {:id => candidate_id,
-           :clazz => data[:clazz],
-           :score => data[:score] + candidates_list.map{ |others| (others.delete(candidate_id) || {:score => 0})[:score] }.sum
-          }
-        end
-        all_scores.concat(scores)
-      end
-      all_scores.sort!{ |document1, document2| -document1[:score] <=> -document2[:score] }
-      instantiate_mapreduce_results(all_scores[0..max_results-1], { :return_scores => return_scores })
-    end
-
-    def instantiate_mapreduce_result(result)
-      result[:clazz].constantize.find(result[:id])
-    end
-
-    def instantiate_mapreduce_results(results, options)
-      if (options[:return_scores])
-        results.map { |result| [ instantiate_mapreduce_result(result), result[:score] ] }.find_all { |result| ! result[0].nil? }
-      else
-        results.map { |result| instantiate_mapreduce_result(result) }.compact
-      end
-    end
-
-    def all_ngrams(str, config, bound_number_returned = true)
-      return {} if str.nil?
-
-      if config[:remove_accents]
-        if defined?(UnicodeUtils)
-          str = UnicodeUtils.nfkd(str)
-        elsif defined?(DiacriticsFu)
-          str = DiacriticsFu::escape(str)
-        end
-      end
-
-      # Remove any characters that aren't in the alphabet and aren't word separators
-      filtered_str = str.mb_chars.downcase.to_s.split('').find_all{ |ch| config[:alphabet][ch] or config[:word_separators][ch] }.join('')
-
-      # Figure out how many ngrams to extract from the string. If we can't afford to extract all ngrams,
-      # step over the string in evenly spaced strides to extract ngrams. For example, to extract 3 3-letter
-      # ngrams from 'abcdefghijk', we'd want to extract 'abc', 'efg', and 'ijk'.
-      if bound_number_returned
-        step_size = [((filtered_str.length - config[:ngram_width]).to_f / config[:max_ngrams_to_search]).ceil, 1].max
-      else
-        step_size = 1
-      end
-
-      # Create an array of records of the form {:ngram => x, :score => y} for all ngrams that occur in the
-      # input string using the step size that we just computed. Let score(x,y) be the score of string x
-      # compared with string y - assigning scores to ngrams with the square root-based scoring function
-      # below and multiplying scores of matching ngrams together yields a score function that has the
-      # property that score(x,y) > score(x,z) for any string z containing y and score(x,y) > score(x,z)
-      # for any string z contained in y.
-      ngram_array = (0..filtered_str.length - config[:ngram_width]).step(step_size).map do |i|
-        if i == 0 or (config[:apply_prefix_scoring_to_all_words] and \
-                      config[:word_separators].has_key?(filtered_str[i-1].chr))
-          score = Math.sqrt(1 + 1.0/filtered_str.length)
-        else
-          score = Math.sqrt(2.0/filtered_str.length)
-        end
-        {:ngram => filtered_str[i..i+config[:ngram_width]-1], :score => score}
-      end
-
-      # If an ngram appears multiple times in the query string, keep the max score
-      ngram_array = ngram_array.group_by{ |h| h[:ngram] }.map{ |key, values| {:ngram => key, :score => values.map{ |v| v[:score] }.max} }
-
-      if config[:index_short_prefixes] or config[:index_full_words]
-        split_regex_def = config[:word_separators].keys.map{ |k| Regexp.escape(k) }.join
-        split_regex = Regexp.compile("[#{split_regex_def}]")
-        all_words = filtered_str.split(split_regex)
-      end
-
-      # Add 'short prefix' records to the array: prefixes of the string that are length (ngram_width - 1)
-      if config[:index_short_prefixes]
-        prefixes_seen = {}
-        all_words.each do |word|
-          next if word.length < config[:ngram_width]-1
-          prefix = word[0...config[:ngram_width]-1]
-          if prefixes_seen[prefix].nil? and (config[:stop_words][word].nil? or word == filtered_str)
-            ngram_array << {:ngram => prefix, :score => 1 + 1.0/filtered_str.length}
-            prefixes_seen[prefix] = true
-          end
-        end
-      end
-
-      # Add records to the array of ngrams for each full word in the string that isn't a stop word
-      if config[:index_full_words]
-        full_words_seen = {}
-        all_words.each do |word|
-          if word.length > 1 and full_words_seen[word].nil? and (config[:stop_words][word].nil? or word == filtered_str)
-            ngram_array << {:ngram => word, :score => 1 + 1.0/filtered_str.length}
-            full_words_seen[word] = true
-          end
-        end
-      end
-
-      # If an ngram appears as any combination of full word, short prefix, and ngram, keep the sum of the two scores
-      Hash[ngram_array.group_by{ |h| h[:ngram] }.map{ |key, values| [key, values.map{ |v| v[:score] }.sum] }]
-    end
-
-    def remove_from_ngram_index
-      self.mongoid_fulltext_config.each_pair do |index_name, fulltext_config|
-        coll = collection.database[index_name]
-        coll.find({'class' => self.name}).remove_all
-      end
-    end
-
-    def update_ngram_index
-      self.all.each do |model|
-        model.update_ngram_index
-      end
-    end
-
-    private
-    # Take a list of filters to be mapped so they can update the query
-    # used upon the fulltext search of the ngrams
-    def map_query_filters filters
-      Hash[filters.map {|key,value|
-        case value
-        when Hash then
-          if value.has_key? :any then format_query_filter('$in',key,value[:any])
-          elsif value.has_key? :all then format_query_filter('$all',key,value[:all])
-          else raise UnknownFilterQueryOperator, value.keys.join(","), caller end
-        else format_query_filter('$all',key,value)
-        end
-      }]
-    end
-    def format_query_filter operator, key, value
-      ['filter_values.%s' % key, {operator => [value].flatten}]
-    end
-  end
-
-  def update_ngram_index
-    self.mongoid_fulltext_config.each_pair do |index_name, fulltext_config|
-      if condition = fulltext_config[:update_if]
-        case condition
-        when Symbol; next unless self.send condition
-        when String; next unless instance_eval condition
-        when Proc; next unless condition.call self
-        else; next
-        end
-      end
-
-      # remove existing ngrams from external index
-      coll = collection.database[index_name.to_sym]
-      coll.find({'document_id' => self._id}).remove_all
-      # extract ngrams from fields
-      field_values = fulltext_config[:ngram_fields].map { |field| self.send(field) }
-      ngrams = field_values.inject({}) { |accum, item| accum.update(self.class.all_ngrams(item, fulltext_config, false))}
-      return if ngrams.empty?
-      # apply filters, if necessary
-      filter_values = nil
-      if fulltext_config.has_key?(:filters)
-        filter_values = Hash[fulltext_config[:filters].map do |key,value|
-          begin
-            [key, value.call(self)]
-          rescue
-            # Suppress any exceptions caused by filters
-          end
-        end.compact]
-      end
-      # insert new ngrams in external index
-      ngrams.each_pair do |ngram, score|
-        index_document = {'ngram' => ngram, 'document_id' => self._id, 'score' => score, 'class' => self.class.name}
-        index_document['filter_values'] = filter_values if fulltext_config.has_key?(:filters)
-        coll.insert(index_document)
-      end
-    end
-  end
-
-  def remove_from_ngram_index
-    self.mongoid_fulltext_config.each_pair do |index_name, fulltext_config|
-      coll = collection.database[index_name]
-      coll.find({'document_id' => self._id}).remove_all
-    end
-  end
-
-end
+require 'mongoid/full_text_search'
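The old single-file implementation above is replaced by a one-line require of the extracted concern. A brief usage sketch of the fulltext_search_in / fulltext_search API it defines, assuming a hypothetical Artwork model with a title field (option names are taken from the removed code above):

require 'mongoid/full_text_search'

class Artwork
  include Mongoid::Document
  include Mongoid::FullTextSearch

  field :title
  # index the title field in a named external n-gram collection
  fulltext_search_in :title, :index_name => 'mongoid_fulltext.artwork_index'
end

# returns up to 5 Artwork instances ranked by n-gram score
Artwork.fulltext_search('mona lis', :max_results => 5)

# returns [document, score] pairs instead of bare documents
Artwork.fulltext_search('mona lis', :return_scores => true)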
data/mongoid_fulltext.gemspec
CHANGED
@@ -1,85 +1,19 @@
-
-
-# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
-# -*- encoding: utf-8 -*-
+$LOAD_PATH.push File.expand_path('../lib', __FILE__)
+require 'mongoid/full_text_search/version'
 
 Gem::Specification.new do |s|
-  s.name =
-  s.version =
-
-  s.
-  s.
-  s.
-  s.
-  s.
-  s.
-
-
-
-  s.
-
-    ".rspec",
-    ".travis.yml",
-    "CHANGELOG.md",
-    "Gemfile",
-    "LICENSE",
-    "README.md",
-    "Rakefile",
-    "VERSION",
-    "lib/mongoid_fulltext.rb",
-    "lib/mongoid_indexes.rb",
-    "mongoid_fulltext.gemspec",
-    "spec/config/mongoid.yml",
-    "spec/models/accentless_artwork.rb",
-    "spec/models/advanced_artwork.rb",
-    "spec/models/basic_artwork.rb",
-    "spec/models/delayed_artwork.rb",
-    "spec/models/external_artist.rb",
-    "spec/models/external_artwork.rb",
-    "spec/models/external_artwork_no_fields_supplied.rb",
-    "spec/models/filtered_artist.rb",
-    "spec/models/filtered_artwork.rb",
-    "spec/models/filtered_other.rb",
-    "spec/models/gallery/basic_artwork.rb",
-    "spec/models/hidden_dragon.rb",
-    "spec/models/multi_external_artwork.rb",
-    "spec/models/multi_field_artist.rb",
-    "spec/models/multi_field_artwork.rb",
-    "spec/models/partitioned_artist.rb",
-    "spec/models/russian_artwork.rb",
-    "spec/models/short_prefixes_artwork.rb",
-    "spec/models/stopwords_artwork.rb",
-    "spec/mongoid/fulltext_spec.rb",
-    "spec/spec_helper.rb"
-  ]
-  s.homepage = "http://github.com/aaw/mongoid_fulltext"
-  s.licenses = ["MIT"]
-  s.require_paths = ["lib"]
-  s.rubygems_version = "1.8.25"
-  s.summary = "Full-text search for the Mongoid ORM"
-
-  if s.respond_to? :specification_version then
-    s.specification_version = 3
-
-    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
-      s.add_runtime_dependency(%q<mongoid>, ["~> 3.0"])
-      s.add_runtime_dependency(%q<unicode_utils>, ["~> 1.0.0"])
-      s.add_development_dependency(%q<bundler>, [">= 0"])
-      s.add_development_dependency(%q<rspec>, ["~> 2.10.0"])
-      s.add_development_dependency(%q<jeweler>, ["~> 1.8.3"])
-    else
-      s.add_dependency(%q<mongoid>, ["~> 3.0"])
-      s.add_dependency(%q<unicode_utils>, ["~> 1.0.0"])
-      s.add_dependency(%q<bundler>, [">= 0"])
-      s.add_dependency(%q<rspec>, ["~> 2.10.0"])
-      s.add_dependency(%q<jeweler>, ["~> 1.8.3"])
-    end
-  else
-    s.add_dependency(%q<mongoid>, ["~> 3.0"])
-    s.add_dependency(%q<unicode_utils>, ["~> 1.0.0"])
-    s.add_dependency(%q<bundler>, [">= 0"])
-    s.add_dependency(%q<rspec>, ["~> 2.10.0"])
-    s.add_dependency(%q<jeweler>, ["~> 1.8.3"])
-  end
+  s.name = 'mongoid_fulltext'
+  s.version = Mongoid::FullTextSearch::VERSION
+  s.authors = ['Aaron Windsor']
+  s.email = 'aaron.windsor@gmail.com'
+  s.platform = Gem::Platform::RUBY
+  s.required_rubygems_version = '>= 1.3.6'
+  s.files = `git ls-files`.split("\n")
+  s.require_paths = ['lib']
+  s.homepage = 'https://github.com/artsy/mongoid_fulltext'
+  s.licenses = ['MIT']
+  s.summary = 'Full-text search for the Mongoid ORM, using n-grams extracted from text.'
+  s.add_dependency 'mongoid', '>= 3.0'
+  s.add_dependency 'mongoid-compatibility'
+  s.add_dependency 'unicode_utils'
 end
-
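The regenerated gemspec drops the jeweler boilerplate, reads the version from Mongoid::FullTextSearch::VERSION, relaxes the mongoid constraint from ~> 3.0 to >= 3.0, and adds mongoid-compatibility as a runtime dependency. A minimal sketch of pulling in this release from a Gemfile (the pessimistic constraint is an assumption based on the 0.7.0 version in this diff):

# Gemfile
source 'https://rubygems.org'

gem 'mongoid', '>= 3.0'
gem 'mongoid_fulltext', '~> 0.7.0'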