ferret 0.9.6 → 0.10.0
Sign up to get free protection for your applications and to get access to all the features.
- data/MIT-LICENSE +1 -1
- data/README +12 -24
- data/Rakefile +38 -54
- data/TODO +14 -17
- data/ext/analysis.c +982 -823
- data/ext/analysis.h +133 -76
- data/ext/array.c +96 -58
- data/ext/array.h +40 -13
- data/ext/bitvector.c +476 -118
- data/ext/bitvector.h +264 -22
- data/ext/compound_io.c +217 -229
- data/ext/defines.h +49 -0
- data/ext/document.c +107 -317
- data/ext/document.h +31 -65
- data/ext/except.c +81 -36
- data/ext/except.h +117 -55
- data/ext/extconf.rb +2 -9
- data/ext/ferret.c +211 -104
- data/ext/ferret.h +22 -11
- data/ext/filter.c +97 -82
- data/ext/fs_store.c +348 -367
- data/ext/global.c +226 -188
- data/ext/global.h +44 -26
- data/ext/hash.c +474 -391
- data/ext/hash.h +441 -68
- data/ext/hashset.c +124 -96
- data/ext/hashset.h +169 -20
- data/ext/helper.c +56 -5
- data/ext/helper.h +7 -0
- data/ext/inc/lang.h +29 -49
- data/ext/inc/threading.h +31 -0
- data/ext/ind.c +288 -278
- data/ext/ind.h +68 -0
- data/ext/index.c +5688 -0
- data/ext/index.h +663 -616
- data/ext/lang.h +29 -49
- data/ext/libstemmer.c +3 -3
- data/ext/mem_pool.c +84 -0
- data/ext/mem_pool.h +35 -0
- data/ext/posh.c +1006 -0
- data/ext/posh.h +1007 -0
- data/ext/priorityqueue.c +117 -194
- data/ext/priorityqueue.h +135 -39
- data/ext/q_boolean.c +1305 -1108
- data/ext/q_const_score.c +106 -93
- data/ext/q_filtered_query.c +138 -135
- data/ext/q_fuzzy.c +206 -242
- data/ext/q_match_all.c +94 -80
- data/ext/q_multi_term.c +663 -0
- data/ext/q_parser.c +667 -593
- data/ext/q_phrase.c +992 -555
- data/ext/q_prefix.c +72 -61
- data/ext/q_range.c +235 -210
- data/ext/q_span.c +1480 -1166
- data/ext/q_term.c +273 -246
- data/ext/q_wildcard.c +127 -114
- data/ext/r_analysis.c +1720 -711
- data/ext/r_index.c +3049 -0
- data/ext/r_qparser.c +433 -146
- data/ext/r_search.c +2934 -1993
- data/ext/r_store.c +372 -143
- data/ext/r_utils.c +941 -0
- data/ext/ram_store.c +330 -326
- data/ext/search.c +1291 -668
- data/ext/search.h +403 -702
- data/ext/similarity.c +91 -113
- data/ext/similarity.h +45 -30
- data/ext/sort.c +721 -484
- data/ext/stopwords.c +361 -273
- data/ext/store.c +556 -58
- data/ext/store.h +706 -126
- data/ext/tags +3578 -2780
- data/ext/term_vectors.c +352 -0
- data/ext/threading.h +31 -0
- data/ext/win32.h +54 -0
- data/lib/ferret.rb +5 -17
- data/lib/ferret/document.rb +130 -2
- data/lib/ferret/index.rb +577 -26
- data/lib/ferret/number_tools.rb +157 -0
- data/lib/ferret_version.rb +3 -0
- data/test/test_helper.rb +5 -13
- data/test/unit/analysis/tc_analyzer.rb +513 -1
- data/test/unit/analysis/{ctc_tokenstream.rb → tc_token_stream.rb} +23 -0
- data/test/unit/index/tc_index.rb +183 -240
- data/test/unit/index/tc_index_reader.rb +312 -479
- data/test/unit/index/tc_index_writer.rb +397 -13
- data/test/unit/index/th_doc.rb +269 -206
- data/test/unit/query_parser/tc_query_parser.rb +40 -33
- data/test/unit/search/tc_filter.rb +59 -71
- data/test/unit/search/tc_fuzzy_query.rb +24 -16
- data/test/unit/search/tc_index_searcher.rb +23 -201
- data/test/unit/search/tc_multi_searcher.rb +78 -226
- data/test/unit/search/tc_search_and_sort.rb +93 -81
- data/test/unit/search/tc_sort.rb +23 -23
- data/test/unit/search/tc_sort_field.rb +7 -7
- data/test/unit/search/tc_spans.rb +51 -47
- data/test/unit/search/tm_searcher.rb +339 -0
- data/test/unit/store/tc_fs_store.rb +1 -1
- data/test/unit/store/tm_store_lock.rb +3 -3
- data/test/unit/tc_document.rb +81 -0
- data/test/unit/ts_analysis.rb +1 -1
- data/test/unit/ts_utils.rb +1 -1
- data/test/unit/utils/tc_bit_vector.rb +288 -0
- data/test/unit/utils/tc_number_tools.rb +117 -0
- data/test/unit/utils/tc_priority_queue.rb +106 -0
- metadata +140 -301
- data/CHANGELOG +0 -9
- data/ext/dummy.exe +0 -0
- data/ext/field.c +0 -408
- data/ext/frtio.h +0 -13
- data/ext/inc/except.h +0 -90
- data/ext/index_io.c +0 -382
- data/ext/index_rw.c +0 -2658
- data/ext/lang.c +0 -41
- data/ext/nix_io.c +0 -134
- data/ext/q_multi_phrase.c +0 -380
- data/ext/r_doc.c +0 -582
- data/ext/r_index_io.c +0 -1021
- data/ext/r_term.c +0 -219
- data/ext/term.c +0 -820
- data/ext/termdocs.c +0 -611
- data/ext/vector.c +0 -637
- data/ext/w32_io.c +0 -150
- data/lib/ferret/analysis.rb +0 -11
- data/lib/ferret/analysis/analyzers.rb +0 -112
- data/lib/ferret/analysis/standard_tokenizer.rb +0 -71
- data/lib/ferret/analysis/token.rb +0 -100
- data/lib/ferret/analysis/token_filters.rb +0 -86
- data/lib/ferret/analysis/token_stream.rb +0 -26
- data/lib/ferret/analysis/tokenizers.rb +0 -112
- data/lib/ferret/analysis/word_list_loader.rb +0 -27
- data/lib/ferret/document/document.rb +0 -152
- data/lib/ferret/document/field.rb +0 -312
- data/lib/ferret/index/compound_file_io.rb +0 -338
- data/lib/ferret/index/document_writer.rb +0 -289
- data/lib/ferret/index/field_infos.rb +0 -279
- data/lib/ferret/index/fields_io.rb +0 -181
- data/lib/ferret/index/index.rb +0 -675
- data/lib/ferret/index/index_file_names.rb +0 -33
- data/lib/ferret/index/index_reader.rb +0 -503
- data/lib/ferret/index/index_writer.rb +0 -534
- data/lib/ferret/index/multi_reader.rb +0 -377
- data/lib/ferret/index/multiple_term_doc_pos_enum.rb +0 -98
- data/lib/ferret/index/segment_infos.rb +0 -130
- data/lib/ferret/index/segment_merge_info.rb +0 -49
- data/lib/ferret/index/segment_merge_queue.rb +0 -16
- data/lib/ferret/index/segment_merger.rb +0 -358
- data/lib/ferret/index/segment_reader.rb +0 -412
- data/lib/ferret/index/segment_term_enum.rb +0 -169
- data/lib/ferret/index/segment_term_vector.rb +0 -58
- data/lib/ferret/index/term.rb +0 -53
- data/lib/ferret/index/term_buffer.rb +0 -83
- data/lib/ferret/index/term_doc_enum.rb +0 -291
- data/lib/ferret/index/term_enum.rb +0 -52
- data/lib/ferret/index/term_info.rb +0 -37
- data/lib/ferret/index/term_infos_io.rb +0 -321
- data/lib/ferret/index/term_vector_offset_info.rb +0 -20
- data/lib/ferret/index/term_vectors_io.rb +0 -553
- data/lib/ferret/query_parser.rb +0 -312
- data/lib/ferret/query_parser/query_parser.tab.rb +0 -928
- data/lib/ferret/search.rb +0 -50
- data/lib/ferret/search/boolean_clause.rb +0 -100
- data/lib/ferret/search/boolean_query.rb +0 -299
- data/lib/ferret/search/boolean_scorer.rb +0 -294
- data/lib/ferret/search/caching_wrapper_filter.rb +0 -40
- data/lib/ferret/search/conjunction_scorer.rb +0 -99
- data/lib/ferret/search/disjunction_sum_scorer.rb +0 -205
- data/lib/ferret/search/exact_phrase_scorer.rb +0 -32
- data/lib/ferret/search/explanation.rb +0 -41
- data/lib/ferret/search/field_cache.rb +0 -215
- data/lib/ferret/search/field_doc.rb +0 -31
- data/lib/ferret/search/field_sorted_hit_queue.rb +0 -184
- data/lib/ferret/search/filter.rb +0 -11
- data/lib/ferret/search/filtered_query.rb +0 -130
- data/lib/ferret/search/filtered_term_enum.rb +0 -79
- data/lib/ferret/search/fuzzy_query.rb +0 -154
- data/lib/ferret/search/fuzzy_term_enum.rb +0 -247
- data/lib/ferret/search/hit_collector.rb +0 -34
- data/lib/ferret/search/hit_queue.rb +0 -11
- data/lib/ferret/search/index_searcher.rb +0 -200
- data/lib/ferret/search/match_all_query.rb +0 -104
- data/lib/ferret/search/multi_phrase_query.rb +0 -216
- data/lib/ferret/search/multi_searcher.rb +0 -261
- data/lib/ferret/search/multi_term_query.rb +0 -65
- data/lib/ferret/search/non_matching_scorer.rb +0 -22
- data/lib/ferret/search/phrase_positions.rb +0 -55
- data/lib/ferret/search/phrase_query.rb +0 -214
- data/lib/ferret/search/phrase_scorer.rb +0 -152
- data/lib/ferret/search/prefix_query.rb +0 -54
- data/lib/ferret/search/query.rb +0 -140
- data/lib/ferret/search/query_filter.rb +0 -51
- data/lib/ferret/search/range_filter.rb +0 -103
- data/lib/ferret/search/range_query.rb +0 -139
- data/lib/ferret/search/req_excl_scorer.rb +0 -125
- data/lib/ferret/search/req_opt_sum_scorer.rb +0 -70
- data/lib/ferret/search/score_doc.rb +0 -38
- data/lib/ferret/search/score_doc_comparator.rb +0 -114
- data/lib/ferret/search/scorer.rb +0 -91
- data/lib/ferret/search/similarity.rb +0 -278
- data/lib/ferret/search/sloppy_phrase_scorer.rb +0 -47
- data/lib/ferret/search/sort.rb +0 -112
- data/lib/ferret/search/sort_comparator.rb +0 -60
- data/lib/ferret/search/sort_field.rb +0 -91
- data/lib/ferret/search/spans.rb +0 -12
- data/lib/ferret/search/spans/near_spans_enum.rb +0 -304
- data/lib/ferret/search/spans/span_first_query.rb +0 -79
- data/lib/ferret/search/spans/span_near_query.rb +0 -108
- data/lib/ferret/search/spans/span_not_query.rb +0 -130
- data/lib/ferret/search/spans/span_or_query.rb +0 -176
- data/lib/ferret/search/spans/span_query.rb +0 -25
- data/lib/ferret/search/spans/span_scorer.rb +0 -74
- data/lib/ferret/search/spans/span_term_query.rb +0 -105
- data/lib/ferret/search/spans/span_weight.rb +0 -84
- data/lib/ferret/search/spans/spans_enum.rb +0 -44
- data/lib/ferret/search/term_query.rb +0 -128
- data/lib/ferret/search/term_scorer.rb +0 -183
- data/lib/ferret/search/top_docs.rb +0 -36
- data/lib/ferret/search/top_field_docs.rb +0 -17
- data/lib/ferret/search/weight.rb +0 -54
- data/lib/ferret/search/wildcard_query.rb +0 -26
- data/lib/ferret/search/wildcard_term_enum.rb +0 -61
- data/lib/ferret/stemmers.rb +0 -1
- data/lib/ferret/stemmers/porter_stemmer.rb +0 -218
- data/lib/ferret/store.rb +0 -5
- data/lib/ferret/store/buffered_index_io.rb +0 -190
- data/lib/ferret/store/directory.rb +0 -141
- data/lib/ferret/store/fs_store.rb +0 -381
- data/lib/ferret/store/index_io.rb +0 -245
- data/lib/ferret/store/ram_store.rb +0 -286
- data/lib/ferret/utils.rb +0 -8
- data/lib/ferret/utils/bit_vector.rb +0 -123
- data/lib/ferret/utils/date_tools.rb +0 -138
- data/lib/ferret/utils/number_tools.rb +0 -91
- data/lib/ferret/utils/parameter.rb +0 -41
- data/lib/ferret/utils/priority_queue.rb +0 -120
- data/lib/ferret/utils/string_helper.rb +0 -47
- data/lib/ferret/utils/thread_local.rb +0 -28
- data/lib/ferret/utils/weak_key_hash.rb +0 -60
- data/lib/rferret.rb +0 -37
- data/rake_utils/code_statistics.rb +0 -106
- data/test/benchmark/tb_ram_store.rb +0 -76
- data/test/benchmark/tb_rw_vint.rb +0 -26
- data/test/functional/thread_safety_index_test.rb +0 -81
- data/test/functional/thread_safety_test.rb +0 -137
- data/test/longrunning/tc_numbertools.rb +0 -60
- data/test/longrunning/tm_store.rb +0 -19
- data/test/unit/analysis/ctc_analyzer.rb +0 -532
- data/test/unit/analysis/data/wordfile +0 -6
- data/test/unit/analysis/rtc_letter_tokenizer.rb +0 -20
- data/test/unit/analysis/rtc_lower_case_filter.rb +0 -20
- data/test/unit/analysis/rtc_lower_case_tokenizer.rb +0 -27
- data/test/unit/analysis/rtc_per_field_analyzer_wrapper.rb +0 -39
- data/test/unit/analysis/rtc_porter_stem_filter.rb +0 -16
- data/test/unit/analysis/rtc_standard_analyzer.rb +0 -20
- data/test/unit/analysis/rtc_standard_tokenizer.rb +0 -20
- data/test/unit/analysis/rtc_stop_analyzer.rb +0 -20
- data/test/unit/analysis/rtc_stop_filter.rb +0 -14
- data/test/unit/analysis/rtc_white_space_analyzer.rb +0 -21
- data/test/unit/analysis/rtc_white_space_tokenizer.rb +0 -20
- data/test/unit/analysis/rtc_word_list_loader.rb +0 -32
- data/test/unit/analysis/tc_token.rb +0 -25
- data/test/unit/document/rtc_field.rb +0 -28
- data/test/unit/document/tc_document.rb +0 -47
- data/test/unit/document/tc_field.rb +0 -98
- data/test/unit/index/rtc_compound_file_io.rb +0 -107
- data/test/unit/index/rtc_field_infos.rb +0 -127
- data/test/unit/index/rtc_fields_io.rb +0 -167
- data/test/unit/index/rtc_multiple_term_doc_pos_enum.rb +0 -83
- data/test/unit/index/rtc_segment_infos.rb +0 -74
- data/test/unit/index/rtc_segment_term_docs.rb +0 -17
- data/test/unit/index/rtc_segment_term_enum.rb +0 -60
- data/test/unit/index/rtc_segment_term_vector.rb +0 -71
- data/test/unit/index/rtc_term_buffer.rb +0 -57
- data/test/unit/index/rtc_term_info.rb +0 -19
- data/test/unit/index/rtc_term_infos_io.rb +0 -192
- data/test/unit/index/rtc_term_vectors_io.rb +0 -108
- data/test/unit/index/tc_term.rb +0 -27
- data/test/unit/index/tc_term_voi.rb +0 -18
- data/test/unit/search/rtc_similarity.rb +0 -37
- data/test/unit/search/rtc_sort_field.rb +0 -14
- data/test/unit/search/tc_multi_searcher2.rb +0 -126
- data/test/unit/store/rtc_fs_store.rb +0 -62
- data/test/unit/store/rtc_ram_store.rb +0 -15
- data/test/unit/store/rtm_store.rb +0 -150
- data/test/unit/store/rtm_store_lock.rb +0 -2
- data/test/unit/ts_document.rb +0 -2
- data/test/unit/utils/rtc_bit_vector.rb +0 -73
- data/test/unit/utils/rtc_date_tools.rb +0 -50
- data/test/unit/utils/rtc_number_tools.rb +0 -59
- data/test/unit/utils/rtc_parameter.rb +0 -40
- data/test/unit/utils/rtc_priority_queue.rb +0 -62
- data/test/unit/utils/rtc_string_helper.rb +0 -21
- data/test/unit/utils/rtc_thread.rb +0 -61
- data/test/unit/utils/rtc_weak_key_hash.rb +0 -25
- data/test/utils/number_to_spoken.rb +0 -132
@@ -1,137 +0,0 @@
|
|
1
|
-
require File.dirname(__FILE__) + "/../test_helper"
|
2
|
-
require File.dirname(__FILE__) + "/../utils/number_to_spoken.rb"
|
3
|
-
require 'thread'
|
4
|
-
|
5
|
-
class ThreadSafetyTest
|
6
|
-
include Ferret::Index
|
7
|
-
include Ferret::Search
|
8
|
-
include Ferret::Store
|
9
|
-
include Ferret::Document
|
10
|
-
|
11
|
-
def initialize(options)
|
12
|
-
@options = options
|
13
|
-
end
|
14
|
-
|
15
|
-
INDEX_DIR = File.expand_path(File.join(File.dirname(__FILE__), "index"))
|
16
|
-
ANALYZER = Ferret::Analysis::Analyzer.new()
|
17
|
-
ITERATIONS = 19
|
18
|
-
@@searcher = nil
|
19
|
-
|
20
|
-
def run_index_thread(writer)
|
21
|
-
reopen_interval = 30 + rand(60)
|
22
|
-
|
23
|
-
use_compound_file = false
|
24
|
-
|
25
|
-
(400*ITERATIONS).times do |i|
|
26
|
-
d = Document.new()
|
27
|
-
n = rand(0xFFFFFFFF)
|
28
|
-
d << Field.new("id", n.to_s, Field::Store::YES, Field::Index::UNTOKENIZED)
|
29
|
-
d << Field.new("contents", n.to_spoken, Field::Store::NO, Field::Index::TOKENIZED)
|
30
|
-
puts("Adding #{n}")
|
31
|
-
|
32
|
-
# Switch between single and multiple file segments
|
33
|
-
use_compound_file = (rand < 0.5)
|
34
|
-
writer.use_compound_file = use_compound_file
|
35
|
-
|
36
|
-
writer << d
|
37
|
-
|
38
|
-
if (i % reopen_interval == 0)
|
39
|
-
writer.close()
|
40
|
-
writer = IndexWriter.new(INDEX_DIR, :analyzer => ANALYZER)
|
41
|
-
end
|
42
|
-
end
|
43
|
-
|
44
|
-
writer.close()
|
45
|
-
rescue => e
|
46
|
-
puts e
|
47
|
-
puts e.backtrace
|
48
|
-
raise e
|
49
|
-
end
|
50
|
-
|
51
|
-
def run_search_thread(use_global)
|
52
|
-
reopen_interval = 10 + rand(20)
|
53
|
-
|
54
|
-
unless use_global
|
55
|
-
searcher = IndexSearcher.new(INDEX_DIR)
|
56
|
-
end
|
57
|
-
|
58
|
-
(50*ITERATIONS).times do |i|
|
59
|
-
search_for(rand(0xFFFFFFFF), (searcher.nil? ? @@searcher : searcher))
|
60
|
-
if (i%reopen_interval == 0)
|
61
|
-
if (searcher == nil)
|
62
|
-
@@searcher = IndexSearcher.new(INDEX_DIR)
|
63
|
-
else
|
64
|
-
searcher.close()
|
65
|
-
searcher = IndexSearcher.new(INDEX_DIR)
|
66
|
-
end
|
67
|
-
end
|
68
|
-
end
|
69
|
-
rescue => e
|
70
|
-
puts e
|
71
|
-
puts e.backtrace
|
72
|
-
raise e
|
73
|
-
end
|
74
|
-
|
75
|
-
def search_for(n, searcher)
|
76
|
-
puts("Searching for #{n}")
|
77
|
-
hits =
|
78
|
-
searcher.search(Ferret::QueryParser.parse(n.to_spoken, "contents", :analyzer => ANALYZER),
|
79
|
-
:num_docs => 3)
|
80
|
-
puts("Search for #{n}: total = #{hits.size}")
|
81
|
-
hits.each do |d, s|
|
82
|
-
puts "Hit for #{n}: #{searcher.reader.get_document(d)["id"]} - #{s}"
|
83
|
-
end
|
84
|
-
end
|
85
|
-
|
86
|
-
def run_test_threads
|
87
|
-
|
88
|
-
threads = []
|
89
|
-
unless @options[:read_only]
|
90
|
-
writer = IndexWriter.new(INDEX_DIR, :analyzer => ANALYZER,
|
91
|
-
:create => !@options[:add])
|
92
|
-
|
93
|
-
threads << Thread.new { run_index_thread(writer) }
|
94
|
-
|
95
|
-
sleep(1)
|
96
|
-
end
|
97
|
-
|
98
|
-
threads << Thread.new { run_search_thread(false)}
|
99
|
-
|
100
|
-
@@searcher = IndexSearcher.new(INDEX_DIR)
|
101
|
-
threads << Thread.new { run_search_thread(true)}
|
102
|
-
|
103
|
-
threads << Thread.new { run_search_thread(true)}
|
104
|
-
|
105
|
-
threads.each {|t| t.join}
|
106
|
-
end
|
107
|
-
end
|
108
|
-
|
109
|
-
|
110
|
-
if $0 == __FILE__
|
111
|
-
require 'optparse'
|
112
|
-
|
113
|
-
OPTIONS = {
|
114
|
-
:all => false,
|
115
|
-
:read_only => false,
|
116
|
-
}
|
117
|
-
|
118
|
-
ARGV.options do |opts|
|
119
|
-
script_name = File.basename($0)
|
120
|
-
opts.banner = "Usage: ruby #{script_name} [options]"
|
121
|
-
|
122
|
-
opts.separator ""
|
123
|
-
|
124
|
-
opts.on("-r", "--read-only", "Read Only.") { OPTIONS[:all] = true }
|
125
|
-
opts.on("-a", "--all", "All.") { OPTIONS[:read_only] = true }
|
126
|
-
|
127
|
-
opts.separator ""
|
128
|
-
|
129
|
-
opts.on("-h", "--help",
|
130
|
-
"Show this help message.") { puts opts; exit }
|
131
|
-
|
132
|
-
opts.parse!
|
133
|
-
end
|
134
|
-
|
135
|
-
tst = ThreadSafetyTest.new(OPTIONS)
|
136
|
-
tst.run_test_threads
|
137
|
-
end
|
@@ -1,60 +0,0 @@
|
|
1
|
-
require File.dirname(__FILE__) + "/../../../test_helper"
|
2
|
-
|
3
|
-
|
4
|
-
class NumberToolsTest < Test::Unit::TestCase
|
5
|
-
include Lucene::Document
|
6
|
-
def test_near_zero()
|
7
|
-
10.times() do |i|
|
8
|
-
10.times() { |j| subtest_two_longs(i, j) }
|
9
|
-
end
|
10
|
-
end
|
11
|
-
|
12
|
-
def test_max()
|
13
|
-
# make sure the constants convert to their equivelents
|
14
|
-
assert_equal(NumberTools::LONG_MAX_VALUE, NumberTools.s_to_long(NumberTools::MAX_STRING_VALUE))
|
15
|
-
assert_equal(NumberTools::MAX_STRING_VALUE, NumberTools.long_to_s(NumberTools::LONG_MAX_VALUE))
|
16
|
-
# test near MAX, too
|
17
|
-
|
18
|
-
NumberTools::LONG_MAX_VALUE.downto(NumberTools::LONG_MAX_VALUE - 100) do |l|
|
19
|
-
subtest_two_longs(l, l - 1)
|
20
|
-
end
|
21
|
-
end
|
22
|
-
|
23
|
-
def test_min()
|
24
|
-
# make sure the constants convert to their equivelents
|
25
|
-
assert_equal(NumberTools::LONG_MIN_VALUE, NumberTools.s_to_long(NumberTools::MIN_STRING_VALUE))
|
26
|
-
assert_equal(NumberTools::MIN_STRING_VALUE, NumberTools.long_to_s(NumberTools::LONG_MIN_VALUE))
|
27
|
-
|
28
|
-
# test near MIN, too
|
29
|
-
NumberTools::LONG_MIN_VALUE.upto(NumberTools::LONG_MIN_VALUE + 100) do |l|
|
30
|
-
subtest_two_longs(l, l + 1)
|
31
|
-
end
|
32
|
-
end
|
33
|
-
|
34
|
-
def subtest_two_longs(i, j)
|
35
|
-
# convert to strings
|
36
|
-
a = NumberTools.long_to_s(i)
|
37
|
-
b = NumberTools.long_to_s(j)
|
38
|
-
|
39
|
-
# are they the right length?
|
40
|
-
assert_equal(NumberTools::STR_SIZE, a.length())
|
41
|
-
assert_equal(NumberTools::STR_SIZE, b.length())
|
42
|
-
|
43
|
-
# are they the right order?
|
44
|
-
if (i < j)
|
45
|
-
assert(a < b)
|
46
|
-
elsif (i > j)
|
47
|
-
assert(a > b)
|
48
|
-
else
|
49
|
-
assert_equal(a, b)
|
50
|
-
end
|
51
|
-
|
52
|
-
# can we convert them back to longs?
|
53
|
-
i2 = NumberTools.s_to_long(a)
|
54
|
-
j2 = NumberTools.s_to_long(b)
|
55
|
-
|
56
|
-
assert_equal(i, i2)
|
57
|
-
assert_equal(j, j2)
|
58
|
-
end
|
59
|
-
|
60
|
-
end
|
@@ -1,19 +0,0 @@
|
|
1
|
-
module StoreTest
|
2
|
-
# declare dir so inheritors can access it.
|
3
|
-
def test_modified_full
|
4
|
-
# difficult to test this one but as file mtime is only stored to the
|
5
|
-
# nearest second. We can assume this test will happen in less than one
|
6
|
-
# second. (I hope)
|
7
|
-
time = Time.new.to_i
|
8
|
-
@dir.touch('mtime_test')
|
9
|
-
time_before = @dir.modified('mtime_test').to_i
|
10
|
-
assert(time_before - time <= 2, "test that mtime is approximately equal to the system time when the file was touched")
|
11
|
-
# wait until the time ticks over one second.
|
12
|
-
time = Time.new while (time.to_i == time_before)
|
13
|
-
time_before_again = @dir.modified('mtime_test').to_i
|
14
|
-
assert_equal(time_before, time_before_again, "the modified time shouldn't change")
|
15
|
-
@dir.touch('mtime_test')
|
16
|
-
time_after = @dir.modified('mtime_test').to_i
|
17
|
-
assert(time_before < time_after, "the modified time should now be greater")
|
18
|
-
end
|
19
|
-
end
|
@@ -1,532 +0,0 @@
|
|
1
|
-
require File.dirname(__FILE__) + "/../../test_helper"
|
2
|
-
|
3
|
-
class AnalyzerTest < Test::Unit::TestCase
|
4
|
-
include Ferret::Analysis
|
5
|
-
|
6
|
-
def test_c_analyzer()
|
7
|
-
input = 'DBalmain@gmail.com is My E-Mail 523@#$ ADDRESS. 23#@$'
|
8
|
-
a = Analyzer.new()
|
9
|
-
t = a.token_stream("fieldname", input)
|
10
|
-
t2 = a.token_stream("fieldname", input)
|
11
|
-
assert_equal(Token.new("dbalmain", 0, 8), t.next())
|
12
|
-
assert_equal(Token.new("gmail", 9, 14), t.next())
|
13
|
-
assert_equal(Token.new("com", 15, 18), t.next())
|
14
|
-
assert_equal(Token.new("is", 19, 21), t.next())
|
15
|
-
assert_equal(Token.new("my", 22, 24), t.next())
|
16
|
-
assert_equal(Token.new("e", 25, 26), t.next())
|
17
|
-
assert_equal(Token.new("mail", 27, 31), t.next())
|
18
|
-
assert_equal(Token.new("address", 39, 46), t.next())
|
19
|
-
assert(! t.next())
|
20
|
-
assert_equal(Token.new("dbalmain", 0, 8), t2.next())
|
21
|
-
assert_equal(Token.new("gmail", 9, 14), t2.next())
|
22
|
-
assert_equal(Token.new("com", 15, 18), t2.next())
|
23
|
-
assert_equal(Token.new("is", 19, 21), t2.next())
|
24
|
-
assert_equal(Token.new("my", 22, 24), t2.next())
|
25
|
-
assert_equal(Token.new("e", 25, 26), t2.next())
|
26
|
-
assert_equal(Token.new("mail", 27, 31), t2.next())
|
27
|
-
assert_equal(Token.new("address", 39, 46), t2.next())
|
28
|
-
assert(! t2.next())
|
29
|
-
a = Analyzer.new(false)
|
30
|
-
t = a.token_stream("fieldname", input)
|
31
|
-
assert_equal(Token.new("DBalmain", 0, 8), t.next())
|
32
|
-
assert_equal(Token.new("gmail", 9, 14), t.next())
|
33
|
-
assert_equal(Token.new("com", 15, 18), t.next())
|
34
|
-
assert_equal(Token.new("is", 19, 21), t.next())
|
35
|
-
assert_equal(Token.new("My", 22, 24), t.next())
|
36
|
-
assert_equal(Token.new("E", 25, 26), t.next())
|
37
|
-
assert_equal(Token.new("Mail", 27, 31), t.next())
|
38
|
-
assert_equal(Token.new("ADDRESS", 39, 46), t.next())
|
39
|
-
assert(! t.next())
|
40
|
-
end
|
41
|
-
end
|
42
|
-
|
43
|
-
class AsciiLetterAnalyzerTest < Test::Unit::TestCase
|
44
|
-
include Ferret::Analysis
|
45
|
-
|
46
|
-
def test_c_letter_analyzer()
|
47
|
-
input = 'DBalmain@gmail.com is My E-Mail 523@#$ ADDRESS. 23#@$'
|
48
|
-
a = AsciiLetterAnalyzer.new()
|
49
|
-
t = a.token_stream("fieldname", input)
|
50
|
-
t2 = a.token_stream("fieldname", input)
|
51
|
-
assert_equal(Token.new("dbalmain", 0, 8), t.next())
|
52
|
-
assert_equal(Token.new("gmail", 9, 14), t.next())
|
53
|
-
assert_equal(Token.new("com", 15, 18), t.next())
|
54
|
-
assert_equal(Token.new("is", 19, 21), t.next())
|
55
|
-
assert_equal(Token.new("my", 22, 24), t.next())
|
56
|
-
assert_equal(Token.new("e", 25, 26), t.next())
|
57
|
-
assert_equal(Token.new("mail", 27, 31), t.next())
|
58
|
-
assert_equal(Token.new("address", 39, 46), t.next())
|
59
|
-
assert(! t.next())
|
60
|
-
assert_equal(Token.new("dbalmain", 0, 8), t2.next())
|
61
|
-
assert_equal(Token.new("gmail", 9, 14), t2.next())
|
62
|
-
assert_equal(Token.new("com", 15, 18), t2.next())
|
63
|
-
assert_equal(Token.new("is", 19, 21), t2.next())
|
64
|
-
assert_equal(Token.new("my", 22, 24), t2.next())
|
65
|
-
assert_equal(Token.new("e", 25, 26), t2.next())
|
66
|
-
assert_equal(Token.new("mail", 27, 31), t2.next())
|
67
|
-
assert_equal(Token.new("address", 39, 46), t2.next())
|
68
|
-
assert(! t2.next())
|
69
|
-
a = AsciiLetterAnalyzer.new(false)
|
70
|
-
t = a.token_stream("fieldname", input)
|
71
|
-
assert_equal(Token.new("DBalmain", 0, 8), t.next())
|
72
|
-
assert_equal(Token.new("gmail", 9, 14), t.next())
|
73
|
-
assert_equal(Token.new("com", 15, 18), t.next())
|
74
|
-
assert_equal(Token.new("is", 19, 21), t.next())
|
75
|
-
assert_equal(Token.new("My", 22, 24), t.next())
|
76
|
-
assert_equal(Token.new("E", 25, 26), t.next())
|
77
|
-
assert_equal(Token.new("Mail", 27, 31), t.next())
|
78
|
-
assert_equal(Token.new("ADDRESS", 39, 46), t.next())
|
79
|
-
assert(! t.next())
|
80
|
-
end
|
81
|
-
end
|
82
|
-
|
83
|
-
class LetterAnalyzerTest < Test::Unit::TestCase
|
84
|
-
include Ferret::Analysis
|
85
|
-
|
86
|
-
def test_c_letter_analyzer()
|
87
|
-
Ferret.locale = ""
|
88
|
-
input = 'DBalmän@gmail.com is My e-mail 52 #$ address. 23#@$ ÁÄGÇ®ÊË̯ÚØìÖÎÍ'
|
89
|
-
a = LetterAnalyzer.new(false)
|
90
|
-
t = a.token_stream("fieldname", input)
|
91
|
-
t2 = a.token_stream("fieldname", input)
|
92
|
-
assert_equal(Token.new("DBalmän", 0, 8), t.next)
|
93
|
-
assert_equal(Token.new("gmail", 9, 14), t.next)
|
94
|
-
assert_equal(Token.new("com", 15, 18), t.next)
|
95
|
-
assert_equal(Token.new("is", 19, 21), t.next)
|
96
|
-
assert_equal(Token.new("My", 22, 24), t.next)
|
97
|
-
assert_equal(Token.new("e", 25, 26), t.next)
|
98
|
-
assert_equal(Token.new("mail", 27, 31), t.next)
|
99
|
-
assert_equal(Token.new("address", 40, 47), t.next)
|
100
|
-
assert_equal(Token.new("ÁÄGÇ", 55, 62), t.next)
|
101
|
-
assert_equal(Token.new("ÊËÌ", 64, 70), t.next)
|
102
|
-
assert_equal(Token.new("ÚØÃ", 72, 78), t.next)
|
103
|
-
assert_equal(Token.new("ÖÎÍ", 80, 86), t.next)
|
104
|
-
assert(! t.next())
|
105
|
-
assert_equal(Token.new("DBalmän", 0, 8), t2.next)
|
106
|
-
assert_equal(Token.new("gmail", 9, 14), t2.next)
|
107
|
-
assert_equal(Token.new("com", 15, 18), t2.next)
|
108
|
-
assert_equal(Token.new("is", 19, 21), t2.next)
|
109
|
-
assert_equal(Token.new("My", 22, 24), t2.next)
|
110
|
-
assert_equal(Token.new("e", 25, 26), t2.next)
|
111
|
-
assert_equal(Token.new("mail", 27, 31), t2.next)
|
112
|
-
assert_equal(Token.new("address", 40, 47), t2.next)
|
113
|
-
assert_equal(Token.new("ÁÄGÇ", 55, 62), t2.next)
|
114
|
-
assert_equal(Token.new("ÊËÌ", 64, 70), t2.next)
|
115
|
-
assert_equal(Token.new("ÚØÃ", 72, 78), t2.next)
|
116
|
-
assert_equal(Token.new("ÖÎÍ", 80, 86), t2.next)
|
117
|
-
assert(! t2.next())
|
118
|
-
a = LetterAnalyzer.new()
|
119
|
-
t = a.token_stream("fieldname", input)
|
120
|
-
assert_equal(Token.new("dbalmän", 0, 8), t.next)
|
121
|
-
assert_equal(Token.new("gmail", 9, 14), t.next)
|
122
|
-
assert_equal(Token.new("com", 15, 18), t.next)
|
123
|
-
assert_equal(Token.new("is", 19, 21), t.next)
|
124
|
-
assert_equal(Token.new("my", 22, 24), t.next)
|
125
|
-
assert_equal(Token.new("e", 25, 26), t.next)
|
126
|
-
assert_equal(Token.new("mail", 27, 31), t.next)
|
127
|
-
assert_equal(Token.new("address", 40, 47), t.next)
|
128
|
-
assert_equal(Token.new("áägç", 55, 62), t.next)
|
129
|
-
assert_equal(Token.new("êëì", 64, 70), t.next)
|
130
|
-
assert_equal(Token.new("úøã", 72, 78), t.next)
|
131
|
-
assert_equal(Token.new("öîí", 80, 86), t.next)
|
132
|
-
assert(! t.next())
|
133
|
-
end
|
134
|
-
end
|
135
|
-
|
136
|
-
class AsciiWhiteSpaceAnalyzerTest < Test::Unit::TestCase
|
137
|
-
include Ferret::Analysis
|
138
|
-
|
139
|
-
def test_c_white_space_analyzer()
|
140
|
-
input = 'DBalmain@gmail.com is My E-Mail 52 #$ ADDRESS. 23#@$'
|
141
|
-
a = AsciiWhiteSpaceAnalyzer.new()
|
142
|
-
t = a.token_stream("fieldname", input)
|
143
|
-
t2 = a.token_stream("fieldname", input)
|
144
|
-
assert_equal(Token.new('DBalmain@gmail.com', 0, 18), t.next)
|
145
|
-
assert_equal(Token.new('is', 19, 21), t.next)
|
146
|
-
assert_equal(Token.new('My', 22, 24), t.next)
|
147
|
-
assert_equal(Token.new('E-Mail', 25, 31), t.next)
|
148
|
-
assert_equal(Token.new('52', 32, 34), t.next)
|
149
|
-
assert_equal(Token.new('#$', 37, 39), t.next)
|
150
|
-
assert_equal(Token.new('ADDRESS.', 40, 48), t.next)
|
151
|
-
assert_equal(Token.new('23#@$', 49, 54), t.next)
|
152
|
-
assert(! t.next())
|
153
|
-
assert_equal(Token.new('DBalmain@gmail.com', 0, 18), t2.next)
|
154
|
-
assert_equal(Token.new('is', 19, 21), t2.next)
|
155
|
-
assert_equal(Token.new('My', 22, 24), t2.next)
|
156
|
-
assert_equal(Token.new('E-Mail', 25, 31), t2.next)
|
157
|
-
assert_equal(Token.new('52', 32, 34), t2.next)
|
158
|
-
assert_equal(Token.new('#$', 37, 39), t2.next)
|
159
|
-
assert_equal(Token.new('ADDRESS.', 40, 48), t2.next)
|
160
|
-
assert_equal(Token.new('23#@$', 49, 54), t2.next)
|
161
|
-
assert(! t2.next())
|
162
|
-
a = AsciiWhiteSpaceAnalyzer.new(true)
|
163
|
-
t = a.token_stream("fieldname", input)
|
164
|
-
assert_equal(Token.new('dbalmain@gmail.com', 0, 18), t.next)
|
165
|
-
assert_equal(Token.new('is', 19, 21), t.next)
|
166
|
-
assert_equal(Token.new('my', 22, 24), t.next)
|
167
|
-
assert_equal(Token.new('e-mail', 25, 31), t.next)
|
168
|
-
assert_equal(Token.new('52', 32, 34), t.next)
|
169
|
-
assert_equal(Token.new('#$', 37, 39), t.next)
|
170
|
-
assert_equal(Token.new('address.', 40, 48), t.next)
|
171
|
-
assert_equal(Token.new('23#@$', 49, 54), t.next)
|
172
|
-
assert(! t.next())
|
173
|
-
end
|
174
|
-
end
|
175
|
-
|
176
|
-
class WhiteSpaceAnalyzerTest < Test::Unit::TestCase
|
177
|
-
include Ferret::Analysis
|
178
|
-
|
179
|
-
def test_c_white_space_analyzer()
|
180
|
-
input = 'DBalmän@gmail.com is My e-mail 52 #$ address. 23#@$ ÁÄGÇ®ÊË̯ÚØìÖÎÍ'
|
181
|
-
a = WhiteSpaceAnalyzer.new()
|
182
|
-
t = a.token_stream("fieldname", input)
|
183
|
-
t2 = a.token_stream("fieldname", input)
|
184
|
-
assert_equal(Token.new('DBalmän@gmail.com', 0, 18), t.next)
|
185
|
-
assert_equal(Token.new('is', 19, 21), t.next)
|
186
|
-
assert_equal(Token.new('My', 22, 24), t.next)
|
187
|
-
assert_equal(Token.new('e-mail', 25, 31), t.next)
|
188
|
-
assert_equal(Token.new('52', 32, 34), t.next)
|
189
|
-
assert_equal(Token.new('#$', 37, 39), t.next)
|
190
|
-
assert_equal(Token.new('address.', 40, 48), t.next)
|
191
|
-
assert_equal(Token.new('23#@$', 49, 54), t.next)
|
192
|
-
assert_equal(Token.new('ÁÄGÇ®ÊË̯ÚØìÖÎÍ', 55, 86), t.next)
|
193
|
-
assert(! t.next())
|
194
|
-
assert_equal(Token.new('DBalmän@gmail.com', 0, 18), t2.next)
|
195
|
-
assert_equal(Token.new('is', 19, 21), t2.next)
|
196
|
-
assert_equal(Token.new('My', 22, 24), t2.next)
|
197
|
-
assert_equal(Token.new('e-mail', 25, 31), t2.next)
|
198
|
-
assert_equal(Token.new('52', 32, 34), t2.next)
|
199
|
-
assert_equal(Token.new('#$', 37, 39), t2.next)
|
200
|
-
assert_equal(Token.new('address.', 40, 48), t2.next)
|
201
|
-
assert_equal(Token.new('23#@$', 49, 54), t2.next)
|
202
|
-
assert_equal(Token.new('ÁÄGÇ®ÊË̯ÚØìÖÎÍ', 55, 86), t2.next)
|
203
|
-
assert(! t2.next())
|
204
|
-
a = WhiteSpaceAnalyzer.new(true)
|
205
|
-
t = a.token_stream("fieldname", input)
|
206
|
-
assert_equal(Token.new('dbalmän@gmail.com', 0, 18), t.next)
|
207
|
-
assert_equal(Token.new('is', 19, 21), t.next)
|
208
|
-
assert_equal(Token.new('my', 22, 24), t.next)
|
209
|
-
assert_equal(Token.new('e-mail', 25, 31), t.next)
|
210
|
-
assert_equal(Token.new('52', 32, 34), t.next)
|
211
|
-
assert_equal(Token.new('#$', 37, 39), t.next)
|
212
|
-
assert_equal(Token.new('address.', 40, 48), t.next)
|
213
|
-
assert_equal(Token.new('23#@$', 49, 54), t.next)
|
214
|
-
assert_equal(Token.new('áägç®êëì¯úøã¬öîí', 55, 86), t.next)
|
215
|
-
assert(! t.next())
|
216
|
-
end
|
217
|
-
end
|
218
|
-
|
219
|
-
class AsciiStandardAnalyzerTest < Test::Unit::TestCase
  include Ferret::Analysis

  def test_c_standard_analyzer
    input = 'DBalmain@gmail.com is My e-mail 52 #$ Address. 23#@$ http://www.google.com/results/ T.N.T. 123-1235-ASD-1234'

    # Default construction: tokens are lower-cased and stop words dropped
    # ("is" and "my" are absent from the expectations below).
    lower_tokens = [
      ['dbalmain@gmail.com', 0, 18],
      ['e-mail', 25, 31],
      ['52', 32, 34],
      ['address', 40, 47],
      ['23', 49, 51],
      ['www.google.com/results', 55, 84],
      ['tnt', 86, 91],
      ['123-1235-asd-1234', 93, 110]
    ]
    analyzer = AsciiStandardAnalyzer.new()
    stream_one = analyzer.token_stream("fieldname", input)
    stream_two = analyzer.token_stream("fieldname", input)
    assert_token_stream(stream_one, lower_tokens)
    assert_token_stream(stream_two, lower_tokens)

    # Passing false keeps the original case; note that "My" now survives
    # while "is" is still filtered, per the expected tokens below.
    mixed_case_tokens = [
      ['DBalmain@gmail.com', 0, 18],
      ['My', 22, 24],
      ['e-mail', 25, 31],
      ['52', 32, 34],
      ['Address', 40, 47],
      ['23', 49, 51],
      ['www.google.com/results', 55, 84],
      ['TNT', 86, 91],
      ['123-1235-ASD-1234', 93, 110]
    ]
    analyzer = AsciiStandardAnalyzer.new(false)
    stream_one = analyzer.token_stream("fieldname", input)
    stream_two = analyzer.token_stream("fieldname", input)  # second stream created but left unread, as before
    assert_token_stream(stream_one, mixed_case_tokens)
  end

  private

  # Drains +stream+, asserting each token equals the [text, start, end]
  # triples in +expected+ and that the stream is exhausted afterwards.
  def assert_token_stream(stream, expected)
    expected.each do |text, start_offset, end_offset|
      assert_equal(Token.new(text, start_offset, end_offset), stream.next)
    end
    assert(! stream.next())
  end
end
|
260
|
-
|
261
|
-
class StandardAnalyzerTest < Test::Unit::TestCase
  include Ferret::Analysis

  def test_c_standard_analyzer
    input = 'DBalmán@gmail.com is My e-mail and the Address. 23#@$ http://www.google.com/results/ T.N.T. 123-1235-ASD-1234 23#@$ ÁÄGÇ®ÊË̯ÚØìÖÎÍ'

    # Default analyzer: lower-cased tokens, default stop words removed.
    lower_tokens = [
      ['dbalmán@gmail.com', 0, 18],
      ['e-mail', 25, 31],
      ['address', 40, 47],
      ['23', 49, 51],
      ['www.google.com/results', 55, 84],
      ['tnt', 86, 91],
      ['123-1235-asd-1234', 93, 110],
      ['23', 111, 113],
      ['áägç', 117, 124],
      ['êëì', 126, 132],
      ['úøã', 134, 140],
      ['öîí', 142, 148]
    ]
    analyzer = StandardAnalyzer.new()
    stream_one = analyzer.token_stream("fieldname", input)
    stream_two = analyzer.token_stream("fieldname", input)
    assert_token_stream(stream_one, lower_tokens)
    assert_token_stream(stream_two, lower_tokens)

    # nil stop list plus lower = false: original case is preserved.
    mixed_case_tokens = [
      ['DBalmán@gmail.com', 0, 18],
      ['My', 22, 24],
      ['e-mail', 25, 31],
      ['Address', 40, 47],
      ['23', 49, 51],
      ['www.google.com/results', 55, 84],
      ['TNT', 86, 91],
      ['123-1235-ASD-1234', 93, 110],
      ['23', 111, 113],
      ['ÁÄGÇ', 117, 124],
      ['ÊËÌ', 126, 132],
      ['ÚØÃ', 134, 140],
      ['ÖÎÍ', 142, 148]
    ]
    analyzer = StandardAnalyzer.new(nil, false)
    assert_token_stream(analyzer.token_stream("fieldname", input), mixed_case_tokens)

    # Custom stop list: "e-mail", "23" and "tnt" are filtered instead of the
    # defaults, so "is", "my", "and" and "the" now come through.
    custom_stop_tokens = [
      ['dbalmán@gmail.com', 0, 18],
      ['is', 19, 21],
      ['my', 22, 24],
      ['and', 32, 35],
      ['the', 36, 39],
      ['address', 40, 47],
      ['www.google.com/results', 55, 84],
      ['123-1235-asd-1234', 93, 110],
      ['áägç', 117, 124],
      ['êëì', 126, 132],
      ['úøã', 134, 140],
      ['öîí', 142, 148]
    ]
    analyzer = StandardAnalyzer.new(["e-mail", "23", "tnt"])
    stream_one = analyzer.token_stream("fieldname", input)
    stream_two = analyzer.token_stream("fieldname", input)
    assert_token_stream(stream_one, custom_stop_tokens)
    assert_token_stream(stream_two, custom_stop_tokens)
  end

  private

  # Drains +stream+, asserting each token equals the [text, start, end]
  # triples in +expected+ and that the stream is exhausted afterwards.
  def assert_token_stream(stream, expected)
    expected.each do |text, start_offset, end_offset|
      assert_equal(Token.new(text, start_offset, end_offset), stream.next)
    end
    assert(! stream.next())
  end
end
|
342
|
-
|
343
|
-
class PerFieldAnalyzerTest < Test::Unit::TestCase
  include Ferret::Analysis

  def test_c_per_field_analyzer
    input = 'DBalmain@gmail.com is My e-mail 52 #$ address. 23#@$'

    pfa = PerFieldAnalyzer.new(StandardAnalyzer.new())
    pfa['white'] = WhiteSpaceAnalyzer.new(false)
    pfa['white_l'] = WhiteSpaceAnalyzer.new(true)
    pfa['letter'] = LetterAnalyzer.new(false)
    # add_field for 'letter' replaces the []= assignment above -- the
    # expectations for 'letter' below are lower-cased.
    pfa.add_field('letter', LetterAnalyzer.new(true))
    pfa.add_field('letter_u', LetterAnalyzer.new(false))

    # Whitespace analyzer, case preserved.
    assert_token_stream(pfa.token_stream('white', input), [
      ['DBalmain@gmail.com', 0, 18],
      ['is', 19, 21],
      ['My', 22, 24],
      ['e-mail', 25, 31],
      ['52', 32, 34],
      ['#$', 37, 39],
      ['address.', 40, 48],
      ['23#@$', 49, 54]
    ])

    # Whitespace analyzer, lower-cased.
    assert_token_stream(pfa.token_stream('white_l', input), [
      ['dbalmain@gmail.com', 0, 18],
      ['is', 19, 21],
      ['my', 22, 24],
      ['e-mail', 25, 31],
      ['52', 32, 34],
      ['#$', 37, 39],
      ['address.', 40, 48],
      ['23#@$', 49, 54]
    ])

    # Letter analyzer, case preserved.
    assert_token_stream(pfa.token_stream('letter_u', input), [
      ['DBalmain', 0, 8],
      ['gmail', 9, 14],
      ['com', 15, 18],
      ['is', 19, 21],
      ['My', 22, 24],
      ['e', 25, 26],
      ['mail', 27, 31],
      ['address', 40, 47]
    ])

    # Letter analyzer, lower-cased.
    assert_token_stream(pfa.token_stream('letter', input), [
      ['dbalmain', 0, 8],
      ['gmail', 9, 14],
      ['com', 15, 18],
      ['is', 19, 21],
      ['my', 22, 24],
      ['e', 25, 26],
      ['mail', 27, 31],
      ['address', 40, 47]
    ])

    # An unmapped field falls back to the default StandardAnalyzer.
    assert_token_stream(pfa.token_stream('XXX', input), [
      ['dbalmain@gmail.com', 0, 18],
      ['e-mail', 25, 31],
      ['52', 32, 34],
      ['address', 40, 47],
      ['23', 49, 51]
    ])
  end

  private

  # Drains +stream+, asserting each token equals the [text, start, end]
  # triples in +expected+ and that the stream is exhausted afterwards.
  def assert_token_stream(stream, expected)
    expected.each do |text, start_offset, end_offset|
      assert_equal(Token.new(text, start_offset, end_offset), stream.next)
    end
    assert(! stream.next())
  end
end
|
402
|
-
|
403
|
-
class RegExpAnalyzerTest < Test::Unit::TestCase
  include Ferret::Analysis

  def test_reg_exp_analyzer
    input = 'DBalmain@gmail.com is My e-mail 52 #$ Address. 23#@$ http://www.google.com/RESULT_3.html T.N.T. 123-1235-ASD-1234 23 Rob\'s'

    # Default analyzer: default token pattern, lower-cased output.
    analyzer = RegExpAnalyzer.new()
    main_stream = analyzer.token_stream('XXX', input)
    aux_stream = analyzer.token_stream('XXX', "one_Two three")
    assert_token_stream(main_stream, [
      ['dbalmain@gmail.com', 0, 18],
      ['is', 19, 21],
      ['my', 22, 24],
      ['e-mail', 25, 31],
      ['52', 32, 34],
      ['address', 40, 47],
      ['23', 49, 51],
      ['http://www.google.com/result_3.html', 55, 90],
      ['t.n.t.', 91, 97],
      ['123-1235-asd-1234', 98, 115],
      ['23', 116, 118],
      ['rob\'s', 119, 124]
    ])
    assert_token_stream(aux_stream, [
      ['one_two', 0, 7],
      ['three', 8, 13]
    ])

    # Explicit pattern (words of two or more chars), no lower-casing.
    analyzer = RegExpAnalyzer.new(/\w{2,}/, false)
    main_stream = analyzer.token_stream('XXX', input)
    aux_stream = analyzer.token_stream('XXX', "one Two three")
    assert_token_stream(main_stream, [
      ['DBalmain', 0, 8],
      ['gmail', 9, 14],
      ['com', 15, 18],
      ['is', 19, 21],
      ['My', 22, 24],
      ['mail', 27, 31],
      ['52', 32, 34],
      ['Address', 40, 47],
      ['23', 49, 51],
      ['http', 55, 59],
      ['www', 62, 65],
      ['google', 66, 72],
      ['com', 73, 76],
      ['RESULT_3', 77, 85],
      ['html', 86, 90],
      ['123', 98, 101],
      ['1235', 102, 106],
      ['ASD', 107, 110],
      ['1234', 111, 115],
      ['23', 116, 118],
      ['Rob', 119, 122]
    ])
    assert_token_stream(aux_stream, [
      ['one', 0, 3],
      ['Two', 4, 7],
      ['three', 8, 13]
    ])

    # Block-form analyzer: collapse dotted acronyms (t.n.t. -> tnt) and
    # strip trailing 's / 'S possessives; other tokens pass through.
    analyzer = RegExpAnalyzer.new() do |str|
      case str
      when /^[[:alpha:]]\.([[:alpha:]]\.)+$/
        str.gsub!(/\./, '')
      when /'[sS]$/
        str.gsub!(/'[sS]$/, '')
      end
      str
    end
    main_stream = analyzer.token_stream('XXX', input)
    aux_stream = analyzer.token_stream('XXX', "one's don't T.N.T.")
    assert_token_stream(main_stream, [
      ['dbalmain@gmail.com', 0, 18],
      ['is', 19, 21],
      ['my', 22, 24],
      ['e-mail', 25, 31],
      ['52', 32, 34],
      ['address', 40, 47],
      ['23', 49, 51],
      ['http://www.google.com/result_3.html', 55, 90],
      ['tnt', 91, 97],
      ['123-1235-asd-1234', 98, 115],
      ['23', 116, 118],
      ['rob', 119, 124]
    ])
    assert_token_stream(aux_stream, [
      ['one', 0, 5],
      ["don't", 6, 11],
      ['tnt', 12, 18]
    ])
  end

  private

  # Drains +stream+, asserting each token equals the [text, start, end]
  # triples in +expected+ and that the stream is exhausted afterwards.
  def assert_token_stream(stream, expected)
    expected.each do |text, start_offset, end_offset|
      assert_equal(Token.new(text, start_offset, end_offset), stream.next)
    end
    assert(! stream.next())
  end
end
|
486
|
-
|
487
|
-
module Ferret::Analysis
  # A StandardAnalyzer whose token stream is additionally passed through a
  # StemFilter; used by the custom-analyzer tests in this file.
  class StemmingStandardAnalyzer < StandardAnalyzer
    def token_stream(field, text)
      StemFilter.new(super(field, text))
    end
  end
end
|
494
|
-
|
495
|
-
class CustomAnalyzerTest < Test::Unit::TestCase
  include Ferret::Analysis

  def test_custom_filter
    input = 'DBalmán@gmail.com is My e-mail and the Address. 23#@$ http://www.google.com/results/ T.N.T. 123-1235-ASD-1234 23#@$ ÁÄGÇ®ÊË̯ÚØìÖÎÍ'

    # Stemming on top of StandardAnalyzer: note "results" -> "result".
    analyzer = StemmingStandardAnalyzer.new()
    assert_token_stream(analyzer.token_stream("fieldname", input), [
      ['dbalmán@gmail.com', 0, 18],
      ['e-mail', 25, 31],
      ['address', 40, 47],
      ['23', 49, 51],
      ['www.google.com/result', 55, 84],
      ['tnt', 86, 91],
      ['123-1235-asd-1234', 93, 110],
      ['23', 111, 113],
      ['áägç', 117, 124],
      ['êëì', 126, 132],
      ['úøã', 134, 140],
      ['öîí', 142, 148]
    ])

    # Every inflection of "debate" stems to the same term.
    input = "Debate Debates DEBATED DEBating Debater";
    assert_token_stream(analyzer.token_stream("fieldname", input), [
      ["debat", 0, 6],
      ["debat", 7, 14],
      ["debat", 15, 22],
      ["debat", 23, 31],
      ["debat", 32, 39]
    ])

    # Hand-built filter chain with an explicit :english stemmer.
    input = "Dêbate dêbates DÊBATED DÊBATing dêbater";
    stream = StemFilter.new(LowerCaseFilter.new(LetterTokenizer.new(input)), :english)
    assert_token_stream(stream, [
      ["dêbate", 0, 7],
      ["dêbate", 8, 16],
      ["dêbate", 17, 25],
      ["dêbate", 26, 35],
      ["dêbater", 36, 44]
    ])
  end

  private

  # Drains +stream+, asserting each token equals the [text, start, end]
  # triples in +expected+ and that the stream is exhausted afterwards.
  def assert_token_stream(stream, expected)
    expected.each do |text, start_offset, end_offset|
      assert_equal(Token.new(text, start_offset, end_offset), stream.next)
    end
    assert(! stream.next())
  end
end
|