ferret 0.3.2 → 0.9.0
- data/CHANGELOG +9 -0
- data/Rakefile +51 -25
- data/ext/analysis.c +553 -0
- data/ext/analysis.h +76 -0
- data/ext/array.c +83 -0
- data/ext/array.h +19 -0
- data/ext/bitvector.c +164 -0
- data/ext/bitvector.h +29 -0
- data/ext/compound_io.c +335 -0
- data/ext/document.c +336 -0
- data/ext/document.h +87 -0
- data/ext/ferret.c +88 -47
- data/ext/ferret.h +43 -109
- data/ext/field.c +395 -0
- data/ext/filter.c +103 -0
- data/ext/fs_store.c +352 -0
- data/ext/global.c +219 -0
- data/ext/global.h +73 -0
- data/ext/hash.c +446 -0
- data/ext/hash.h +80 -0
- data/ext/hashset.c +141 -0
- data/ext/hashset.h +37 -0
- data/ext/helper.c +11 -0
- data/ext/helper.h +5 -0
- data/ext/inc/lang.h +41 -0
- data/ext/ind.c +389 -0
- data/ext/index.h +884 -0
- data/ext/index_io.c +269 -415
- data/ext/index_rw.c +2543 -0
- data/ext/lang.c +31 -0
- data/ext/lang.h +41 -0
- data/ext/priorityqueue.c +228 -0
- data/ext/priorityqueue.h +44 -0
- data/ext/q_boolean.c +1331 -0
- data/ext/q_const_score.c +154 -0
- data/ext/q_fuzzy.c +287 -0
- data/ext/q_match_all.c +142 -0
- data/ext/q_multi_phrase.c +343 -0
- data/ext/q_parser.c +2180 -0
- data/ext/q_phrase.c +657 -0
- data/ext/q_prefix.c +75 -0
- data/ext/q_range.c +247 -0
- data/ext/q_span.c +1566 -0
- data/ext/q_term.c +308 -0
- data/ext/q_wildcard.c +146 -0
- data/ext/r_analysis.c +255 -0
- data/ext/r_doc.c +578 -0
- data/ext/r_index_io.c +996 -0
- data/ext/r_qparser.c +158 -0
- data/ext/r_search.c +2321 -0
- data/ext/r_store.c +263 -0
- data/ext/r_term.c +219 -0
- data/ext/ram_store.c +447 -0
- data/ext/search.c +524 -0
- data/ext/search.h +1065 -0
- data/ext/similarity.c +143 -39
- data/ext/sort.c +661 -0
- data/ext/store.c +35 -0
- data/ext/store.h +152 -0
- data/ext/term.c +704 -143
- data/ext/termdocs.c +599 -0
- data/ext/vector.c +594 -0
- data/lib/ferret.rb +9 -10
- data/lib/ferret/analysis/analyzers.rb +2 -2
- data/lib/ferret/analysis/standard_tokenizer.rb +1 -1
- data/lib/ferret/analysis/token.rb +14 -14
- data/lib/ferret/analysis/token_filters.rb +3 -3
- data/lib/ferret/document/field.rb +16 -17
- data/lib/ferret/index/document_writer.rb +4 -4
- data/lib/ferret/index/index.rb +39 -23
- data/lib/ferret/index/index_writer.rb +2 -2
- data/lib/ferret/index/multiple_term_doc_pos_enum.rb +1 -8
- data/lib/ferret/index/segment_term_vector.rb +4 -4
- data/lib/ferret/index/term.rb +5 -1
- data/lib/ferret/index/term_vector_offset_info.rb +6 -6
- data/lib/ferret/index/term_vectors_io.rb +5 -5
- data/lib/ferret/query_parser/query_parser.tab.rb +81 -77
- data/lib/ferret/search.rb +1 -1
- data/lib/ferret/search/boolean_query.rb +2 -1
- data/lib/ferret/search/field_sorted_hit_queue.rb +3 -3
- data/lib/ferret/search/fuzzy_query.rb +2 -1
- data/lib/ferret/search/index_searcher.rb +3 -0
- data/lib/ferret/search/{match_all_docs_query.rb → match_all_query.rb} +7 -7
- data/lib/ferret/search/multi_phrase_query.rb +6 -5
- data/lib/ferret/search/phrase_query.rb +3 -6
- data/lib/ferret/search/prefix_query.rb +4 -4
- data/lib/ferret/search/sort.rb +3 -1
- data/lib/ferret/search/sort_field.rb +9 -9
- data/lib/ferret/search/spans/near_spans_enum.rb +1 -1
- data/lib/ferret/search/spans/span_near_query.rb +1 -1
- data/lib/ferret/search/spans/span_weight.rb +1 -1
- data/lib/ferret/search/spans/spans_enum.rb +7 -7
- data/lib/ferret/store/fs_store.rb +10 -6
- data/lib/ferret/store/ram_store.rb +3 -3
- data/lib/rferret.rb +36 -0
- data/test/functional/thread_safety_index_test.rb +2 -2
- data/test/test_helper.rb +16 -2
- data/test/unit/analysis/c_token.rb +25 -0
- data/test/unit/analysis/tc_per_field_analyzer_wrapper.rb +1 -1
- data/test/unit/analysis/tc_standard_analyzer.rb +1 -1
- data/test/unit/document/{tc_document.rb → c_document.rb} +0 -0
- data/test/unit/document/c_field.rb +98 -0
- data/test/unit/document/tc_field.rb +0 -66
- data/test/unit/index/{tc_index.rb → c_index.rb} +62 -6
- data/test/unit/index/{tc_index_reader.rb → c_index_reader.rb} +51 -10
- data/test/unit/index/{tc_index_writer.rb → c_index_writer.rb} +0 -4
- data/test/unit/index/{tc_term.rb → c_term.rb} +1 -3
- data/test/unit/index/{tc_term_vector_offset_info.rb → c_term_voi.rb} +5 -5
- data/test/unit/index/tc_segment_term_vector.rb +2 -2
- data/test/unit/index/tc_term_vectors_io.rb +4 -4
- data/test/unit/query_parser/c_query_parser.rb +138 -0
- data/test/unit/search/{tc_filter.rb → c_filter.rb} +24 -24
- data/test/unit/search/{tc_fuzzy_query.rb → c_fuzzy_query.rb} +0 -0
- data/test/unit/search/{tc_index_searcher.rb → c_index_searcher.rb} +9 -26
- data/test/unit/search/{tc_search_and_sort.rb → c_search_and_sort.rb} +15 -15
- data/test/unit/search/{tc_sort.rb → c_sort.rb} +2 -1
- data/test/unit/search/c_sort_field.rb +27 -0
- data/test/unit/search/{tc_spans.rb → c_spans.rb} +0 -0
- data/test/unit/search/tc_sort_field.rb +7 -20
- data/test/unit/store/c_fs_store.rb +76 -0
- data/test/unit/store/c_ram_store.rb +35 -0
- data/test/unit/store/m_store.rb +34 -0
- data/test/unit/store/m_store_lock.rb +68 -0
- data/test/unit/store/tc_fs_store.rb +0 -53
- data/test/unit/store/tc_ram_store.rb +0 -20
- data/test/unit/store/tm_store.rb +0 -30
- data/test/unit/store/tm_store_lock.rb +0 -66
- metadata +84 -31
- data/ext/Makefile +0 -140
- data/ext/ferret_ext.so +0 -0
- data/ext/priority_queue.c +0 -232
- data/ext/ram_directory.c +0 -321
- data/ext/segment_merge_queue.c +0 -37
- data/ext/segment_term_enum.c +0 -326
- data/ext/string_helper.c +0 -42
- data/ext/tags +0 -344
- data/ext/term_buffer.c +0 -230
- data/ext/term_infos_reader.c +0 -54
- data/ext/terminfo.c +0 -160
- data/ext/token.c +0 -93
- data/ext/util.c +0 -12
data/ext/q_term.c
ADDED
@@ -0,0 +1,308 @@
+#include <string.h>
+#include "search.h"
+
+/***************************************************************************
+ *
+ * TermWeight
+ *
+ ***************************************************************************/
+
+Scorer *tw_scorer(Weight *self, IndexReader *ir)
+{
+  Term *term = ((TermQuery *)self->query->data)->term;
+  TermDocEnum *tde = ir_term_docs_for(ir, term);
+  if (!tde) return NULL;
+
+  return tsc_create(self, tde, ir->get_norms_always(ir, term->field));
+}
+
+Explanation *tw_explain(Weight *self, IndexReader *ir, int doc_num)
+{
+  char *query_str = self->query->to_s(self->query, "");
+  TermQuery *tq = (TermQuery *)self->query->data;
+  Term *term = tq->term;
+  char *field_name = term->field;
+
+  Explanation *expl = expl_create(0.0,
+      epstrdup("weight(%s in %d), product of:",
+          strlen(query_str) + 20,
+          query_str, doc_num));
+
+  // We need two of these as it's included in both the query explanation
+  // and the field explanation
+  Explanation *idf_expl1 = expl_create(self->idf,
+      epstrdup("idf(doc_freq=%d)", 20, ir->doc_freq(ir, tq->term)));
+  Explanation *idf_expl2 = expl_create(self->idf,
+      epstrdup("idf(doc_freq=%d)", 20, ir->doc_freq(ir, tq->term)));
+
+  // explain query weight
+  Explanation *query_expl = expl_create(0.0,
+      epstrdup("query_weight(%s), product of:", strlen(query_str), query_str));
+  free(query_str);
+
+  if (self->query->boost != 1.0) {
+    expl_add_detail(query_expl, expl_create(self->query->boost, estrdup("boost")));
+  }
+
+  expl_add_detail(query_expl, idf_expl1);
+
+  Explanation *qnorm_expl = expl_create(self->qnorm, estrdup("query_norm"));
+  expl_add_detail(query_expl, qnorm_expl);
+
+  query_expl->value = self->query->boost * idf_expl1->value * qnorm_expl->value;
+
+  expl_add_detail(expl, query_expl);
+
+  // explain field weight
+  Explanation *field_expl = expl_create(0.0,
+      epstrdup("field_weight(%s:%s in %d), product of:",
+          strlen(field_name) + strlen(term->text) + 20,
+          field_name, term->text, doc_num));
+
+  Scorer *scorer = self->scorer(self, ir);
+  Explanation *tf_expl = scorer->explain(scorer, doc_num);
+  scorer->destroy(scorer);
+  expl_add_detail(field_expl, tf_expl);
+  expl_add_detail(field_expl, idf_expl2);
+
+  uchar *field_norms = ir->get_norms(ir, field_name);
+  float field_norm = (field_norms ? sim_decode_norm(self->similarity, field_norms[doc_num]) : 0.0);
+  Explanation *field_norm_expl = expl_create(field_norm,
+      epstrdup("field_norm(field=%s, doc=%d)",
+          strlen(field_name) + 20, field_name, doc_num));
+  expl_add_detail(field_expl, field_norm_expl);
+
+  field_expl->value = tf_expl->value * idf_expl2->value * field_norm_expl->value;
+
+  // combine them
+  if (query_expl->value == 1.0) {
+    expl_destoy(expl);
+    return field_expl;
+  } else {
+    expl->value = (query_expl->value * field_expl->value);
+    expl_add_detail(expl, field_expl);
+    return expl;
+  }
+}
+
+char *tw_to_s(Weight *self)
+{
+  char dbuf[32];
+  dbl_to_s(dbuf, self->value);
+  return epstrdup("TermWeight(%#.5g)", strlen(dbuf), dbuf);
+}
+
+void tw_destroy(void *p)
+{
+  free(p);
+}
+
+Weight *tw_create(Query *query, Searcher *searcher)
+{
+  Weight *self = ALLOC(Weight);
+  ZEROSET(self, Weight, 1);
+  self->get_query = &w_get_query;
+  self->get_value = &w_get_value;
+  self->normalize = &w_normalize;
+  self->scorer = &tw_scorer;
+  self->explain = &tw_explain;
+  self->to_s = &tw_to_s;
+  self->destroy = &tw_destroy;
+  self->sum_of_squared_weights = &w_sum_of_squared_weights;
+
+  self->similarity = query->get_similarity(query, searcher);
+  self->idf = sim_idf(self->similarity,
+      searcher->doc_freq(searcher, ((TermQuery *)query->data)->term),
+      searcher->max_doc(searcher)); // compute idf
+  self->query = query;
+  self->value = 0.0;
+
+  return self;
+}
+
+/***************************************************************************
+ *
+ * TermQuery
+ *
+ ***************************************************************************/
+
+void tq_destroy(void *p)
+{
+  Query *q = (Query *)p;
+  TermQuery *tq = q->data;
+  term_destroy(tq->term);
+  free(tq);
+  q_destroy(q);
+}
+
+char *tq_to_s(Query *self, char *field)
+{
+  Term *term = ((TermQuery *)self->data)->term;
+  int flen = strlen(term->field);
+  int tlen = strlen(term->text);
+  char *buffer = ALLOC_N(char, 34 + flen + tlen);
+  char *bp = buffer;
+  if (strcmp(field, term->field) != 0) {
+    memcpy(bp, term->field, sizeof(char) * flen);
+    bp[flen] = ':';
+    bp += flen + 1;
+  }
+  memcpy(bp, term->text, tlen);
+  bp += tlen;
+  *bp = 0;
+  if (self->boost != 1.0) {
+    char dbuf[32];
+    dbl_to_s(dbuf, self->boost);
+    sprintf(bp, "^%s", dbuf);
+  }
+  return buffer;
+}
+
+void tq_extract_terms(Query *self, Array *terms)
+{
+  Term *term = ((TermQuery *)self->data)->term;
+  ary_append(terms, term);
+}
+
+Query *tq_create(Term *term)
+{
+  Query *self = q_create();
+  TermQuery *tq = ALLOC(TermQuery);
+  tq->term = term;
+  self->type = TERM_QUERY;
+  self->data = tq;
+  self->create_weight = &tw_create;
+  self->extract_terms = &tq_extract_terms;
+  self->to_s = &tq_to_s;
+  self->destroy = &tq_destroy;
+
+  return self;
+}
+
+/***************************************************************************
+ *
+ * TermScorer
+ *
+ ***************************************************************************/
+
+float tsc_score(Scorer *self)
+{
+  TermScorer *ts = (TermScorer *)self->data;
+  int freq = ts->freqs[ts->pointer];
+  float score;
+  // compute tf(f)*weight
+  if (freq < SCORE_CACHE_SIZE) { // check cache
+    score = ts->score_cache[freq]; // cache hit
+  } else {
+    score = sim_tf(self->similarity, freq) * ts->weight_value; // cache miss
+  }
+  // normalize for field
+  score *= sim_decode_norm(self->similarity, ts->norms[self->doc]);
+  return score;
+}
+
+bool tsc_next(Scorer *self)
+{
+  TermScorer *ts = (TermScorer *)self->data;
+
+  ts->pointer++;
+  if (ts->pointer >= ts->pointer_max) {
+    // refill buffer
+    ts->pointer_max = ts->tde->read(ts->tde, ts->docs, ts->freqs, TDE_READ_SIZE);
+    if (ts->pointer_max != 0) {
+      ts->pointer = 0;
+    } else {
+      ts->tde->close(ts->tde); // close stream
+      ts->tde = NULL;
+      return false;
+    }
+  }
+  self->doc = ts->docs[ts->pointer];
+  return true;
+}
+
+bool tsc_skip_to(Scorer *self, int doc_num)
+{
+  TermScorer *ts = (TermScorer *)self->data;
+
+  // first scan in cache
+  while (++(ts->pointer) < ts->pointer_max) {
+    if (ts->docs[ts->pointer] >= doc_num) {
+      self->doc = ts->docs[ts->pointer];
+      return true;
+    }
+  }
+
+  // not found in cache, seek underlying stream
+  TermDocEnum *tde = ts->tde;
+  bool result = tde->skip_to(tde, doc_num);
+  if (result) {
+    ts->pointer_max = 1;
+    ts->pointer = 0;
+    ts->docs[0] = self->doc = tde->doc_num(tde);
+    ts->freqs[0] = tde->freq(tde);
+    return true;
+  } else {
+    return false;
+  }
+}
+
+Explanation *tsc_explain(Scorer *self, int doc_num)
+{
+  TermScorer *ts = (TermScorer *)self->data;
+  Query *query = ts->weight->get_query(ts->weight);
+  Term *term = ((TermQuery *)query->data)->term;
+  int tf = 0;
+  TermDocEnum *tde = ts->tde;
+  while (ts->pointer < ts->pointer_max) {
+    if (ts->docs[ts->pointer] == doc_num)
+      tf = ts->freqs[ts->pointer];
+    ts->pointer++;
+  }
+  if (tf == 0) {
+    while (tde->next(tde)) {
+      if (tde->doc_num(tde) == doc_num)
+        tf = tde->freq(tde);
+    }
+  }
+  tde->close(tde);
+  ts->tde = NULL;
+  Explanation *tf_explanation = expl_create(sim_tf(self->similarity, tf),
+      epstrdup("tf(term_freq(%s:%s)=%ld)",
+          strlen(term->field) + strlen(term->text) + 20,
+          term->field, term->text, tf));
+
+  return tf_explanation;
+}
+
+void tsc_destroy(void *p)
+{
+  Scorer *self = (Scorer *)p;
+  TermScorer *ts = (TermScorer *)self->data;
+  if (ts->tde) ts->tde->close(ts->tde);
+  scorer_destroy(p);
+}
+
+Scorer *tsc_create(Weight *weight, TermDocEnum *tde, uchar *norms)
+{
+  int i;
+  Scorer *self = scorer_create(weight->similarity);
+  TermScorer *ts = ALLOC(TermScorer);
+  ZEROSET(ts, TermScorer, 1);
+  self->data = ts;
+  ts->weight = weight;
+  ts->tde = tde;
+  ts->norms = norms;
+  ts->weight_value = weight->value;
+
+  for (i = 0; i < SCORE_CACHE_SIZE; i++) {
+    ts->score_cache[i] = sim_tf(self->similarity, i) * ts->weight_value;
+  }
+
+  self->score = &tsc_score;
+  self->next = &tsc_next;
+  self->skip_to = &tsc_skip_to;
+  self->explain = &tsc_explain;
+  self->destroy = &tsc_destroy;
+  return self;
+}
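Note: the following is a minimal usage sketch, not part of the diff. It only calls functions that appear in this changeset (term_create() from the new term.c plus the tq_* functions above), and the exact string produced by to_s() is approximate.

#include <stdio.h>
#include <stdlib.h>
#include "search.h"

int main(void)
{
  /* Build a query for the term "ferret" in the "title" field. */
  Term *t = term_create("title", "ferret");
  Query *q = tq_create(t);           /* the query now owns the term */
  q->boost = 2.0;

  /* to_s() prefixes the field name unless it matches the default field,
   * so this should print something like "title:ferret^2.0". */
  char *s = q->to_s(q, "contents");
  printf("%s\n", s);
  free(s);

  q->destroy(q);                     /* tq_destroy also frees the term */
  return 0;
}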
data/ext/q_wildcard.c
ADDED
@@ -0,0 +1,146 @@
+#include <string.h>
+#include "search.h"
+
+/****************************************************************************
+ *
+ * WildCardQuery
+ *
+ ****************************************************************************/
+
+char *wcq_to_s(Query *self, char *field)
+{
+  char *buffer, *bptr;
+  Term *term = (Term *)self->data;
+  int tlen = strlen(term->text);
+  int flen = strlen(term->field);
+  bptr = buffer = ALLOC_N(char, tlen + flen + 35);
+
+  if (strcmp(term->field, field) != 0) {
+    sprintf(bptr, "%s:", term->field);
+    bptr += strlen(term->field) + 1;
+  }
+  sprintf(bptr, "%s", term->text);
+  bptr = buffer + strlen(buffer);
+  if (self->boost != 1.0) {
+    *bptr = '^';
+    dbl_to_s(++bptr, self->boost);
+  }
+
+  return buffer;
+}
+
+bool wc_match(char *pattern, char *text)
+{
+  char *p = pattern, *t = text, *xt;
+
+  /* include '\0' as we need to match empty string */
+  char *text_last = t + strlen(t);
+
+  for (;; p++, t++) {
+
+    /* end of text so make sure end of pattern doesn't matter */
+    if (*t == '\0') {
+      while (*p) {
+        if (*p != WILD_STRING) return false;
+        p++;
+      }
+      return true;
+    }
+
+    /* If we've gone past the end of the pattern, return false. */
+    if (*p == '\0') return false;
+
+    /* Match a single character, so continue. */
+    if (*p == WILD_CHAR) continue;
+
+    if (*p == WILD_STRING) {
+      // Look at the character beyond the '*'.
+      p++;
+      // Examine the string, starting at the last character.
+      for (xt = text_last; xt >= t; xt--) {
+        if (wc_match(p, xt)) return true;
+      }
+      return false;
+    }
+    if (*p != *t)
+      return false;
+  }
+
+  return false;
+}
+
+Query *wcq_rewrite(Query *self, IndexReader *ir)
+{
+  Query *q;
+  Query *tq;
+
+  Term *term = (Term *)self->data;
+  char *text = term->text;
+  char *field = term->field;
+  char *first_star = index(text, WILD_STRING);
+  char *first_ques = index(text, WILD_CHAR);
+  if (!first_star && !first_ques) {
+    q = tq_create(term_clone(term));
+  } else {
+    TermEnum *te;
+    Term prefix_term;
+    char *prefix = NULL;
+
+    char *pattern = (first_ques && first_star > first_ques)
+        ? first_ques : first_star;
+
+    int prefix_len = pattern - text;
+
+    prefix_term.field = field;
+    prefix_term.text = (char *)EMPTY_STRING;
+    if (prefix_len > 0) {
+      prefix = ALLOC_N(char, prefix_len + 1);
+      strncpy(prefix, text, prefix_len);
+      prefix_term.text = prefix;
+      prefix_term.text[prefix_len] = '\0';
+    }
+    te = ir->terms_from(ir, &prefix_term);
+
+    q = bq_create(true);
+    if (te) {
+      TermBuffer *tb = te->tb_curr;
+      do {
+        if (strcmp(tb->field, field) != 0 ||
+            (prefix && strncmp(tb->text, prefix, prefix_len) != 0))
+          break;
+
+        if (wc_match(pattern, tb->text + prefix_len)) {
+          tq = tq_create(term_create(tb->field, tb->text)); /* found match */
+          tq->boost = self->boost;                          /* set boost */
+          bq_add_query(q, tq, BC_SHOULD);                   /* add query */
+        }
+      } while ((tb = te->next(te)) != NULL);
+      te->close(te);
+    }
+    free(prefix);
+  }
+
+  if (self->rewritten) self->rewritten->destroy(self->rewritten);
+  return self->rewritten = q;
+}
+
+void wcq_destroy(void *p)
+{
+  Query *self = (Query *)p;
+  if (self->destroy_all) term_destroy((Term *)self->data);
+  q_destroy(self);
+}
+
+Query *wcq_create(Term *term)
+{
+  Query *self = q_create();
+
+  self->data = term;
+  self->type = WILD_CARD_QUERY;
+  self->create_weight = NULL;
+  self->to_s = &wcq_to_s;
+  self->rewrite = &wcq_rewrite;
+  self->destroy = &wcq_destroy;
+
+  return self;
+}
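Note: a small sketch of the wc_match() semantics used by wcq_rewrite() above, not part of the diff. It assumes WILD_STRING is '*' and WILD_CHAR is '?' (the values are defined elsewhere in this changeset): '*' matches any run of characters, including none, and '?' matches exactly one character.

#include <assert.h>
#include "search.h"

int main(void)
{
  assert( wc_match("fer*",   "ferret"));  /* '*' matches the rest of the text  */
  assert( wc_match("*rr*",   "ferret"));  /* '*' also matches a leading prefix */
  assert( wc_match("f?rret", "ferret"));  /* '?' matches exactly one character */
  assert(!wc_match("fer?",   "ferret"));  /* '?' cannot swallow the whole tail */
  return 0;
}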