doing 2.0.18 → 2.0.22
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/CHANGELOG.md +22 -0
- data/Gemfile.lock +15 -5
- data/README.md +1 -1
- data/bin/doing +2 -18
- data/doing.gemspec +5 -4
- data/doing.rdoc +2 -2
- data/generate_completions.sh +3 -3
- data/lib/doing/cli_status.rb +6 -2
- data/lib/doing/completion/bash_completion.rb +185 -0
- data/lib/doing/completion/fish_completion.rb +175 -0
- data/lib/doing/completion/string.rb +17 -0
- data/lib/doing/completion/zsh_completion.rb +140 -0
- data/lib/doing/completion.rb +39 -0
- data/lib/doing/version.rb +1 -1
- data/lib/doing/wwid.rb +18 -6
- data/lib/doing.rb +1 -1
- data/lib/helpers/fzf/.goreleaser.yml +119 -0
- data/lib/helpers/fzf/.rubocop.yml +28 -0
- data/lib/helpers/fzf/ADVANCED.md +565 -0
- data/lib/helpers/fzf/BUILD.md +49 -0
- data/lib/helpers/fzf/CHANGELOG.md +1193 -0
- data/lib/helpers/fzf/Dockerfile +11 -0
- data/lib/helpers/fzf/LICENSE +21 -0
- data/lib/helpers/fzf/Makefile +166 -0
- data/lib/helpers/fzf/README-VIM.md +486 -0
- data/lib/helpers/fzf/README.md +712 -0
- data/lib/helpers/fzf/bin/fzf-tmux +233 -0
- data/lib/helpers/fzf/doc/fzf.txt +512 -0
- data/lib/helpers/fzf/go.mod +17 -0
- data/lib/helpers/fzf/go.sum +31 -0
- data/lib/helpers/fzf/install +382 -0
- data/lib/helpers/fzf/install.ps1 +65 -0
- data/lib/helpers/fzf/main.go +14 -0
- data/lib/helpers/fzf/man/man1/fzf-tmux.1 +68 -0
- data/lib/helpers/fzf/man/man1/fzf.1 +1001 -0
- data/lib/helpers/fzf/plugin/fzf.vim +1048 -0
- data/lib/helpers/fzf/shell/completion.bash +381 -0
- data/lib/helpers/fzf/shell/completion.zsh +329 -0
- data/lib/helpers/fzf/shell/key-bindings.bash +96 -0
- data/lib/helpers/fzf/shell/key-bindings.fish +172 -0
- data/lib/helpers/fzf/shell/key-bindings.zsh +114 -0
- data/lib/helpers/fzf/src/LICENSE +21 -0
- data/lib/helpers/fzf/src/algo/algo.go +884 -0
- data/lib/helpers/fzf/src/algo/algo_test.go +197 -0
- data/lib/helpers/fzf/src/algo/normalize.go +492 -0
- data/lib/helpers/fzf/src/ansi.go +409 -0
- data/lib/helpers/fzf/src/ansi_test.go +427 -0
- data/lib/helpers/fzf/src/cache.go +81 -0
- data/lib/helpers/fzf/src/cache_test.go +39 -0
- data/lib/helpers/fzf/src/chunklist.go +89 -0
- data/lib/helpers/fzf/src/chunklist_test.go +80 -0
- data/lib/helpers/fzf/src/constants.go +85 -0
- data/lib/helpers/fzf/src/core.go +351 -0
- data/lib/helpers/fzf/src/history.go +96 -0
- data/lib/helpers/fzf/src/history_test.go +68 -0
- data/lib/helpers/fzf/src/item.go +44 -0
- data/lib/helpers/fzf/src/item_test.go +23 -0
- data/lib/helpers/fzf/src/matcher.go +235 -0
- data/lib/helpers/fzf/src/merger.go +120 -0
- data/lib/helpers/fzf/src/merger_test.go +88 -0
- data/lib/helpers/fzf/src/options.go +1691 -0
- data/lib/helpers/fzf/src/options_test.go +457 -0
- data/lib/helpers/fzf/src/pattern.go +425 -0
- data/lib/helpers/fzf/src/pattern_test.go +209 -0
- data/lib/helpers/fzf/src/protector/protector.go +8 -0
- data/lib/helpers/fzf/src/protector/protector_openbsd.go +10 -0
- data/lib/helpers/fzf/src/reader.go +201 -0
- data/lib/helpers/fzf/src/reader_test.go +63 -0
- data/lib/helpers/fzf/src/result.go +243 -0
- data/lib/helpers/fzf/src/result_others.go +16 -0
- data/lib/helpers/fzf/src/result_test.go +159 -0
- data/lib/helpers/fzf/src/result_x86.go +16 -0
- data/lib/helpers/fzf/src/terminal.go +2832 -0
- data/lib/helpers/fzf/src/terminal_test.go +638 -0
- data/lib/helpers/fzf/src/terminal_unix.go +26 -0
- data/lib/helpers/fzf/src/terminal_windows.go +45 -0
- data/lib/helpers/fzf/src/tokenizer.go +253 -0
- data/lib/helpers/fzf/src/tokenizer_test.go +112 -0
- data/lib/helpers/fzf/src/tui/dummy.go +46 -0
- data/lib/helpers/fzf/src/tui/light.go +987 -0
- data/lib/helpers/fzf/src/tui/light_unix.go +110 -0
- data/lib/helpers/fzf/src/tui/light_windows.go +145 -0
- data/lib/helpers/fzf/src/tui/tcell.go +721 -0
- data/lib/helpers/fzf/src/tui/tcell_test.go +392 -0
- data/lib/helpers/fzf/src/tui/ttyname_unix.go +47 -0
- data/lib/helpers/fzf/src/tui/ttyname_windows.go +14 -0
- data/lib/helpers/fzf/src/tui/tui.go +625 -0
- data/lib/helpers/fzf/src/tui/tui_test.go +20 -0
- data/lib/helpers/fzf/src/util/atomicbool.go +34 -0
- data/lib/helpers/fzf/src/util/atomicbool_test.go +17 -0
- data/lib/helpers/fzf/src/util/chars.go +198 -0
- data/lib/helpers/fzf/src/util/chars_test.go +46 -0
- data/lib/helpers/fzf/src/util/eventbox.go +96 -0
- data/lib/helpers/fzf/src/util/eventbox_test.go +61 -0
- data/lib/helpers/fzf/src/util/slab.go +12 -0
- data/lib/helpers/fzf/src/util/util.go +138 -0
- data/lib/helpers/fzf/src/util/util_test.go +40 -0
- data/lib/helpers/fzf/src/util/util_unix.go +47 -0
- data/lib/helpers/fzf/src/util/util_windows.go +83 -0
- data/lib/helpers/fzf/test/fzf.vader +175 -0
- data/lib/helpers/fzf/test/test_go.rb +2626 -0
- data/lib/helpers/fzf/uninstall +117 -0
- data/scripts/generate_bash_completions.rb +6 -12
- data/scripts/generate_fish_completions.rb +7 -16
- data/scripts/generate_zsh_completions.rb +6 -15
- metadata +144 -9
@@ -0,0 +1,425 @@
|
|
1
|
+
package fzf
|
2
|
+
|
3
|
+
import (
|
4
|
+
"fmt"
|
5
|
+
"regexp"
|
6
|
+
"strings"
|
7
|
+
|
8
|
+
"github.com/junegunn/fzf/src/algo"
|
9
|
+
"github.com/junegunn/fzf/src/util"
|
10
|
+
)
|
11
|
+
|
12
|
+
// fuzzy
|
13
|
+
// 'exact
|
14
|
+
// ^prefix-exact
|
15
|
+
// suffix-exact$
|
16
|
+
// !inverse-exact
|
17
|
+
// !'inverse-fuzzy
|
18
|
+
// !^inverse-prefix-exact
|
19
|
+
// !inverse-suffix-exact$
|
20
|
+
|
21
|
+
// termType identifies how a single search term is matched.
type termType int

const (
	termFuzzy  termType = iota // default: fuzzy match
	termExact                  // 'text — exact substring match
	termPrefix                 // ^text — must match at the beginning
	termSuffix                 // text$ — must match at the end
	termEqual                  // ^text$ — whole string must equal the term
)
|
30
|
+
|
31
|
+
// term is one parsed unit of an extended-search query.
type term struct {
	typ           termType // match strategy for this term
	inv           bool     // true when the term is negated with a ! prefix
	text          []rune   // term text with operator characters stripped
	caseSensitive bool     // resolved from the Case mode and the term's own casing
	normalize     bool     // whether Unicode normalization applies to this term
}
|
38
|
+
|
39
|
+
// String returns the string representation of a term.
|
40
|
+
func (t term) String() string {
|
41
|
+
return fmt.Sprintf("term{typ: %d, inv: %v, text: []rune(%q), caseSensitive: %v}", t.typ, t.inv, string(t.text), t.caseSensitive)
|
42
|
+
}
|
43
|
+
|
44
|
+
// termSet is a group of terms OR-ed together (joined by | in the query).
type termSet []term
|
45
|
+
|
46
|
+
// Pattern represents a compiled search pattern.
type Pattern struct {
	fuzzy         bool      // fuzzy matching enabled
	fuzzyAlgo     algo.Algo // algorithm used for fuzzy terms
	extended      bool      // extended-search mode (multiple space-separated terms)
	caseSensitive bool      // resolved case sensitivity (non-extended mode)
	normalize     bool      // whether Unicode normalization applies (non-extended mode)
	forward       bool      // scan direction
	text          []rune    // query text (lowercased when case-insensitive)
	termSets      []termSet // parsed terms; empty unless extended
	sortable      bool      // whether results should be ranked
	cacheable     bool      // whether chunk-level results may be cached
	cacheKey      string    // precomputed key for the result cache
	delimiter     Delimiter // field delimiter for nth-restricted matching
	nth           []Range   // field ranges to restrict matching to
	// procFun maps each term type to its match function.
	procFun map[termType]algo.Algo
}
|
63
|
+
|
64
|
+
var (
	_patternCache map[string]*Pattern // query string → compiled pattern
	_splitRegex   *regexp.Regexp      // splits extended queries on runs of spaces
	_cache        ChunkCache          // chunk-level result cache
)

// init compiles the query splitter and resets both caches to a usable state.
func init() {
	_splitRegex = regexp.MustCompile(" +")
	clearPatternCache()
	clearChunkCache()
}
|
75
|
+
|
76
|
+
// clearPatternCache resets the query-string → Pattern cache.
func clearPatternCache() {
	// We can uniquely identify the pattern for a given string since
	// search mode and caseMode do not change while the program is running
	_patternCache = make(map[string]*Pattern)
}
|
81
|
+
|
82
|
+
// clearChunkCache resets the chunk-level result cache.
func clearChunkCache() {
	_cache = NewChunkCache()
}
|
85
|
+
|
86
|
+
// BuildPattern builds a Pattern object from the given arguments. Identical
// query strings return the same cached *Pattern (see clearPatternCache).
func BuildPattern(fuzzy bool, fuzzyAlgo algo.Algo, extended bool, caseMode Case, normalize bool, forward bool,
	cacheable bool, nth []Range, delimiter Delimiter, runes []rune) *Pattern {

	var asString string
	if extended {
		// Trim leading spaces and unescaped trailing spaces; "\ " is an
		// escaped (literal) space and must survive.
		asString = strings.TrimLeft(string(runes), " ")
		for strings.HasSuffix(asString, " ") && !strings.HasSuffix(asString, "\\ ") {
			asString = asString[:len(asString)-1]
		}
	} else {
		asString = string(runes)
	}

	cached, found := _patternCache[asString]
	if found {
		return cached
	}

	caseSensitive := true
	sortable := true
	termSets := []termSet{}

	if extended {
		termSets = parseTerms(fuzzy, caseMode, normalize, asString)
		// We should not sort the result if there are only inverse search terms
		sortable = false
	Loop:
		for _, termSet := range termSets {
			for idx, term := range termSet {
				if !term.inv {
					sortable = true
				}
				// If the query contains inverse search terms or OR operators,
				// we cannot cache the search scope
				if !cacheable || idx > 0 || term.inv || fuzzy && term.typ != termFuzzy || !fuzzy && term.typ != termExact {
					cacheable = false
					if sortable {
						// Can't break until we see at least one non-inverse term
						break Loop
					}
				}
			}
		}
	} else {
		lowerString := strings.ToLower(asString)
		// Normalization only matters when the query itself contains no
		// already-normalized characters.
		normalize = normalize &&
			lowerString == string(algo.NormalizeRunes([]rune(lowerString)))
		caseSensitive = caseMode == CaseRespect ||
			caseMode == CaseSmart && lowerString != asString
		if !caseSensitive {
			asString = lowerString
		}
	}

	ptr := &Pattern{
		fuzzy:         fuzzy,
		fuzzyAlgo:     fuzzyAlgo,
		extended:      extended,
		caseSensitive: caseSensitive,
		normalize:     normalize,
		forward:       forward,
		text:          []rune(asString),
		termSets:      termSets,
		sortable:      sortable,
		cacheable:     cacheable,
		nth:           nth,
		delimiter:     delimiter,
		procFun:       make(map[termType]algo.Algo)}

	ptr.cacheKey = ptr.buildCacheKey()
	ptr.procFun[termFuzzy] = fuzzyAlgo
	ptr.procFun[termEqual] = algo.EqualMatch
	ptr.procFun[termExact] = algo.ExactMatchNaive
	ptr.procFun[termPrefix] = algo.PrefixMatch
	ptr.procFun[termSuffix] = algo.SuffixMatch

	_patternCache[asString] = ptr
	return ptr
}
|
166
|
+
|
167
|
+
// parseTerms splits an extended-search query into termSets. Space-separated
// terms are AND-ed (one set each); terms joined by " | " are OR-ed into the
// same set. Operator prefixes/suffixes (', ^, $, !) select the term type.
func parseTerms(fuzzy bool, caseMode Case, normalize bool, str string) []termSet {
	// Temporarily encode escaped spaces ("\ ") as tabs so the regex split
	// does not break on them; they are restored per token below.
	str = strings.Replace(str, "\\ ", "\t", -1)
	tokens := _splitRegex.Split(str, -1)
	sets := []termSet{}
	set := termSet{}
	switchSet := false // true once the current set has a term; next term opens a new set
	afterBar := false  // true right after an OR operator
	for _, token := range tokens {
		typ, inv, text := termFuzzy, false, strings.Replace(token, "\t", " ", -1)
		lowerText := strings.ToLower(text)
		caseSensitive := caseMode == CaseRespect ||
			caseMode == CaseSmart && text != lowerText
		normalizeTerm := normalize &&
			lowerText == string(algo.NormalizeRunes([]rune(lowerText)))
		if !caseSensitive {
			text = lowerText
		}
		if !fuzzy {
			typ = termExact
		}

		// A bare "|" keeps the next term in the current (OR) set
		if len(set) > 0 && !afterBar && text == "|" {
			switchSet = false
			afterBar = true
			continue
		}
		afterBar = false

		if strings.HasPrefix(text, "!") {
			inv = true
			typ = termExact
			text = text[1:]
		}

		// A lone "$" is a literal dollar sign, not a suffix operator
		if text != "$" && strings.HasSuffix(text, "$") {
			typ = termSuffix
			text = text[:len(text)-1]
		}

		if strings.HasPrefix(text, "'") {
			// Flip exactness
			if fuzzy && !inv {
				typ = termExact
				text = text[1:]
			} else {
				typ = termFuzzy
				text = text[1:]
			}
		} else if strings.HasPrefix(text, "^") {
			// ^text$ combines into a whole-string equality match
			if typ == termSuffix {
				typ = termEqual
			} else {
				typ = termPrefix
			}
			text = text[1:]
		}

		if len(text) > 0 {
			if switchSet {
				sets = append(sets, set)
				set = termSet{}
			}
			textRunes := []rune(text)
			if normalizeTerm {
				textRunes = algo.NormalizeRunes(textRunes)
			}
			set = append(set, term{
				typ:           typ,
				inv:           inv,
				text:          textRunes,
				caseSensitive: caseSensitive,
				normalize:     normalizeTerm})
			switchSet = true
		}
	}
	if len(set) > 0 {
		sets = append(sets, set)
	}
	return sets
}
|
247
|
+
|
248
|
+
// IsEmpty returns true if the pattern is effectively empty
|
249
|
+
func (p *Pattern) IsEmpty() bool {
|
250
|
+
if !p.extended {
|
251
|
+
return len(p.text) == 0
|
252
|
+
}
|
253
|
+
return len(p.termSets) == 0
|
254
|
+
}
|
255
|
+
|
256
|
+
// AsString returns the search query in string type.
func (p *Pattern) AsString() string {
	return string(p.text)
}
|
260
|
+
|
261
|
+
func (p *Pattern) buildCacheKey() string {
|
262
|
+
if !p.extended {
|
263
|
+
return p.AsString()
|
264
|
+
}
|
265
|
+
cacheableTerms := []string{}
|
266
|
+
for _, termSet := range p.termSets {
|
267
|
+
if len(termSet) == 1 && !termSet[0].inv && (p.fuzzy || termSet[0].typ == termExact) {
|
268
|
+
cacheableTerms = append(cacheableTerms, string(termSet[0].text))
|
269
|
+
}
|
270
|
+
}
|
271
|
+
return strings.Join(cacheableTerms, "\t")
|
272
|
+
}
|
273
|
+
|
274
|
+
// CacheKey returns the precomputed string used as the key of the result
// cache (see buildCacheKey).
func (p *Pattern) CacheKey() string {
	return p.cacheKey
}
|
278
|
+
|
279
|
+
// Match returns the list of matching Items in the given Chunk, consulting
// and populating the chunk-level cache when the pattern permits it.
func (p *Pattern) Match(chunk *Chunk, slab *util.Slab) []Result {
	// ChunkCache: Exact match
	cacheKey := p.CacheKey()
	if p.cacheable {
		if cached := _cache.Lookup(chunk, cacheKey); cached != nil {
			return cached
		}
	}

	// Prefix/suffix cache: reuse the results of a broader previous query
	// to narrow the search space
	space := _cache.Search(chunk, cacheKey)

	matches := p.matchChunk(chunk, space, slab)

	if p.cacheable {
		_cache.Add(chunk, cacheKey, matches)
	}
	return matches
}
|
299
|
+
|
300
|
+
func (p *Pattern) matchChunk(chunk *Chunk, space []Result, slab *util.Slab) []Result {
|
301
|
+
matches := []Result{}
|
302
|
+
|
303
|
+
if space == nil {
|
304
|
+
for idx := 0; idx < chunk.count; idx++ {
|
305
|
+
if match, _, _ := p.MatchItem(&chunk.items[idx], false, slab); match != nil {
|
306
|
+
matches = append(matches, *match)
|
307
|
+
}
|
308
|
+
}
|
309
|
+
} else {
|
310
|
+
for _, result := range space {
|
311
|
+
if match, _, _ := p.MatchItem(result.item, false, slab); match != nil {
|
312
|
+
matches = append(matches, *match)
|
313
|
+
}
|
314
|
+
}
|
315
|
+
}
|
316
|
+
return matches
|
317
|
+
}
|
318
|
+
|
319
|
+
// MatchItem returns a non-nil Result when the Item matches the pattern,
// along with the matched offsets and, when withPos is set, the individual
// match positions.
func (p *Pattern) MatchItem(item *Item, withPos bool, slab *util.Slab) (*Result, []Offset, *[]int) {
	if p.extended {
		// A match requires every AND-ed term set to have produced an offset
		if offsets, bonus, pos := p.extendedMatch(item, withPos, slab); len(offsets) == len(p.termSets) {
			result := buildResult(item, offsets, bonus)
			return &result, offsets, pos
		}
		return nil, nil, nil
	}
	offset, bonus, pos := p.basicMatch(item, withPos, slab)
	// A negative start index means no match
	if sidx := offset[0]; sidx >= 0 {
		offsets := []Offset{offset}
		result := buildResult(item, offsets, bonus)
		return &result, offsets, pos
	}
	return nil, nil, nil
}
|
336
|
+
|
337
|
+
func (p *Pattern) basicMatch(item *Item, withPos bool, slab *util.Slab) (Offset, int, *[]int) {
|
338
|
+
var input []Token
|
339
|
+
if len(p.nth) == 0 {
|
340
|
+
input = []Token{{text: &item.text, prefixLength: 0}}
|
341
|
+
} else {
|
342
|
+
input = p.transformInput(item)
|
343
|
+
}
|
344
|
+
if p.fuzzy {
|
345
|
+
return p.iter(p.fuzzyAlgo, input, p.caseSensitive, p.normalize, p.forward, p.text, withPos, slab)
|
346
|
+
}
|
347
|
+
return p.iter(algo.ExactMatchNaive, input, p.caseSensitive, p.normalize, p.forward, p.text, withPos, slab)
|
348
|
+
}
|
349
|
+
|
350
|
+
// extendedMatch evaluates each termSet against the item. Within a set the
// terms are OR-ed: the first term that succeeds (or whose inversion
// succeeds) wins. The returned offsets hold one entry per matched set; the
// caller compares len(offsets) with len(p.termSets) to decide overall
// success.
func (p *Pattern) extendedMatch(item *Item, withPos bool, slab *util.Slab) ([]Offset, int, *[]int) {
	var input []Token
	if len(p.nth) == 0 {
		input = []Token{{text: &item.text, prefixLength: 0}}
	} else {
		input = p.transformInput(item)
	}
	offsets := []Offset{}
	var totalScore int
	var allPos *[]int
	if withPos {
		allPos = &[]int{}
	}
	for _, termSet := range p.termSets {
		var offset Offset
		var currentScore int
		matched := false
		for _, term := range termSet {
			pfun := p.procFun[term.typ]
			off, score, pos := p.iter(pfun, input, term.caseSensitive, term.normalize, p.forward, term.text, withPos, slab)
			if sidx := off[0]; sidx >= 0 {
				if term.inv {
					// Inverted term matched → this alternative fails; try
					// the next term in the set
					continue
				}
				offset, currentScore = off, score
				matched = true
				if withPos {
					if pos != nil {
						*allPos = append(*allPos, *pos...)
					} else {
						// Matcher gave no position list; derive positions
						// from the offset range
						for idx := off[0]; idx < off[1]; idx++ {
							*allPos = append(*allPos, int(idx))
						}
					}
				}
				break
			} else if term.inv {
				// Inverted term did not match → success with no highlighted
				// range; keep scanning the remaining alternatives
				offset, currentScore = Offset{0, 0}, 0
				matched = true
				continue
			}
		}
		if matched {
			offsets = append(offsets, offset)
			totalScore += currentScore
		}
	}
	return offsets, totalScore, allPos
}
|
399
|
+
|
400
|
+
func (p *Pattern) transformInput(item *Item) []Token {
|
401
|
+
if item.transformed != nil {
|
402
|
+
return *item.transformed
|
403
|
+
}
|
404
|
+
|
405
|
+
tokens := Tokenize(item.text.ToString(), p.delimiter)
|
406
|
+
ret := Transform(tokens, p.nth)
|
407
|
+
item.transformed = &ret
|
408
|
+
return ret
|
409
|
+
}
|
410
|
+
|
411
|
+
// iter applies the match function to each token of the input and returns
// the first hit, translating token-local indices into whole-line positions
// via the token's prefixLength. A start index of -1 signals no match.
func (p *Pattern) iter(pfun algo.Algo, tokens []Token, caseSensitive bool, normalize bool, forward bool, pattern []rune, withPos bool, slab *util.Slab) (Offset, int, *[]int) {
	for _, part := range tokens {
		if res, pos := pfun(caseSensitive, normalize, forward, part.text, pattern, withPos, slab); res.Start >= 0 {
			sidx := int32(res.Start) + part.prefixLength
			eidx := int32(res.End) + part.prefixLength
			if pos != nil {
				// Shift positions from token-local to line-global indices
				for idx := range *pos {
					(*pos)[idx] += int(part.prefixLength)
				}
			}
			return Offset{sidx, eidx}, res.Score, pos
		}
	}
	return Offset{-1, -1}, 0, nil
}
|
@@ -0,0 +1,209 @@
|
|
1
|
+
package fzf
|
2
|
+
|
3
|
+
import (
|
4
|
+
"reflect"
|
5
|
+
"testing"
|
6
|
+
|
7
|
+
"github.com/junegunn/fzf/src/algo"
|
8
|
+
"github.com/junegunn/fzf/src/util"
|
9
|
+
)
|
10
|
+
|
11
|
+
// slab is shared scratch memory reused across the tests in this file.
var slab *util.Slab

func init() {
	slab = util.MakeSlab(slab16Size, slab32Size)
}
|
16
|
+
|
17
|
+
// TestParseTermsExtended verifies that every operator combination in an
// extended query ('exact, ^prefix, suffix$, !inverse, | alternation) is
// parsed into the expected term type and inversion flag.
func TestParseTermsExtended(t *testing.T) {
	terms := parseTerms(true, CaseSmart, false,
		"aaa 'bbb ^ccc ddd$ !eee !'fff !^ggg !hhh$ | ^iii$ ^xxx | 'yyy | zzz$ | !ZZZ |")
	if len(terms) != 9 ||
		terms[0][0].typ != termFuzzy || terms[0][0].inv ||
		terms[1][0].typ != termExact || terms[1][0].inv ||
		terms[2][0].typ != termPrefix || terms[2][0].inv ||
		terms[3][0].typ != termSuffix || terms[3][0].inv ||
		terms[4][0].typ != termExact || !terms[4][0].inv ||
		terms[5][0].typ != termFuzzy || !terms[5][0].inv ||
		terms[6][0].typ != termPrefix || !terms[6][0].inv ||
		terms[7][0].typ != termSuffix || !terms[7][0].inv ||
		terms[7][1].typ != termEqual || terms[7][1].inv ||
		terms[8][0].typ != termPrefix || terms[8][0].inv ||
		terms[8][1].typ != termExact || terms[8][1].inv ||
		terms[8][2].typ != termSuffix || terms[8][2].inv ||
		terms[8][3].typ != termExact || !terms[8][3].inv {
		t.Errorf("%v", terms)
	}
	// Every single-term set should have had its 3-character payload
	// preserved after operator stripping
	for _, termSet := range terms[:8] {
		term := termSet[0]
		if len(term.text) != 3 {
			t.Errorf("%v", term)
		}
	}
}
|
43
|
+
|
44
|
+
// TestParseTermsExtendedExact verifies operator parsing when fuzzy matching
// is disabled: unquoted terms become exact, and 'quoting flips to fuzzy.
func TestParseTermsExtendedExact(t *testing.T) {
	terms := parseTerms(false, CaseSmart, false,
		"aaa 'bbb ^ccc ddd$ !eee !'fff !^ggg !hhh$")
	if len(terms) != 8 ||
		terms[0][0].typ != termExact || terms[0][0].inv || len(terms[0][0].text) != 3 ||
		terms[1][0].typ != termFuzzy || terms[1][0].inv || len(terms[1][0].text) != 3 ||
		terms[2][0].typ != termPrefix || terms[2][0].inv || len(terms[2][0].text) != 3 ||
		terms[3][0].typ != termSuffix || terms[3][0].inv || len(terms[3][0].text) != 3 ||
		terms[4][0].typ != termExact || !terms[4][0].inv || len(terms[4][0].text) != 3 ||
		terms[5][0].typ != termFuzzy || !terms[5][0].inv || len(terms[5][0].text) != 3 ||
		terms[6][0].typ != termPrefix || !terms[6][0].inv || len(terms[6][0].text) != 3 ||
		terms[7][0].typ != termSuffix || !terms[7][0].inv || len(terms[7][0].text) != 3 {
		t.Errorf("%v", terms)
	}
}
|
59
|
+
|
60
|
+
// TestParseTermsEmpty verifies that operator-only tokens yield no terms.
func TestParseTermsEmpty(t *testing.T) {
	terms := parseTerms(true, CaseSmart, false, "' ^ !' !^")
	if len(terms) != 0 {
		t.Errorf("%v", terms)
	}
}
|
66
|
+
|
67
|
+
// TestExact checks that a quoted query ('abc) produces an exact-substring
// term and that ExactMatchNaive finds it at the expected offsets.
func TestExact(t *testing.T) {
	defer clearPatternCache()
	clearPatternCache()
	pattern := BuildPattern(true, algo.FuzzyMatchV2, true, CaseSmart, false, true, true,
		[]Range{}, Delimiter{}, []rune("'abc"))
	chars := util.ToChars([]byte("aabbcc abc"))
	res, pos := algo.ExactMatchNaive(
		pattern.caseSensitive, pattern.normalize, pattern.forward, &chars, pattern.termSets[0][0].text, true, nil)
	if res.Start != 7 || res.End != 10 {
		t.Errorf("%v / %d / %d", pattern.termSets, res.Start, res.End)
	}
	if pos != nil {
		t.Errorf("pos is expected to be nil")
	}
}
|
82
|
+
|
83
|
+
// TestEqual checks whole-string (^...$, termEqual) matching, including
// tolerance for surrounding whitespace and case-sensitivity from CaseSmart.
func TestEqual(t *testing.T) {
	defer clearPatternCache()
	clearPatternCache()
	pattern := BuildPattern(true, algo.FuzzyMatchV2, true, CaseSmart, false, true, true, []Range{}, Delimiter{}, []rune("^AbC$"))

	match := func(str string, sidxExpected int, eidxExpected int) {
		chars := util.ToChars([]byte(str))
		res, pos := algo.EqualMatch(
			pattern.caseSensitive, pattern.normalize, pattern.forward, &chars, pattern.termSets[0][0].text, true, nil)
		if res.Start != sidxExpected || res.End != eidxExpected {
			t.Errorf("%v / %d / %d", pattern.termSets, res.Start, res.End)
		}
		if pos != nil {
			t.Errorf("pos is expected to be nil")
		}
	}
	match("ABC", -1, -1)
	match("AbC", 0, 3)
	match("AbC ", 0, 3)
	match(" AbC ", 1, 4)
	match(" AbC", 2, 5)
}
|
105
|
+
|
106
|
+
// TestCaseSensitivity verifies how CaseSmart, CaseIgnore and CaseRespect
// resolve the pattern's caseSensitive flag and stored query text.
func TestCaseSensitivity(t *testing.T) {
	defer clearPatternCache()
	clearPatternCache()
	pat1 := BuildPattern(true, algo.FuzzyMatchV2, false, CaseSmart, false, true, true, []Range{}, Delimiter{}, []rune("abc"))
	clearPatternCache()
	pat2 := BuildPattern(true, algo.FuzzyMatchV2, false, CaseSmart, false, true, true, []Range{}, Delimiter{}, []rune("Abc"))
	clearPatternCache()
	pat3 := BuildPattern(true, algo.FuzzyMatchV2, false, CaseIgnore, false, true, true, []Range{}, Delimiter{}, []rune("abc"))
	clearPatternCache()
	pat4 := BuildPattern(true, algo.FuzzyMatchV2, false, CaseIgnore, false, true, true, []Range{}, Delimiter{}, []rune("Abc"))
	clearPatternCache()
	pat5 := BuildPattern(true, algo.FuzzyMatchV2, false, CaseRespect, false, true, true, []Range{}, Delimiter{}, []rune("abc"))
	clearPatternCache()
	pat6 := BuildPattern(true, algo.FuzzyMatchV2, false, CaseRespect, false, true, true, []Range{}, Delimiter{}, []rune("Abc"))

	if string(pat1.text) != "abc" || pat1.caseSensitive != false ||
		string(pat2.text) != "Abc" || pat2.caseSensitive != true ||
		string(pat3.text) != "abc" || pat3.caseSensitive != false ||
		string(pat4.text) != "abc" || pat4.caseSensitive != false ||
		string(pat5.text) != "abc" || pat5.caseSensitive != true ||
		string(pat6.text) != "Abc" || pat6.caseSensitive != true {
		t.Error("Invalid case conversion")
	}
}
|
130
|
+
|
131
|
+
// TestOrigTextAndTransformed ensures that matching carries through an
// item's origText and memoized transformed tokens in both basic and
// extended mode, and that positions/offsets are reported as expected.
func TestOrigTextAndTransformed(t *testing.T) {
	pattern := BuildPattern(true, algo.FuzzyMatchV2, true, CaseSmart, false, true, true, []Range{}, Delimiter{}, []rune("jg"))
	tokens := Tokenize("junegunn", Delimiter{})
	trans := Transform(tokens, []Range{{1, 1}})

	origBytes := []byte("junegunn.choi")
	for _, extended := range []bool{false, true} {
		chunk := Chunk{count: 1}
		chunk.items[0] = Item{
			text:        util.ToChars([]byte("junegunn")),
			origText:    &origBytes,
			transformed: &trans}
		pattern.extended = extended
		matches := pattern.matchChunk(&chunk, nil, slab) // No cache
		if !(matches[0].item.text.ToString() == "junegunn" &&
			string(*matches[0].item.origText) == "junegunn.choi" &&
			reflect.DeepEqual(*matches[0].item.transformed, trans)) {
			t.Error("Invalid match result", matches)
		}

		match, offsets, pos := pattern.MatchItem(&chunk.items[0], true, slab)
		if !(match.item.text.ToString() == "junegunn" &&
			string(*match.item.origText) == "junegunn.choi" &&
			offsets[0][0] == 0 && offsets[0][1] == 5 &&
			reflect.DeepEqual(*match.item.transformed, trans)) {
			t.Error("Invalid match result", match, offsets, extended)
		}
		if !((*pos)[0] == 4 && (*pos)[1] == 0) {
			t.Error("Invalid pos array", *pos)
		}
	}
}
|
163
|
+
|
164
|
+
// TestCacheKey checks cache-key derivation and cacheability for a range of
// extended and non-extended queries (inversion and OR disable caching).
func TestCacheKey(t *testing.T) {
	test := func(extended bool, patStr string, expected string, cacheable bool) {
		clearPatternCache()
		pat := BuildPattern(true, algo.FuzzyMatchV2, extended, CaseSmart, false, true, true, []Range{}, Delimiter{}, []rune(patStr))
		if pat.CacheKey() != expected {
			t.Errorf("Expected: %s, actual: %s", expected, pat.CacheKey())
		}
		if pat.cacheable != cacheable {
			t.Errorf("Expected: %t, actual: %t (%s)", cacheable, pat.cacheable, patStr)
		}
		clearPatternCache()
	}
	test(false, "foo !bar", "foo !bar", true)
	test(false, "foo | bar !baz", "foo | bar !baz", true)
	test(true, "foo bar baz", "foo\tbar\tbaz", true)
	test(true, "foo !bar", "foo", false)
	test(true, "foo !bar baz", "foo\tbaz", false)
	test(true, "foo | bar baz", "baz", false)
	test(true, "foo | bar | baz", "", false)
	test(true, "foo | bar !baz", "", false)
	test(true, "| | foo", "", false)
	test(true, "| | | foo", "foo", false)
}
|
187
|
+
|
188
|
+
// TestCacheable checks Pattern.cacheable in both fuzzy and exact modes with
// quoting and inversion operators applied.
func TestCacheable(t *testing.T) {
	test := func(fuzzy bool, str string, expected string, cacheable bool) {
		clearPatternCache()
		pat := BuildPattern(fuzzy, algo.FuzzyMatchV2, true, CaseSmart, true, true, true, []Range{}, Delimiter{}, []rune(str))
		if pat.CacheKey() != expected {
			t.Errorf("Expected: %s, actual: %s", expected, pat.CacheKey())
		}
		if cacheable != pat.cacheable {
			t.Errorf("Invalid Pattern.cacheable for \"%s\": %v (expected: %v)", str, pat.cacheable, cacheable)
		}
		clearPatternCache()
	}
	test(true, "foo bar", "foo\tbar", true)
	test(true, "foo 'bar", "foo\tbar", false)
	test(true, "foo !bar", "foo", false)

	test(false, "foo bar", "foo\tbar", true)
	test(false, "foo 'bar", "foo", false)
	test(false, "foo '", "foo", true)
	test(false, "foo 'bar", "foo", false)
	test(false, "foo !bar", "foo", false)
}
|