mirror of https://github.com/junegunn/fzf.git
synced 2025-08-03 05:32:08 -07:00

Compare commits (10 commits):

- c656cfbdce
- de829c0938
- 64443221aa
- 9017e29741
- 0a22142d88
- ac160f98a8
- 62e01a2a62
- 5660cebaf6
- a7e588ceac
- 5baf1c5536
@@ -1,6 +1,12 @@
 CHANGELOG
 =========
 
+0.10.5
+------
+
+- `'`-prefix to unquote the term in `--extended-exact` mode
+- Backward scan when `--tiebreak=end` is set
+
 0.10.4
 ------
 
@@ -124,8 +124,9 @@ such as: `^music .mp3$ sbtrkt !rmx`
 | `'wild` | Items that include `wild` | exact-match (quoted) |
 | `!'fire` | Items that do not include `fire` | inverse-exact-match |
 
-If you don't need fuzzy matching and do not wish to "quote" every word, start
-fzf with `-e` or `--extended-exact` option.
+If you don't prefer fuzzy matching and do not wish to "quote" every word,
+start fzf with `-e` or `--extended-exact` option. Note that in
+`--extended-exact` mode, `'`-prefix "unquotes" the term.
 
 #### Environment variables
 
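The unquoting rule added above is easy to misread, so here is a minimal, self-contained Go sketch of how a leading `'` could be interpreted in the two extended modes. The mode and term names loosely mirror the ones that appear in the pattern-parsing diff further down, but the function itself is only an illustration of the documented behavior, not fzf's parser.

```go
package main

import (
	"fmt"
	"strings"
)

type mode int

const (
	modeExtended      mode = iota // -x: terms are fuzzy unless quoted
	modeExtendedExact             // -e: terms are exact unless "unquoted" with '
)

// classifyTerm is a toy version of the quoting rule: in --extended mode a
// leading ' makes the term exact; in --extended-exact mode it does the
// opposite and turns the term back into a fuzzy one.
func classifyTerm(m mode, text string) (kind string, stripped string) {
	if strings.HasPrefix(text, "'") {
		if m == modeExtended {
			return "exact", text[1:]
		}
		return "fuzzy", text[1:]
	}
	if m == modeExtended {
		return "fuzzy", text
	}
	return "exact", text
}

func main() {
	for _, m := range []mode{modeExtended, modeExtendedExact} {
		for _, term := range []string{"wild", "'wild"} {
			kind, text := classifyTerm(m, term)
			fmt.Printf("mode=%d term=%-6q -> %s match on %q\n", m, term, kind, text)
		}
	}
}
```

Running it shows the same `'wild` input becoming an exact term under `--extended` and a fuzzy term under `--extended-exact`.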
@@ -117,7 +117,9 @@ mkfifo $fifo3
 # Build arguments to fzf
 opts=""
 for arg in "${args[@]}"; do
-  opts="$opts \"${arg//\"/\\\"}\""
+  arg="${arg//\"/\\\"}"
+  arg="${arg//\`/\\\`}"
+  opts="$opts \"$arg\""
 done
 
 if [ -n "$term" -o -t 0 ]; then
install
@@ -1,7 +1,7 @@
 #!/usr/bin/env bash
 
-[[ "$@" =~ --pre ]] && version=0.10.4 pre=1 ||
-                       version=0.10.4 pre=0
+[[ "$@" =~ --pre ]] && version=0.10.5 pre=1 ||
+                       version=0.10.5 pre=0
 
 cd $(dirname $BASH_SOURCE)
 fzf_base=$(pwd)
@@ -21,7 +21,7 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 THE SOFTWARE.
 ..
-.TH fzf 1 "Aug 2015" "fzf 0.10.4" "fzf - a command-line fuzzy finder"
+.TH fzf 1 "Sep 2015" "fzf 0.10.5" "fzf - a command-line fuzzy finder"
 
 .SH NAME
 fzf - a command-line fuzzy finder
@@ -374,29 +374,40 @@ mode". In this mode, you can specify multiple patterns delimited by spaces,
 such as: \fB'wild ^music .mp3$ sbtrkt !rmx\fR
 
 .SS Exact-match (quoted)
-A term that is prefixed by a single-quote character (') is interpreted as an
-"exact-match" (or "non-fuzzy") term. fzf will search for the exact occurrences
-of the string.
+A term that is prefixed by a single-quote character (\fB'\fR) is interpreted as
+an "exact-match" (or "non-fuzzy") term. fzf will search for the exact
+occurrences of the string.
 
 .SS Anchored-match
-A term can be prefixed by ^, or suffixed by $ to become an anchored-match term.
-Then fzf will search for the items that start with or end with the given
-string. An anchored-match term is also an exact-match term.
+A term can be prefixed by \fB^\fR, or suffixed by \fB$\fR to become an
+anchored-match term. Then fzf will search for the items that start with or end
+with the given string. An anchored-match term is also an exact-match term.
 
 .SS Negation
-If a term is prefixed by !, fzf will exclude the items that satisfy the term
-from the result.
+If a term is prefixed by \fB!\fR, fzf will exclude the items that satisfy the
+term from the result.
 
 .SS Extended-exact mode
-If you don't need fuzzy matching at all and do not wish to "quote" (prefixing
-with ') every word, start fzf with \fB-e\fR or \fB--extended-exact\fR option
-(instead of \fB-x\fR or \fB--extended\fR).
+If you don't prefer fuzzy matching and do not wish to "quote" (prefixing with
+\fB'\fR) every word, start fzf with \fB-e\fR or \fB--extended-exact\fR option
+(instead of \fB-x\fR or \fB--extended\fR). Note that in \fB--extended-exact\fR
+mode, \fB'\fR-prefix "unquotes" the term.
 
 .SH AUTHOR
 Junegunn Choi (\fIjunegunn.c@gmail.com\fR)
 
 .SH SEE ALSO
+.B Project homepage:
+.RS
 .I https://github.com/junegunn/fzf
+.RE
+.br
+.R ""
+.br
+.B Extra Vim plugin:
+.RS
+.I https://github.com/junegunn/fzf.vim
+.RE
 
 .SH LICENSE
 MIT
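Since the man page now documents all four term prefixes in one place, a rough, stand-alone Go sketch of how a query such as `'wild ^music .mp3$ !rmx` could be evaluated against a line may help. It only models the prefixes described above (`'`, `^`, `$`, `!`), approximates fuzzy matching with a plain subsequence test, and is an illustration of the semantics rather than fzf's implementation.

```go
package main

import (
	"fmt"
	"strings"
)

// matchesTerm applies one term of the extended-search syntax to a line:
// ! negates the term, ^ anchors at the start, $ anchors at the end, and
// ' forces exact (substring) matching. Plain terms fall back to a crude
// subsequence check standing in for fuzzy matching.
func matchesTerm(line, term string) bool {
	neg := strings.HasPrefix(term, "!")
	if neg {
		term = term[1:]
	}
	var ok bool
	switch {
	case strings.HasPrefix(term, "^") && strings.HasSuffix(term, "$"):
		ok = line == strings.TrimSuffix(strings.TrimPrefix(term, "^"), "$")
	case strings.HasPrefix(term, "^"):
		ok = strings.HasPrefix(line, term[1:])
	case strings.HasSuffix(term, "$"):
		ok = strings.HasSuffix(line, strings.TrimSuffix(term, "$"))
	case strings.HasPrefix(term, "'"):
		ok = strings.Contains(line, term[1:])
	default: // stand-in for fuzzy matching: subsequence test (ASCII only)
		ok = isSubsequence(line, term)
	}
	if neg {
		return !ok
	}
	return ok
}

func isSubsequence(line, pattern string) bool {
	i := 0
	for _, r := range line {
		if i < len(pattern) && r == rune(pattern[i]) {
			i++
		}
	}
	return i == len(pattern)
}

func main() {
	query := strings.Fields("'wild ^music .mp3$ !rmx")
	for _, line := range []string{"music-wilderness.mp3", "music-wild-rmx.mp3", "wild.mp3"} {
		keep := true
		for _, term := range query {
			if !matchesTerm(line, term) {
				keep = false
				break
			}
		}
		fmt.Printf("%-22s -> %v\n", line, keep)
	}
}
```

Only the first line survives: the second is rejected by the `!rmx` negation and the third by the `^music` anchor.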
@@ -217,14 +217,15 @@ let s:launcher = function('s:xterm_launcher')
 function! s:execute(dict, command, temps)
   call s:pushd(a:dict)
   silent! !clear 2> /dev/null
+  let escaped = escape(substitute(a:command, '\n', '\\n', 'g'), '%#')
   if has('gui_running')
     let Launcher = get(a:dict, 'launcher', get(g:, 'Fzf_launcher', get(g:, 'fzf_launcher', s:launcher)))
     let fmt = type(Launcher) == 2 ? call(Launcher, []) : Launcher
-    let command = printf(fmt, "'".substitute(a:command, "'", "'\"'\"'", 'g')."'")
+    let command = printf(fmt, "'".substitute(escaped, "'", "'\"'\"'", 'g')."'")
   else
-    let command = a:command
+    let command = escaped
   endif
-  execute 'silent !'.escape(command, '%#')
+  execute 'silent !'.command
   redraw!
   if v:shell_error
     " Do not print error message on exit status 1
@@ -99,7 +99,11 @@ EOF
 }
 
 fzf-completion() {
-  local tokens cmd prefix trigger tail fzf matches lbuf d_cmds
+  local tokens cmd prefix trigger tail fzf matches lbuf d_cmds sws
+  if setopt | grep shwordsplit > /dev/null; then
+    sws=1
+    unsetopt shwordsplit
+  fi
 
   # http://zsh.sourceforge.net/FAQ/zshfaq03.html
   # http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion-Flags
@@ -148,6 +152,7 @@ fzf-completion() {
   else
     eval "zle ${fzf_default_completion:-expand-or-complete}"
   fi
+  [ -n "$sws" ] && setopt shwordsplit
 }
 
 [ -z "$fzf_default_completion" ] &&
@@ -19,7 +19,7 @@ function fzf_key_bindings
       -o -type f -print \
       -o -type d -print \
       -o -type l -print 2> /dev/null | sed 1d | cut -b3-"
-    eval $FZF_CTRL_T_COMMAND | eval (__fzfcmd) -m > $TMPDIR/fzf.result
+    eval "$FZF_CTRL_T_COMMAND | "(__fzfcmd)" -m > $TMPDIR/fzf.result"
     and commandline -i (cat $TMPDIR/fzf.result | __fzf_escape)
     commandline -f repaint
     rm -f $TMPDIR/fzf.result
@@ -15,8 +15,15 @@ import (
  * In short: They try to do as little work as possible.
  */
 
+func runeAt(runes []rune, index int, max int, forward bool) rune {
+	if forward {
+		return runes[index]
+	}
+	return runes[max-index-1]
+}
+
 // FuzzyMatch performs fuzzy-match
-func FuzzyMatch(caseSensitive bool, runes []rune, pattern []rune) (int, int) {
+func FuzzyMatch(caseSensitive bool, forward bool, runes []rune, pattern []rune) (int, int) {
 	if len(pattern) == 0 {
 		return 0, 0
 	}
@@ -34,7 +41,11 @@ func FuzzyMatch(caseSensitive bool, runes []rune, pattern []rune) (int, int) {
 	sidx := -1
 	eidx := -1
 
-	for index, char := range runes {
+	lenRunes := len(runes)
+	lenPattern := len(pattern)
+
+	for index := range runes {
+		char := runeAt(runes, index, lenRunes, forward)
 		// This is considerably faster than blindly applying strings.ToLower to the
 		// whole string
 		if !caseSensitive {
@@ -47,11 +58,12 @@ func FuzzyMatch(caseSensitive bool, runes []rune, pattern []rune) (int, int) {
 				char = unicode.To(unicode.LowerCase, char)
 			}
 		}
-		if char == pattern[pidx] {
+		pchar := runeAt(pattern, pidx, lenPattern, forward)
+		if char == pchar {
 			if sidx < 0 {
 				sidx = index
 			}
-			if pidx++; pidx == len(pattern) {
+			if pidx++; pidx == lenPattern {
 				eidx = index + 1
 				break
 			}
@@ -61,7 +73,7 @@ func FuzzyMatch(caseSensitive bool, runes []rune, pattern []rune) (int, int) {
 	if sidx >= 0 && eidx >= 0 {
 		pidx--
 		for index := eidx - 1; index >= sidx; index-- {
-			char := runes[index]
+			char := runeAt(runes, index, lenRunes, forward)
 			if !caseSensitive {
 				if char >= 'A' && char <= 'Z' {
 					char += 32
@@ -69,14 +81,19 @@ func FuzzyMatch(caseSensitive bool, runes []rune, pattern []rune) (int, int) {
 					char = unicode.To(unicode.LowerCase, char)
 				}
 			}
-			if char == pattern[pidx] {
+
+			pchar := runeAt(pattern, pidx, lenPattern, forward)
+			if char == pchar {
 				if pidx--; pidx < 0 {
 					sidx = index
 					break
 				}
 			}
 		}
-		return sidx, eidx
+		if forward {
+			return sidx, eidx
+		}
+		return lenRunes - eidx, lenRunes - sidx
 	}
 	return -1, -1
 }
@@ -88,20 +105,21 @@ func FuzzyMatch(caseSensitive bool, runes []rune, pattern []rune) (int, int) {
 //
 // We might try to implement better algorithms in the future:
 // http://en.wikipedia.org/wiki/String_searching_algorithm
-func ExactMatchNaive(caseSensitive bool, runes []rune, pattern []rune) (int, int) {
+func ExactMatchNaive(caseSensitive bool, forward bool, runes []rune, pattern []rune) (int, int) {
 	if len(pattern) == 0 {
 		return 0, 0
 	}
 
-	numRunes := len(runes)
-	plen := len(pattern)
-	if numRunes < plen {
+	lenRunes := len(runes)
+	lenPattern := len(pattern)
+
+	if lenRunes < lenPattern {
 		return -1, -1
 	}
 
 	pidx := 0
-	for index := 0; index < numRunes; index++ {
-		char := runes[index]
+	for index := 0; index < lenRunes; index++ {
+		char := runeAt(runes, index, lenRunes, forward)
 		if !caseSensitive {
 			if char >= 'A' && char <= 'Z' {
 				char += 32
@@ -109,10 +127,14 @@ func ExactMatchNaive(caseSensitive bool, runes []rune, pattern []rune) (int, int
 				char = unicode.To(unicode.LowerCase, char)
 			}
 		}
-		if pattern[pidx] == char {
+		pchar := runeAt(pattern, pidx, lenPattern, forward)
+		if pchar == char {
 			pidx++
-			if pidx == plen {
-				return index - plen + 1, index + 1
+			if pidx == lenPattern {
+				if forward {
+					return index - lenPattern + 1, index + 1
+				}
+				return lenRunes - (index + 1), lenRunes - (index - lenPattern + 1)
 			}
 		} else {
 			index -= pidx
@@ -123,7 +145,7 @@ func ExactMatchNaive(caseSensitive bool, runes []rune, pattern []rune) (int, int
 }
 
 // PrefixMatch performs prefix-match
-func PrefixMatch(caseSensitive bool, runes []rune, pattern []rune) (int, int) {
+func PrefixMatch(caseSensitive bool, forward bool, runes []rune, pattern []rune) (int, int) {
 	if len(runes) < len(pattern) {
 		return -1, -1
 	}
@@ -141,7 +163,7 @@ func PrefixMatch(caseSensitive bool, runes []rune, pattern []rune) (int, int) {
 }
 
 // SuffixMatch performs suffix-match
-func SuffixMatch(caseSensitive bool, input []rune, pattern []rune) (int, int) {
+func SuffixMatch(caseSensitive bool, forward bool, input []rune, pattern []rune) (int, int) {
 	runes := util.TrimRight(input)
 	trimmedLen := len(runes)
 	diff := trimmedLen - len(pattern)
@@ -162,7 +184,7 @@ func SuffixMatch(caseSensitive bool, input []rune, pattern []rune) (int, int) {
 }
 
 // EqualMatch performs equal-match
-func EqualMatch(caseSensitive bool, runes []rune, pattern []rune) (int, int) {
+func EqualMatch(caseSensitive bool, forward bool, runes []rune, pattern []rune) (int, int) {
 	if len(runes) != len(pattern) {
 		return -1, -1
 	}
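All of the matcher changes above follow the same pattern: rather than duplicating each function for the backward case, both the haystack and the pattern are read through `runeAt`, which mirrors the index when `forward` is false, and the resulting offsets are mirrored back before returning. Below is a compact, self-contained sketch of that idea, simplified to ASCII and omitting the case folding and the second backtracking pass of the real `FuzzyMatch`.

```go
package main

import "fmt"

// runeAt reads the i-th rune of the scan: the real index when scanning
// forward, the mirrored index when scanning backward.
func runeAt(runes []rune, i, length int, forward bool) rune {
	if forward {
		return runes[i]
	}
	return runes[length-i-1]
}

// fuzzyMatch is a stripped-down forward/backward scan: it walks the input
// once, consuming pattern runes as it finds them, and mirrors the offsets
// back when the scan ran backward.
func fuzzyMatch(forward bool, input, pattern []rune) (int, int) {
	lenInput, lenPattern := len(input), len(pattern)
	sidx, eidx, pidx := -1, -1, 0
	for i := 0; i < lenInput; i++ {
		if runeAt(input, i, lenInput, forward) == runeAt(pattern, pidx, lenPattern, forward) {
			if sidx < 0 {
				sidx = i
			}
			if pidx++; pidx == lenPattern {
				eidx = i + 1
				break
			}
		}
	}
	if sidx < 0 || eidx < 0 {
		return -1, -1
	}
	if forward {
		return sidx, eidx
	}
	return lenInput - eidx, lenInput - sidx
}

func main() {
	input := []rune("foobar fb")
	// Forward scan finds the leftmost match (the "f..b" in "foob"),
	// backward scan finds the rightmost one (the trailing "fb").
	fmt.Println(fuzzyMatch(true, input, []rune("fb")))  // 0 4
	fmt.Println(fuzzyMatch(false, input, []rune("fb"))) // 7 9
}
```

The two printed spans match the expectations encoded in the new `TestFuzzyMatchBackward` in the test diff below: the forward scan reports the first occurrence, the backward scan the last.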
@@ -5,11 +5,11 @@ import (
 	"testing"
 )
 
-func assertMatch(t *testing.T, fun func(bool, []rune, []rune) (int, int), caseSensitive bool, input string, pattern string, sidx int, eidx int) {
+func assertMatch(t *testing.T, fun func(bool, bool, []rune, []rune) (int, int), caseSensitive bool, forward bool, input string, pattern string, sidx int, eidx int) {
 	if !caseSensitive {
 		pattern = strings.ToLower(pattern)
 	}
-	s, e := fun(caseSensitive, []rune(input), []rune(pattern))
+	s, e := fun(caseSensitive, forward, []rune(input), []rune(pattern))
 	if s != sidx {
 		t.Errorf("Invalid start index: %d (expected: %d, %s / %s)", s, sidx, input, pattern)
 	}
@@ -19,33 +19,51 @@ func assertMatch(t *testing.T, fun func(bool, []rune, []rune) (int, int), caseSe
 }
 
 func TestFuzzyMatch(t *testing.T) {
-	assertMatch(t, FuzzyMatch, false, "fooBarbaz", "oBZ", 2, 9)
-	assertMatch(t, FuzzyMatch, true, "fooBarbaz", "oBZ", -1, -1)
-	assertMatch(t, FuzzyMatch, true, "fooBarbaz", "oBz", 2, 9)
-	assertMatch(t, FuzzyMatch, true, "fooBarbaz", "fooBarbazz", -1, -1)
+	assertMatch(t, FuzzyMatch, false, true, "fooBarbaz", "oBZ", 2, 9)
+	assertMatch(t, FuzzyMatch, true, true, "fooBarbaz", "oBZ", -1, -1)
+	assertMatch(t, FuzzyMatch, true, true, "fooBarbaz", "oBz", 2, 9)
+	assertMatch(t, FuzzyMatch, true, true, "fooBarbaz", "fooBarbazz", -1, -1)
+}
+
+func TestFuzzyMatchBackward(t *testing.T) {
+	assertMatch(t, FuzzyMatch, false, true, "foobar fb", "fb", 0, 4)
+	assertMatch(t, FuzzyMatch, false, false, "foobar fb", "fb", 7, 9)
 }
 
 func TestExactMatchNaive(t *testing.T) {
-	assertMatch(t, ExactMatchNaive, false, "fooBarbaz", "oBA", 2, 5)
-	assertMatch(t, ExactMatchNaive, true, "fooBarbaz", "oBA", -1, -1)
-	assertMatch(t, ExactMatchNaive, true, "fooBarbaz", "fooBarbazz", -1, -1)
+	for _, dir := range []bool{true, false} {
+		assertMatch(t, ExactMatchNaive, false, dir, "fooBarbaz", "oBA", 2, 5)
+		assertMatch(t, ExactMatchNaive, true, dir, "fooBarbaz", "oBA", -1, -1)
+		assertMatch(t, ExactMatchNaive, true, dir, "fooBarbaz", "fooBarbazz", -1, -1)
+	}
+}
+
+func TestExactMatchNaiveBackward(t *testing.T) {
+	assertMatch(t, FuzzyMatch, false, true, "foobar foob", "oo", 1, 3)
+	assertMatch(t, FuzzyMatch, false, false, "foobar foob", "oo", 8, 10)
 }
 
 func TestPrefixMatch(t *testing.T) {
-	assertMatch(t, PrefixMatch, false, "fooBarbaz", "Foo", 0, 3)
-	assertMatch(t, PrefixMatch, true, "fooBarbaz", "Foo", -1, -1)
-	assertMatch(t, PrefixMatch, false, "fooBarbaz", "baz", -1, -1)
+	for _, dir := range []bool{true, false} {
+		assertMatch(t, PrefixMatch, false, dir, "fooBarbaz", "Foo", 0, 3)
+		assertMatch(t, PrefixMatch, true, dir, "fooBarbaz", "Foo", -1, -1)
+		assertMatch(t, PrefixMatch, false, dir, "fooBarbaz", "baz", -1, -1)
+	}
 }
 
 func TestSuffixMatch(t *testing.T) {
-	assertMatch(t, SuffixMatch, false, "fooBarbaz", "Foo", -1, -1)
-	assertMatch(t, SuffixMatch, false, "fooBarbaz", "baz", 6, 9)
-	assertMatch(t, SuffixMatch, true, "fooBarbaz", "Baz", -1, -1)
+	for _, dir := range []bool{true, false} {
+		assertMatch(t, SuffixMatch, false, dir, "fooBarbaz", "Foo", -1, -1)
+		assertMatch(t, SuffixMatch, false, dir, "fooBarbaz", "baz", 6, 9)
+		assertMatch(t, SuffixMatch, true, dir, "fooBarbaz", "Baz", -1, -1)
+	}
 }
 
 func TestEmptyPattern(t *testing.T) {
-	assertMatch(t, FuzzyMatch, true, "foobar", "", 0, 0)
-	assertMatch(t, ExactMatchNaive, true, "foobar", "", 0, 0)
-	assertMatch(t, PrefixMatch, true, "foobar", "", 0, 0)
-	assertMatch(t, SuffixMatch, true, "foobar", "", 6, 6)
+	for _, dir := range []bool{true, false} {
+		assertMatch(t, FuzzyMatch, true, dir, "foobar", "", 0, 0)
+		assertMatch(t, ExactMatchNaive, true, dir, "foobar", "", 0, 0)
+		assertMatch(t, PrefixMatch, true, dir, "foobar", "", 0, 0)
+		assertMatch(t, SuffixMatch, true, dir, "foobar", "", 6, 6)
+	}
 }
@@ -8,7 +8,7 @@ import (
 
 const (
 	// Current version
-	version = "0.10.4"
+	version = "0.10.5"
 
 	// Core
 	coordinatorDelayMax time.Duration = 100 * time.Millisecond
@@ -143,7 +143,8 @@ func Run(opts *Options) {
 	// Matcher
 	patternBuilder := func(runes []rune) *Pattern {
 		return BuildPattern(
-			opts.Mode, opts.Case, opts.Nth, opts.Delimiter, runes)
+			opts.Mode, opts.Case, opts.Tiebreak != byEnd,
+			opts.Nth, opts.Delimiter, runes)
 	}
 	matcher := NewMatcher(patternBuilder, sort, opts.Tac, eventBox)
 
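Why `opts.Tiebreak != byEnd` becomes the `forward` flag: with `--tiebreak=end`, ties are broken in favor of matches that sit closer to the end of the line, so the matcher should report the last occurrence of the pattern rather than the first. The sketch below uses the spans that the forward/backward scan from the earlier example yields for the two lines of the Ruby test near the end of this compare (`foobar-fb` and `fubar` with query `fb`); the tiebreak keys are deliberate simplifications of fzf's ranking, not its exact scoring.

```go
package main

import (
	"fmt"
	"sort"
)

// candidate pairs a line with the span reported by the matcher.
type candidate struct {
	line       string
	sidx, eidx int
}

func main() {
	// Spans as a forward scan reports them (first occurrence) and as a
	// backward scan reports them (last occurrence). "fubar" has a single
	// occurrence, so both directions agree on it.
	forward := []candidate{{"foobar-fb", 0, 4}, {"fubar", 0, 3}}
	backward := []candidate{{"foobar-fb", 7, 9}, {"fubar", 0, 3}}

	// Default tiebreak (simplified): prefer the shorter match span.
	byLength := func(c []candidate) {
		sort.Slice(c, func(i, j int) bool {
			return (c[i].eidx - c[i].sidx) < (c[j].eidx - c[j].sidx)
		})
	}
	// --tiebreak=end (simplified): prefer matches that end closer to the
	// end of the line, which only behaves as intended with backward spans.
	byEnd := func(c []candidate) {
		sort.Slice(c, func(i, j int) bool {
			return (len(c[i].line) - c[i].eidx) < (len(c[j].line) - c[j].eidx)
		})
	}

	byLength(forward)
	fmt.Println("default:        ", forward) // fubar first
	byEnd(backward)
	fmt.Println("--tiebreak=end: ", backward) // foobar-fb first
}
```

With forward spans and the length-based tiebreak `fubar` wins; with backward spans and the end-based tiebreak `foobar-fb` wins, which is what the new `test_tiebreak_end_backward_scan` test asserts.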
@@ -39,12 +39,13 @@ type term struct {
 type Pattern struct {
 	mode          Mode
 	caseSensitive bool
+	forward       bool
 	text          []rune
 	terms         []term
 	hasInvTerm    bool
 	delimiter     Delimiter
 	nth           []Range
-	procFun       map[termType]func(bool, []rune, []rune) (int, int)
+	procFun       map[termType]func(bool, bool, []rune, []rune) (int, int)
 }
 
 var (
@@ -70,7 +71,7 @@ func clearChunkCache() {
 }
 
 // BuildPattern builds Pattern object from the given arguments
-func BuildPattern(mode Mode, caseMode Case,
+func BuildPattern(mode Mode, caseMode Case, forward bool,
 	nth []Range, delimiter Delimiter, runes []rune) *Pattern {
 
 	var asString string
@@ -109,12 +110,13 @@ func BuildPattern(mode Mode, caseMode Case,
 	ptr := &Pattern{
 		mode:          mode,
 		caseSensitive: caseSensitive,
+		forward:       forward,
 		text:          []rune(asString),
 		terms:         terms,
 		hasInvTerm:    hasInvTerm,
 		nth:           nth,
 		delimiter:     delimiter,
-		procFun:       make(map[termType]func(bool, []rune, []rune) (int, int))}
+		procFun:       make(map[termType]func(bool, bool, []rune, []rune) (int, int))}
 
 	ptr.procFun[termFuzzy] = algo.FuzzyMatch
 	ptr.procFun[termEqual] = algo.EqualMatch
@@ -151,6 +153,9 @@ func parseTerms(mode Mode, caseMode Case, str string) []term {
 			if mode == ModeExtended {
 				typ = termExact
 				text = text[1:]
+			} else if mode == ModeExtendedExact {
+				typ = termFuzzy
+				text = text[1:]
 			}
 		} else if strings.HasPrefix(text, "^") {
 			if strings.HasSuffix(text, "$") {
@@ -285,7 +290,7 @@ func dupItem(item *Item, offsets []Offset) *Item {
 
 func (p *Pattern) fuzzyMatch(item *Item) (int, int) {
 	input := p.prepareInput(item)
-	return p.iter(algo.FuzzyMatch, input, p.caseSensitive, p.text)
+	return p.iter(algo.FuzzyMatch, input, p.caseSensitive, p.forward, p.text)
 }
 
 func (p *Pattern) extendedMatch(item *Item) []Offset {
@@ -293,7 +298,7 @@ func (p *Pattern) extendedMatch(item *Item) []Offset {
 	offsets := []Offset{}
 	for _, term := range p.terms {
 		pfun := p.procFun[term.typ]
-		if sidx, eidx := p.iter(pfun, input, term.caseSensitive, term.text); sidx >= 0 {
+		if sidx, eidx := p.iter(pfun, input, term.caseSensitive, p.forward, term.text); sidx >= 0 {
 			if term.inv {
 				break
 			}
@@ -321,11 +326,11 @@ func (p *Pattern) prepareInput(item *Item) []Token {
 	return ret
 }
 
-func (p *Pattern) iter(pfun func(bool, []rune, []rune) (int, int),
-	tokens []Token, caseSensitive bool, pattern []rune) (int, int) {
+func (p *Pattern) iter(pfun func(bool, bool, []rune, []rune) (int, int),
+	tokens []Token, caseSensitive bool, forward bool, pattern []rune) (int, int) {
 	for _, part := range tokens {
 		prefixLength := part.prefixLength
-		if sidx, eidx := pfun(caseSensitive, part.text, pattern); sidx >= 0 {
+		if sidx, eidx := pfun(caseSensitive, forward, part.text, pattern); sidx >= 0 {
 			return sidx + prefixLength, eidx + prefixLength
 		}
 	}
@@ -37,11 +37,11 @@ func TestParseTermsExtendedExact(t *testing.T) {
 		"aaa 'bbb ^ccc ddd$ !eee !'fff !^ggg !hhh$")
 	if len(terms) != 8 ||
 		terms[0].typ != termExact || terms[0].inv || len(terms[0].text) != 3 ||
-		terms[1].typ != termExact || terms[1].inv || len(terms[1].text) != 4 ||
+		terms[1].typ != termFuzzy || terms[1].inv || len(terms[1].text) != 3 ||
 		terms[2].typ != termPrefix || terms[2].inv || len(terms[2].text) != 3 ||
 		terms[3].typ != termSuffix || terms[3].inv || len(terms[3].text) != 3 ||
 		terms[4].typ != termExact || !terms[4].inv || len(terms[4].text) != 3 ||
-		terms[5].typ != termExact || !terms[5].inv || len(terms[5].text) != 4 ||
+		terms[5].typ != termFuzzy || !terms[5].inv || len(terms[5].text) != 3 ||
 		terms[6].typ != termPrefix || !terms[6].inv || len(terms[6].text) != 3 ||
 		terms[7].typ != termSuffix || !terms[7].inv || len(terms[7].text) != 3 {
 		t.Errorf("%s", terms)
@@ -58,10 +58,10 @@ func TestParseTermsEmpty(t *testing.T) {
 func TestExact(t *testing.T) {
 	defer clearPatternCache()
 	clearPatternCache()
-	pattern := BuildPattern(ModeExtended, CaseSmart,
+	pattern := BuildPattern(ModeExtended, CaseSmart, true,
 		[]Range{}, Delimiter{}, []rune("'abc"))
 	sidx, eidx := algo.ExactMatchNaive(
-		pattern.caseSensitive, []rune("aabbcc abc"), pattern.terms[0].text)
+		pattern.caseSensitive, pattern.forward, []rune("aabbcc abc"), pattern.terms[0].text)
 	if sidx != 7 || eidx != 10 {
 		t.Errorf("%s / %d / %d", pattern.terms, sidx, eidx)
 	}
@@ -70,11 +70,11 @@ func TestExact(t *testing.T) {
 func TestEqual(t *testing.T) {
 	defer clearPatternCache()
 	clearPatternCache()
-	pattern := BuildPattern(ModeExtended, CaseSmart, []Range{}, Delimiter{}, []rune("^AbC$"))
+	pattern := BuildPattern(ModeExtended, CaseSmart, true, []Range{}, Delimiter{}, []rune("^AbC$"))
 
 	match := func(str string, sidxExpected int, eidxExpected int) {
 		sidx, eidx := algo.EqualMatch(
-			pattern.caseSensitive, []rune(str), pattern.terms[0].text)
+			pattern.caseSensitive, pattern.forward, []rune(str), pattern.terms[0].text)
 		if sidx != sidxExpected || eidx != eidxExpected {
 			t.Errorf("%s / %d / %d", pattern.terms, sidx, eidx)
 		}
@@ -86,17 +86,17 @@ func TestEqual(t *testing.T) {
 func TestCaseSensitivity(t *testing.T) {
 	defer clearPatternCache()
 	clearPatternCache()
-	pat1 := BuildPattern(ModeFuzzy, CaseSmart, []Range{}, Delimiter{}, []rune("abc"))
+	pat1 := BuildPattern(ModeFuzzy, CaseSmart, true, []Range{}, Delimiter{}, []rune("abc"))
 	clearPatternCache()
-	pat2 := BuildPattern(ModeFuzzy, CaseSmart, []Range{}, Delimiter{}, []rune("Abc"))
+	pat2 := BuildPattern(ModeFuzzy, CaseSmart, true, []Range{}, Delimiter{}, []rune("Abc"))
 	clearPatternCache()
-	pat3 := BuildPattern(ModeFuzzy, CaseIgnore, []Range{}, Delimiter{}, []rune("abc"))
+	pat3 := BuildPattern(ModeFuzzy, CaseIgnore, true, []Range{}, Delimiter{}, []rune("abc"))
 	clearPatternCache()
-	pat4 := BuildPattern(ModeFuzzy, CaseIgnore, []Range{}, Delimiter{}, []rune("Abc"))
+	pat4 := BuildPattern(ModeFuzzy, CaseIgnore, true, []Range{}, Delimiter{}, []rune("Abc"))
 	clearPatternCache()
-	pat5 := BuildPattern(ModeFuzzy, CaseRespect, []Range{}, Delimiter{}, []rune("abc"))
+	pat5 := BuildPattern(ModeFuzzy, CaseRespect, true, []Range{}, Delimiter{}, []rune("abc"))
 	clearPatternCache()
-	pat6 := BuildPattern(ModeFuzzy, CaseRespect, []Range{}, Delimiter{}, []rune("Abc"))
+	pat6 := BuildPattern(ModeFuzzy, CaseRespect, true, []Range{}, Delimiter{}, []rune("Abc"))
 
 	if string(pat1.text) != "abc" || pat1.caseSensitive != false ||
 		string(pat2.text) != "Abc" || pat2.caseSensitive != true ||
@@ -109,7 +109,7 @@ func TestCaseSensitivity(t *testing.T) {
 }
 
 func TestOrigTextAndTransformed(t *testing.T) {
-	pattern := BuildPattern(ModeExtended, CaseSmart, []Range{}, Delimiter{}, []rune("jg"))
+	pattern := BuildPattern(ModeExtended, CaseSmart, true, []Range{}, Delimiter{}, []rune("jg"))
 	tokens := Tokenize([]rune("junegunn"), Delimiter{})
 	trans := Transform(tokens, []Range{Range{1, 1}})
 
|
@@ -527,6 +527,17 @@ class TestGoFZF < TestBase
|
|||||||
assert_equal output, `cat #{tempname} | #{FZF} -fh -n2 -d:`.split($/)
|
assert_equal output, `cat #{tempname} | #{FZF} -fh -n2 -d:`.split($/)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def test_tiebreak_end_backward_scan
|
||||||
|
input = %w[
|
||||||
|
foobar-fb
|
||||||
|
fubar
|
||||||
|
]
|
||||||
|
writelines tempname, input
|
||||||
|
|
||||||
|
assert_equal input.reverse, `cat #{tempname} | #{FZF} -f fb`.split($/)
|
||||||
|
assert_equal input, `cat #{tempname} | #{FZF} -f fb --tiebreak=end`.split($/)
|
||||||
|
end
|
||||||
|
|
||||||
def test_invalid_cache
|
def test_invalid_cache
|
||||||
tmux.send_keys "(echo d; echo D; echo x) | #{fzf '-q d'}", :Enter
|
tmux.send_keys "(echo d; echo D; echo x) | #{fzf '-q d'}", :Enter
|
||||||
tmux.until { |lines| lines[-2].include? '2/3' }
|
tmux.until { |lines| lines[-2].include? '2/3' }
|
||||||