mirror of https://github.com/junegunn/fzf.git
synced 2025-08-19 14:33:49 -07:00
Compare commits
6 Commits
Author | SHA1 | Date
---|---|---
 | 8156e9894e |
 | cacc212f12 |
 | d0f2c00f9f |
 | 766427de0c |
 | a7b75c99a5 |
 | bae10a6582 |
CHANGELOG.md | 10
@@ -1,6 +1,16 @@
 CHANGELOG
 =========
 
+0.10.3
+------
+
+- Fixed slow performance of `--with-nth` when used with `--delimiter`
+    - Regular expression engine of Golang as of now is very slow, so the fixed
+      version will treat the given delimiter pattern as a plain string instead
+      of a regular expression unless it contains special characters and is
+      a valid regular expression.
+- Simpler regular expression for delimiter for better performance
+
 0.10.2
 ------
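The rule in the changelog entry amounts to a three-way classification of the `--delimiter` argument. Below is a minimal standalone sketch of that decision; the `classify` helper is illustrative only, the real logic lives in `delimiterRegexp` in the options.go diff further down.

```go
package main

import (
	"fmt"
	"regexp"
)

// classify mirrors the changelog rule: treat the delimiter as a plain string
// unless it contains regexp metacharacters AND compiles as a valid regexp.
func classify(pattern string) string {
	if regexp.QuoteMeta(pattern) == pattern {
		return "plain string (fast path)"
	}
	if _, err := regexp.Compile(pattern); err != nil {
		return "plain string (not a valid regexp)"
	}
	return "regular expression (slow path)"
}

func main() {
	for _, p := range []string{":", "[0-9", "[0-9]"} {
		fmt.Printf("%q -> %s\n", p, classify(p))
	}
}
```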
install | 6
@@ -1,7 +1,7 @@
 #!/usr/bin/env bash
 
-[[ "$@" =~ --pre ]] && version=0.10.2 pre=1 ||
-  version=0.10.2 pre=0
+[[ "$@" =~ --pre ]] && version=0.10.3 pre=1 ||
+  version=0.10.3 pre=0
 
 cd $(dirname $BASH_SOURCE)
 fzf_base=$(pwd)
@@ -96,6 +96,7 @@ if [ -n "$binary_error" ]; then
     echo "No prebuilt binary for $archi ... "
   else
     echo "  - $binary_error !!!"
+    exit 1
   fi
   echo "Installing legacy Ruby version ..."
@@ -250,6 +251,7 @@ append_line() {
   if [ -n "$line" ]; then
     echo "  - Already exists: line #$line"
   else
+    echo >> "$2"
     echo "$1" >> "$2"
     echo "  + Added"
   fi
man/man1/fzf.1
@@ -21,7 +21,7 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 THE SOFTWARE.
 ..
-.TH fzf 1 "Aug 2015" "fzf 0.10.2" "fzf - a command-line fuzzy finder"
+.TH fzf 1 "Aug 2015" "fzf 0.10.3" "fzf - a command-line fuzzy finder"
 
 .SH NAME
 fzf - a command-line fuzzy finder
src/constants.go
@@ -8,7 +8,7 @@ import (
 
 const (
 	// Current version
-	version = "0.10.2"
+	version = "0.10.3"
 
 	// Core
 	coordinatorDelayMax time.Duration = 100 * time.Millisecond
src/options.go
@@ -1,7 +1,6 @@
 package fzf
 
 import (
-	"fmt"
 	"io/ioutil"
 	"os"
 	"regexp"
@@ -104,7 +103,7 @@ type Options struct {
 	Case       Case
 	Nth        []Range
 	WithNth    []Range
-	Delimiter  *regexp.Regexp
+	Delimiter  Delimiter
 	Sort       int
 	Tac        bool
 	Tiebreak   tiebreak
@@ -149,7 +148,7 @@ func defaultOptions() *Options {
 		Case:       CaseSmart,
 		Nth:        make([]Range, 0),
 		WithNth:    make([]Range, 0),
-		Delimiter:  nil,
+		Delimiter:  Delimiter{},
 		Sort:       1000,
 		Tac:        false,
 		Tiebreak:   byLength,
@@ -268,17 +267,23 @@ func splitNth(str string) []Range {
 	return ranges
 }
 
-func delimiterRegexp(str string) *regexp.Regexp {
-	rx, e := regexp.Compile(str)
-	if e != nil {
-		str = regexp.QuoteMeta(str)
+func delimiterRegexp(str string) Delimiter {
+	// Special handling of \t
+	str = strings.Replace(str, "\\t", "\t", -1)
+
+	// 1. Pattern does not contain any special character
+	if regexp.QuoteMeta(str) == str {
+		return Delimiter{str: &str}
 	}
 
-	rx, e = regexp.Compile(fmt.Sprintf("(?:.*?%s)|(?:.+?$)", str))
+	rx, e := regexp.Compile(str)
+	// 2. Pattern is not a valid regular expression
 	if e != nil {
-		errorExit("invalid regular expression: " + e.Error())
+		return Delimiter{str: &str}
 	}
-	return rx
+
+	// 3. Pattern as regular expression. Slow.
+	return Delimiter{regex: rx}
 }
 
 func isAlphabet(char uint8) bool {
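One detail of the new `delimiterRegexp` worth noting: the two-character escape sequence `\t` is rewritten to a literal tab before classification, so `--delimiter '\t'` takes the plain-string path just like a real tab, while `\t+` still contains the `+` metacharacter after the rewrite and falls through to the regex path. A small hedged check of that behavior, using the same standard-library calls as the diff above:

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

func main() {
	for _, input := range []string{`\t`, "\t", `\t+`} {
		// Same rewrite as the new delimiterRegexp: turn the "\t" escape into a real tab.
		s := strings.Replace(input, `\t`, "\t", -1)
		// The plain-string path applies when the pattern has no regexp metacharacters.
		plain := regexp.QuoteMeta(s) == s
		fmt.Printf("%q -> %q (plain string: %v)\n", input, s, plain)
	}
}
```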
src/options_test.go
@@ -8,11 +8,59 @@ import (
 )
 
 func TestDelimiterRegex(t *testing.T) {
-	rx := delimiterRegexp("*")
-	tokens := rx.FindAllString("-*--*---**---", -1)
-	if tokens[0] != "-*" || tokens[1] != "--*" || tokens[2] != "---*" ||
-		tokens[3] != "*" || tokens[4] != "---" {
-		t.Errorf("%s %s %d", rx, tokens, len(tokens))
+	// Valid regex
+	delim := delimiterRegexp(".")
+	if delim.regex == nil || delim.str != nil {
+		t.Error(delim)
+	}
+	// Broken regex -> string
+	delim = delimiterRegexp("[0-9")
+	if delim.regex != nil || *delim.str != "[0-9" {
+		t.Error(delim)
+	}
+	// Valid regex
+	delim = delimiterRegexp("[0-9]")
+	if delim.regex.String() != "[0-9]" || delim.str != nil {
+		t.Error(delim)
+	}
+	// Tab character
+	delim = delimiterRegexp("\t")
+	if delim.regex != nil || *delim.str != "\t" {
+		t.Error(delim)
+	}
+	// Tab expression
+	delim = delimiterRegexp("\\t")
+	if delim.regex != nil || *delim.str != "\t" {
+		t.Error(delim)
+	}
+	// Tabs -> regex
+	delim = delimiterRegexp("\t+")
+	if delim.regex == nil || delim.str != nil {
+		t.Error(delim)
+	}
+}
+
+func TestDelimiterRegexString(t *testing.T) {
+	delim := delimiterRegexp("*")
+	tokens := Tokenize([]rune("-*--*---**---"), delim)
+	if delim.regex != nil ||
+		string(tokens[0].text) != "-*" ||
+		string(tokens[1].text) != "--*" ||
+		string(tokens[2].text) != "---*" ||
+		string(tokens[3].text) != "*" ||
+		string(tokens[4].text) != "---" {
+		t.Errorf("%s %s %d", delim, tokens, len(tokens))
+	}
+}
+
+func TestDelimiterRegexRegex(t *testing.T) {
+	delim := delimiterRegexp("--\\*")
+	tokens := Tokenize([]rune("-*--*---**---"), delim)
+	if delim.str != nil ||
+		string(tokens[0].text) != "-*--*" ||
+		string(tokens[1].text) != "---*" ||
+		string(tokens[2].text) != "*---" {
+		t.Errorf("%s %d", tokens, len(tokens))
 	}
 }
 
src/pattern.go
@@ -42,7 +42,7 @@ type Pattern struct {
 	text          []rune
 	terms         []term
 	hasInvTerm    bool
-	delimiter     *regexp.Regexp
+	delimiter     Delimiter
 	nth           []Range
 	procFun       map[termType]func(bool, []rune, []rune) (int, int)
 }
@@ -71,7 +71,7 @@ func clearChunkCache() {
 
 // BuildPattern builds Pattern object from the given arguments
 func BuildPattern(mode Mode, caseMode Case,
-	nth []Range, delimiter *regexp.Regexp, runes []rune) *Pattern {
+	nth []Range, delimiter Delimiter, runes []rune) *Pattern {
 
 	var asString string
 	switch mode {
src/pattern_test.go
@@ -59,7 +59,7 @@ func TestExact(t *testing.T) {
 	defer clearPatternCache()
 	clearPatternCache()
 	pattern := BuildPattern(ModeExtended, CaseSmart,
-		[]Range{}, nil, []rune("'abc"))
+		[]Range{}, Delimiter{}, []rune("'abc"))
 	sidx, eidx := algo.ExactMatchNaive(
 		pattern.caseSensitive, []rune("aabbcc abc"), pattern.terms[0].text)
 	if sidx != 7 || eidx != 10 {
@@ -70,7 +70,7 @@ func TestExact(t *testing.T) {
 func TestEqual(t *testing.T) {
 	defer clearPatternCache()
 	clearPatternCache()
-	pattern := BuildPattern(ModeExtended, CaseSmart, []Range{}, nil, []rune("^AbC$"))
+	pattern := BuildPattern(ModeExtended, CaseSmart, []Range{}, Delimiter{}, []rune("^AbC$"))
 
 	match := func(str string, sidxExpected int, eidxExpected int) {
 		sidx, eidx := algo.EqualMatch(
@@ -86,17 +86,17 @@ func TestEqual(t *testing.T) {
 func TestCaseSensitivity(t *testing.T) {
 	defer clearPatternCache()
 	clearPatternCache()
-	pat1 := BuildPattern(ModeFuzzy, CaseSmart, []Range{}, nil, []rune("abc"))
+	pat1 := BuildPattern(ModeFuzzy, CaseSmart, []Range{}, Delimiter{}, []rune("abc"))
 	clearPatternCache()
-	pat2 := BuildPattern(ModeFuzzy, CaseSmart, []Range{}, nil, []rune("Abc"))
+	pat2 := BuildPattern(ModeFuzzy, CaseSmart, []Range{}, Delimiter{}, []rune("Abc"))
 	clearPatternCache()
-	pat3 := BuildPattern(ModeFuzzy, CaseIgnore, []Range{}, nil, []rune("abc"))
+	pat3 := BuildPattern(ModeFuzzy, CaseIgnore, []Range{}, Delimiter{}, []rune("abc"))
 	clearPatternCache()
-	pat4 := BuildPattern(ModeFuzzy, CaseIgnore, []Range{}, nil, []rune("Abc"))
+	pat4 := BuildPattern(ModeFuzzy, CaseIgnore, []Range{}, Delimiter{}, []rune("Abc"))
 	clearPatternCache()
-	pat5 := BuildPattern(ModeFuzzy, CaseRespect, []Range{}, nil, []rune("abc"))
+	pat5 := BuildPattern(ModeFuzzy, CaseRespect, []Range{}, Delimiter{}, []rune("abc"))
 	clearPatternCache()
-	pat6 := BuildPattern(ModeFuzzy, CaseRespect, []Range{}, nil, []rune("Abc"))
+	pat6 := BuildPattern(ModeFuzzy, CaseRespect, []Range{}, Delimiter{}, []rune("Abc"))
 
 	if string(pat1.text) != "abc" || pat1.caseSensitive != false ||
 		string(pat2.text) != "Abc" || pat2.caseSensitive != true ||
@@ -109,8 +109,8 @@ func TestCaseSensitivity(t *testing.T) {
 }
 
 func TestOrigTextAndTransformed(t *testing.T) {
-	pattern := BuildPattern(ModeExtended, CaseSmart, []Range{}, nil, []rune("jg"))
-	tokens := Tokenize([]rune("junegunn"), nil)
+	pattern := BuildPattern(ModeExtended, CaseSmart, []Range{}, Delimiter{}, []rune("jg"))
+	tokens := Tokenize([]rune("junegunn"), Delimiter{})
 	trans := Transform(tokens, []Range{Range{1, 1}})
 
 	origRunes := []rune("junegunn.choi")
src/tokenizer.go
@@ -22,6 +22,12 @@ type Token struct {
 	prefixLength int
 }
 
+// Delimiter for tokenizing the input
+type Delimiter struct {
+	regex *regexp.Regexp
+	str   *string
+}
+
 func newRange(begin int, end int) Range {
 	if begin == 1 {
 		begin = rangeEllipsis
@@ -68,15 +74,15 @@ func ParseRange(str *string) (Range, bool) {
 	return newRange(n, n), true
 }
 
-func withPrefixLengths(tokens []string, begin int) []Token {
+func withPrefixLengths(tokens [][]rune, begin int) []Token {
 	ret := make([]Token, len(tokens))
 
 	prefixLength := begin
 	for idx, token := range tokens {
 		// Need to define a new local variable instead of the reused token to take
 		// the pointer to it
-		ret[idx] = Token{text: []rune(token), prefixLength: prefixLength}
-		prefixLength += len([]rune(token))
+		ret[idx] = Token{text: token, prefixLength: prefixLength}
+		prefixLength += len(token)
 	}
 	return ret
 }
@@ -87,9 +93,9 @@ const (
 	awkWhite
 )
 
-func awkTokenizer(input []rune) ([]string, int) {
+func awkTokenizer(input []rune) ([][]rune, int) {
 	// 9, 32
-	ret := []string{}
+	ret := [][]rune{}
 	str := []rune{}
 	prefixLength := 0
 	state := awkNil
@@ -112,27 +118,49 @@ func awkTokenizer(input []rune) ([]string, int) {
 			if white {
 				str = append(str, r)
 			} else {
-				ret = append(ret, string(str))
+				ret = append(ret, str)
 				state = awkBlack
 				str = []rune{r}
 			}
 		}
 	}
 	if len(str) > 0 {
-		ret = append(ret, string(str))
+		ret = append(ret, str)
 	}
 	return ret, prefixLength
 }
 
 // Tokenize tokenizes the given string with the delimiter
-func Tokenize(runes []rune, delimiter *regexp.Regexp) []Token {
-	if delimiter == nil {
+func Tokenize(runes []rune, delimiter Delimiter) []Token {
+	if delimiter.str == nil && delimiter.regex == nil {
 		// AWK-style (\S+\s*)
 		tokens, prefixLength := awkTokenizer(runes)
 		return withPrefixLengths(tokens, prefixLength)
 	}
-	tokens := delimiter.FindAllString(string(runes), -1)
-	return withPrefixLengths(tokens, 0)
+
+	var tokens []string
+	if delimiter.str != nil {
+		tokens = strings.Split(string(runes), *delimiter.str)
+		for i := 0; i < len(tokens)-1; i++ {
+			tokens[i] = tokens[i] + *delimiter.str
+		}
+	} else if delimiter.regex != nil {
+		str := string(runes)
+		for len(str) > 0 {
+			loc := delimiter.regex.FindStringIndex(str)
+			if loc == nil {
+				loc = []int{0, len(str)}
+			}
+			last := util.Max(loc[1], 1)
+			tokens = append(tokens, str[:last])
+			str = str[last:]
+		}
+	}
+	asRunes := make([][]rune, len(tokens))
+	for i, token := range tokens {
+		asRunes[i] = []rune(token)
+	}
+	return withPrefixLengths(asRunes, 0)
 }
 
 func joinTokens(tokens []Token) []rune {
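The plain-string branch of the new `Tokenize` keeps each delimiter attached to the token it terminates, preserving the behavior of the old `(?:.*?%s)|(?:.+?$)` regex while avoiding the regexp engine entirely. A self-contained sketch of just that splitting step follows; the helper name is hypothetical, not part of fzf:

```go
package main

import (
	"fmt"
	"strings"
)

// splitKeepingDelimiter reproduces the string branch of Tokenize: split on the
// delimiter, then re-attach it to every token except the last, so the token
// prefix lengths still add up to the length of the original input.
func splitKeepingDelimiter(s, delim string) []string {
	tokens := strings.Split(s, delim)
	for i := 0; i < len(tokens)-1; i++ {
		tokens[i] = tokens[i] + delim
	}
	return tokens
}

func main() {
	fmt.Printf("%q\n", splitKeepingDelimiter("/usr/local/bin", "/"))
	// Output: ["/" "usr/" "local/" "bin"]
}
```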
src/tokenizer_test.go
@@ -43,7 +43,7 @@ func TestParseRange(t *testing.T) {
 func TestTokenize(t *testing.T) {
 	// AWK-style
 	input := "  abc:  def:  ghi  "
-	tokens := Tokenize([]rune(input), nil)
+	tokens := Tokenize([]rune(input), Delimiter{})
 	if string(tokens[0].text) != "abc:  " || tokens[0].prefixLength != 2 {
 		t.Errorf("%s", tokens)
 	}
@@ -53,12 +53,21 @@ func TestTokenize(t *testing.T) {
 	if string(tokens[0].text) != "  abc:" || tokens[0].prefixLength != 0 {
 		t.Errorf("%s", tokens)
 	}
+
+	// With delimiter regex
+	tokens = Tokenize([]rune(input), delimiterRegexp("\\s+"))
+	if string(tokens[0].text) != "  " || tokens[0].prefixLength != 0 ||
+		string(tokens[1].text) != "abc:  " || tokens[1].prefixLength != 2 ||
+		string(tokens[2].text) != "def:  " || tokens[2].prefixLength != 8 ||
+		string(tokens[3].text) != "ghi  " || tokens[3].prefixLength != 14 {
+		t.Errorf("%s", tokens)
+	}
 }
 
 func TestTransform(t *testing.T) {
 	input := "  abc:  def:  ghi:  jkl"
 	{
-		tokens := Tokenize([]rune(input), nil)
+		tokens := Tokenize([]rune(input), Delimiter{})
 		{
 			ranges := splitNth("1,2,3")
 			tx := Transform(tokens, ranges)