[perf] Optimize AWK-style tokenizer for --nth

Approx. 50% smaller memory footprint and a 40% improvement in query time.
This commit is contained in:
Junegunn Choi
2016-08-14 01:53:06 +09:00
parent 1d4057c209
commit ddc7bb9064
5 changed files with 57 additions and 59 deletions

View File

@@ -83,30 +83,6 @@ func IsTty() bool {
return int(C.isatty(C.int(os.Stdin.Fd()))) != 0
}
// TrimLen returns the number of runes that remain after stripping
// leading and trailing spaces and tabs from runes. The slice itself
// is left unmodified.
func TrimLen(runes []rune) int {
	isBlank := func(r rune) bool { return r == ' ' || r == '\t' }

	// Walk backward past trailing whitespace; end is one past the
	// last non-blank rune.
	end := len(runes)
	for end > 0 && isBlank(runes[end-1]) {
		end--
	}
	// Input was empty or consisted solely of whitespace.
	if end == 0 {
		return 0
	}
	// Walk forward past leading whitespace; start is the index of
	// the first non-blank rune.
	start := 0
	for start < end && isBlank(runes[start]) {
		start++
	}
	return end - start
}
// ExecCommand executes the given command with $SHELL
func ExecCommand(command string) *exec.Cmd {
shell := os.Getenv("SHELL")