mirror of
https://github.com/BurntSushi/ripgrep.git
synced 2025-08-01 20:52:03 -07:00
Compare commits
82 Commits
Author | SHA1 | Date | |
---|---|---|---|
|
de79be2db2 | ||
|
416b69bae5 | ||
|
3e78fce3a3 | ||
|
7a3fd1f23f | ||
|
d306403440 | ||
|
ebabe1df6a | ||
|
f27aa3ff6f | ||
|
20ccd441f2 | ||
|
104d740f76 | ||
|
2da0eab2b8 | ||
|
b8c7864a02 | ||
|
ec26995655 | ||
|
a41235a3b5 | ||
|
1a91b900e7 | ||
|
2b15832655 | ||
|
b1c52b52d6 | ||
|
109bc3f78e | ||
|
b62195b33f | ||
|
baebfd7add | ||
|
19e405e5c5 | ||
|
f85822266f | ||
|
b034b77798 | ||
|
278e1168bf | ||
|
6a8051b258 | ||
|
a13ac3e3d4 | ||
|
a72467996b | ||
|
9395076468 | ||
|
a12c63957b | ||
|
982265af70 | ||
|
ed94aedf27 | ||
|
fd5ae2f795 | ||
|
3d6a39be06 | ||
|
e7839f2200 | ||
|
9dc5464c84 | ||
|
95edcd4d3a | ||
|
d97f404970 | ||
|
b2bbd46178 | ||
|
82542df5cb | ||
|
e4329037aa | ||
|
ab0d1c1c79 | ||
|
2015c56e8d | ||
|
23ad8b989d | ||
|
a8f3d9e87e | ||
|
9f1aae64f8 | ||
|
1595f0faf5 | ||
|
8eeb0c0b60 | ||
|
423f2a1927 | ||
|
4b5e789a2a | ||
|
37b731a048 | ||
|
a44735aa87 | ||
|
6b2efd4d88 | ||
|
c8227e0cf3 | ||
|
b941c10b90 | ||
|
872a107658 | ||
|
71ad9bf393 | ||
|
f733e9ebe4 | ||
|
ce85df1d2e | ||
|
a6e3cab65a | ||
|
7b860affbe | ||
|
af4dc78537 | ||
|
9ce0484670 | ||
|
346bad7dfc | ||
|
56fe93d343 | ||
|
155676b474 | ||
|
a3fc4cdded | ||
|
3bec8f3f0a | ||
|
3b37f12ec0 | ||
|
a2ed677e03 | ||
|
2fb9c3c42c | ||
|
447e1ba0e2 | ||
|
3b45059212 | ||
|
f74078af5b | ||
|
5ff9b2f2a2 | ||
|
cc90511ab2 | ||
|
f5d60a80a8 | ||
|
6fa158f6d3 | ||
|
ef6dea40ff | ||
|
9035c6b7b3 | ||
|
f5eb36baac | ||
|
6367dd61ba | ||
|
98892de1c1 | ||
|
273c14a45a |
102
CHANGELOG.md
Normal file
102
CHANGELOG.md
Normal file
@@ -0,0 +1,102 @@
|
||||
0.2.1
|
||||
=====
|
||||
Feature enhancements:
|
||||
|
||||
* Added or improved file type filtering for Clojure and SystemVerilog.
|
||||
* [FEATURE #89](https://github.com/BurntSushi/ripgrep/issues/89):
|
||||
Add a --null flag that outputs a NUL byte after every file path.
|
||||
|
||||
Bug fixes:
|
||||
|
||||
* [BUG #98](https://github.com/BurntSushi/ripgrep/issues/98):
|
||||
Fix a bug in single threaded mode when if opening a file failed, ripgrep
|
||||
quit instead of continuing the search.
|
||||
* [BUG #99](https://github.com/BurntSushi/ripgrep/issues/99):
|
||||
Fix another bug in single threaded mode where empty lines were being printed
|
||||
by mistake.
|
||||
* [BUG #105](https://github.com/BurntSushi/ripgrep/issues/105):
|
||||
Fix an off-by-one error with --column.
|
||||
* [BUG #106](https://github.com/BurntSushi/ripgrep/issues/106):
|
||||
Fix a bug where a whitespace only line in a gitignore file caused ripgrep
|
||||
to panic (i.e., crash).
|
||||
|
||||
|
||||
0.2.0
|
||||
=====
|
||||
Feature enhancements:
|
||||
|
||||
* Added or improved file type filtering for VB, R, F#, Swift, Nim, Javascript,
|
||||
TypeScript
|
||||
* [FEATURE #20](https://github.com/BurntSushi/ripgrep/issues/20):
|
||||
Adds a --no-filename flag.
|
||||
* [FEATURE #26](https://github.com/BurntSushi/ripgrep/issues/26):
|
||||
Adds --files-with-matches flag. Like --count, but only prints file paths
|
||||
and doesn't need to count every match.
|
||||
* [FEATURE #40](https://github.com/BurntSushi/ripgrep/issues/40):
|
||||
Switch from using `.rgignore` to `.ignore`. Note that `.rgignore` is
|
||||
still supported, but deprecated.
|
||||
* [FEATURE #68](https://github.com/BurntSushi/ripgrep/issues/68):
|
||||
Add --no-ignore-vcs flag that ignores .gitignore but not .ignore.
|
||||
* [FEATURE #70](https://github.com/BurntSushi/ripgrep/issues/70):
|
||||
Add -S/--smart-case flag (but is disabled by default).
|
||||
* [FEATURE #80](https://github.com/BurntSushi/ripgrep/issues/80):
|
||||
Add support for `{foo,bar}` globs.
|
||||
|
||||
Many many bug fixes. Thanks every for reporting these and helping make
|
||||
`ripgrep` better! (Note that I haven't captured every tracking issue here,
|
||||
some were closed as duplicates.)
|
||||
|
||||
* [BUG #8](https://github.com/BurntSushi/ripgrep/issues/8):
|
||||
Don't use an intermediate buffer when --threads=1. (Permits constant memory
|
||||
usage.)
|
||||
* [BUG #15](https://github.com/BurntSushi/ripgrep/issues/15):
|
||||
Improves the documentation for --type-add.
|
||||
* [BUG #16](https://github.com/BurntSushi/ripgrep/issues/16),
|
||||
[BUG #49](https://github.com/BurntSushi/ripgrep/issues/49),
|
||||
[BUG #50](https://github.com/BurntSushi/ripgrep/issues/50),
|
||||
[BUG #65](https://github.com/BurntSushi/ripgrep/issues/65):
|
||||
Some gitignore globs were being treated as anchored when they weren't.
|
||||
* [BUG #18](https://github.com/BurntSushi/ripgrep/issues/18):
|
||||
--vimgrep reported incorrect column number.
|
||||
* [BUG #19](https://github.com/BurntSushi/ripgrep/issues/19):
|
||||
ripgrep was hanging waiting on stdin in some Windows terminals. Note that
|
||||
this introduced a new bug:
|
||||
[#94](https://github.com/BurntSushi/ripgrep/issues/94).
|
||||
* [BUG #21](https://github.com/BurntSushi/ripgrep/issues/21):
|
||||
Removes leading `./` when printing file paths.
|
||||
* [BUG #22](https://github.com/BurntSushi/ripgrep/issues/22):
|
||||
Running `rg --help | echo` caused `rg` to panic.
|
||||
* [BUG #24](https://github.com/BurntSushi/ripgrep/issues/22):
|
||||
Clarify the central purpose of rg in its usage message.
|
||||
* [BUG #25](https://github.com/BurntSushi/ripgrep/issues/25):
|
||||
Anchored gitignore globs weren't applied in subdirectories correctly.
|
||||
* [BUG #30](https://github.com/BurntSushi/ripgrep/issues/30):
|
||||
Globs like `foo/**` should match contents of `foo`, but not `foo` itself.
|
||||
* [BUG #35](https://github.com/BurntSushi/ripgrep/issues/35),
|
||||
[BUG #81](https://github.com/BurntSushi/ripgrep/issues/81):
|
||||
When automatically detecting stdin, only read if it's a file or a fifo.
|
||||
i.e., ignore stdin in `rg foo < /dev/null`.
|
||||
* [BUG #36](https://github.com/BurntSushi/ripgrep/issues/36):
|
||||
Don't automatically pick memory maps on MacOS. Ever.
|
||||
* [BUG #38](https://github.com/BurntSushi/ripgrep/issues/38):
|
||||
Trailing whitespace in gitignore wasn't being ignored.
|
||||
* [BUG #43](https://github.com/BurntSushi/ripgrep/issues/43):
|
||||
--glob didn't work with directories.
|
||||
* [BUG #46](https://github.com/BurntSushi/ripgrep/issues/46):
|
||||
Use one fewer worker thread than what is provided on CLI.
|
||||
* [BUG #47](https://github.com/BurntSushi/ripgrep/issues/47):
|
||||
--help/--version now work even if other options are set.
|
||||
* [BUG #55](https://github.com/BurntSushi/ripgrep/issues/55):
|
||||
ripgrep was refusing to search /proc/cpuinfo. Fixed by disabling memory
|
||||
maps for files with zero size.
|
||||
* [BUG #64](https://github.com/BurntSushi/ripgrep/issues/64):
|
||||
The first path given with --files set was ignored.
|
||||
* [BUG #67](https://github.com/BurntSushi/ripgrep/issues/67):
|
||||
Sometimes whitelist globs like `!/dir` weren't interpreted as anchored.
|
||||
* [BUG #77](https://github.com/BurntSushi/ripgrep/issues/77):
|
||||
When -q/--quiet flag was passed, ripgrep kept searching even after a match
|
||||
was found.
|
||||
* [BUG #90](https://github.com/BurntSushi/ripgrep/issues/90):
|
||||
Permit whitelisting hidden files.
|
||||
* [BUG #93](https://github.com/BurntSushi/ripgrep/issues/93):
|
||||
ripgrep was extracting an erroneous inner literal from a repeated pattern.
|
12
Cargo.lock
generated
12
Cargo.lock
generated
@@ -1,13 +1,13 @@
|
||||
[root]
|
||||
name = "ripgrep"
|
||||
version = "0.1.17"
|
||||
version = "0.2.1"
|
||||
dependencies = [
|
||||
"deque 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"docopt 0.6.84 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"docopt 0.6.85 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"grep 0.1.2",
|
||||
"grep 0.1.3",
|
||||
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@@ -40,7 +40,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "docopt"
|
||||
version = "0.6.84"
|
||||
version = "0.6.85"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@@ -80,7 +80,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "grep"
|
||||
version = "0.1.2"
|
||||
version = "0.1.3"
|
||||
dependencies = [
|
||||
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@@ -234,7 +234,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
[metadata]
|
||||
"checksum aho-corasick 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ca972c2ea5f742bfce5687b9aef75506a764f61d37f8f649047846a9686ddb66"
|
||||
"checksum deque 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1614659040e711785ed8ea24219140654da1729f3ec8a47a9719d041112fe7bf"
|
||||
"checksum docopt 0.6.84 (registry+https://github.com/rust-lang/crates.io-index)" = "5cbd4e858d9719f44d8c22567afd9eb4a3d12a23f5be67bd3ddc98ffdcf31e5c"
|
||||
"checksum docopt 0.6.85 (registry+https://github.com/rust-lang/crates.io-index)" = "1b88d783674021c5570e7238e17985b9b8c7141d90f33de49031b8d56e7f0bf9"
|
||||
"checksum env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "15abd780e45b3ea4f76b4e9a26ff4843258dd8a3eed2775a0e7368c2e7936c2f"
|
||||
"checksum fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6cc484842f1e2884faf56f529f960cc12ad8c71ce96cc7abba0a067c98fee344"
|
||||
"checksum fs2 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "bcd414e5a1a979b931bb92f41b7a54106d3f6d2e6c253e9ce943b7cd468251ef"
|
||||
|
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "ripgrep"
|
||||
version = "0.1.17" #:version
|
||||
version = "0.2.1" #:version
|
||||
authors = ["Andrew Gallant <jamslam@gmail.com>"]
|
||||
description = """
|
||||
Line oriented search tool using Rust's regex library. Combines the raw
|
||||
@@ -27,7 +27,7 @@ deque = "0.3"
|
||||
docopt = "0.6"
|
||||
env_logger = "0.3"
|
||||
fnv = "1.0"
|
||||
grep = { version = "0.1.2", path = "grep" }
|
||||
grep = { version = "0.1.3", path = "grep" }
|
||||
lazy_static = "0.2"
|
||||
libc = "0.2"
|
||||
log = "0.3"
|
||||
|
16
README.md
16
README.md
@@ -94,8 +94,7 @@ Linux.](https://github.com/BurntSushi/ripgrep/releases) Linux binaries are
|
||||
static executables. Windows binaries are available either as built with MinGW
|
||||
(GNU) or with Microsoft Visual C++ (MSVC). When possible, prefer MSVC over GNU,
|
||||
but you'll need to have the
|
||||
[Microsoft Visual C++ Build
|
||||
Tools](http://landinghub.visualstudio.com/visual-cpp-build-tools)
|
||||
[Microsoft VC++ 2015 redistributable](https://www.microsoft.com/en-us/download/details.aspx?id=48145)
|
||||
installed.
|
||||
|
||||
If you're a **Homebrew** user, then you can install it with a custom formula
|
||||
@@ -106,11 +105,10 @@ directly):
|
||||
$ brew install https://raw.githubusercontent.com/BurntSushi/ripgrep/master/pkg/brew/ripgrep.rb
|
||||
```
|
||||
|
||||
If you're an **Archlinux** user, then you can install `ripgrep` from the
|
||||
[`ripgrep` AUR package](https://aur.archlinux.org/packages/ripgrep/), e.g.,
|
||||
If you're an **Arch Linux** user, then you can install `ripgrep` from the official repos:
|
||||
|
||||
```
|
||||
$ yaourt -S ripgrep
|
||||
$ pacman -Syu ripgrep
|
||||
```
|
||||
|
||||
If you're a **Rust programmer**, `ripgrep` can be installed with `cargo`:
|
||||
@@ -146,9 +144,9 @@ files, ignore hidden files and directories and skip binary files:
|
||||
$ rg foobar
|
||||
```
|
||||
|
||||
The above command also respects all `.rgignore` files, including in parent
|
||||
directories. `.rgignore` files can be used when `.gitignore` files are
|
||||
insufficient. In all cases, `.rgignore` patterns take precedence over
|
||||
The above command also respects all `.ignore` files, including in parent
|
||||
directories. `.ignore` files can be used when `.gitignore` files are
|
||||
insufficient. In all cases, `.ignore` patterns take precedence over
|
||||
`.gitignore`.
|
||||
|
||||
To ignore all ignore files, use `-u`. To additionally search hidden files
|
||||
@@ -237,7 +235,7 @@ The syntax supported is
|
||||
`ripgrep` compiles with Rust 1.9 (stable) or newer. Building is easy:
|
||||
|
||||
```
|
||||
$ git clone git://github.com/BurntSushi/ripgrep
|
||||
$ git clone https://github.com/BurntSushi/ripgrep
|
||||
$ cd ripgrep
|
||||
$ cargo build --release
|
||||
$ ./target/release/rg --version
|
||||
|
@@ -16,6 +16,7 @@ disable_cross_doctests() {
|
||||
}
|
||||
|
||||
run_test_suite() {
|
||||
cargo clean --target $TARGET --verbose
|
||||
cargo build --target $TARGET --verbose
|
||||
cargo test --target $TARGET --verbose
|
||||
|
||||
|
53
doc/rg.1
53
doc/rg.1
@@ -16,9 +16,9 @@ rg [\f[I]options\f[]] \-\-files [\f[I]<\f[]path\f[I]> ...\f[]]
|
||||
.PP
|
||||
rg [\f[I]options\f[]] \-\-type\-list
|
||||
.PP
|
||||
rg \-\-help
|
||||
rg [\f[I]options\f[]] \-\-help
|
||||
.PP
|
||||
rg \-\-version
|
||||
rg [\f[I]options\f[]] \-\-version
|
||||
.SH DESCRIPTION
|
||||
.PP
|
||||
rg (ripgrep) combines the usability of The Silver Searcher (an ack
|
||||
@@ -86,6 +86,7 @@ Suppress line numbers.
|
||||
.TP
|
||||
.B \-q, \-\-quiet
|
||||
Do not print anything to stdout.
|
||||
If a match is found in a file, stop searching that file.
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
@@ -169,12 +170,23 @@ Print each file that would be searched (but don\[aq]t search).
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B \-l, \-\-files\-with\-matches
|
||||
Only show path of each file with matches.
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B \-H, \-\-with\-filename
|
||||
Prefix each match with the file name that contains it.
|
||||
This is the default when more than one file is searched.
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B \-\-no\-filename
|
||||
Never show the filename for a match.
|
||||
This is the default when one file is searched.
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B \-\-heading
|
||||
Show the file name above clusters of matches from each file.
|
||||
This is the default mode at a tty.
|
||||
@@ -211,8 +223,8 @@ Never use memory maps, even when they might be faster.
|
||||
.RE
|
||||
.TP
|
||||
.B \-\-no\-ignore
|
||||
Don\[aq]t respect ignore files (.gitignore, .rgignore, etc.) This
|
||||
implies \-\-no\-ignore\-parent.
|
||||
Don\[aq]t respect ignore files (.gitignore, .ignore, etc.) This implies
|
||||
\-\-no\-ignore\-parent.
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
@@ -221,11 +233,31 @@ Don\[aq]t respect ignore files in parent directories.
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B \-\-no\-ignore\-vcs
|
||||
Don\[aq]t respect version control ignore files (e.g., .gitignore).
|
||||
Note that .ignore files will continue to be respected.
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B \-\-null
|
||||
Whenever a file name is printed, follow it with a NUL byte.
|
||||
This includes printing filenames before matches, and when printing a
|
||||
list of matching files such as with \-\-count, \-\-files\-with\-matches
|
||||
and \-\-files.
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B \-p, \-\-pretty
|
||||
Alias for \-\-color=always \-\-heading \-n.
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B \-S, \-\-smart\-case
|
||||
Search case insensitively if the pattern is all lowercase.
|
||||
Search case sensitively otherwise.
|
||||
.RS
|
||||
.RE
|
||||
.TP
|
||||
.B \-j, \-\-threads \f[I]ARG\f[]
|
||||
The number of threads to use.
|
||||
Defaults to the number of logical CPUs (capped at 6).
|
||||
@@ -254,11 +286,20 @@ Show all supported file types and their associated globs.
|
||||
.TP
|
||||
.B \-\-type\-add \f[I]ARG\f[] ...
|
||||
Add a new glob for a particular file type.
|
||||
Example: \-\-type\-add html:\f[I]\&.html,\f[].htm
|
||||
Only one glob can be added at a time.
|
||||
Multiple \-\-type\-add flags can be provided.
|
||||
Unless \-\-type\-clear is used, globs are added to any existing globs
|
||||
inside of ripgrep.
|
||||
Note that this must be passed to every invocation of rg.
|
||||
.RS
|
||||
.RE
|
||||
.PP
|
||||
Example: \f[C]\-\-type\-add\ html:*.html\f[]
|
||||
.TP
|
||||
.B \-\-type\-clear \f[I]TYPE\f[] ...
|
||||
Clear the file type globs for TYPE.
|
||||
Clear the file type globs previously defined for TYPE.
|
||||
This only clears the default type definitions that are found inside of
|
||||
ripgrep.
|
||||
Note that this must be passed to every invocation of rg.
|
||||
.RS
|
||||
.RE
|
||||
|
42
doc/rg.1.md
42
doc/rg.1.md
@@ -12,9 +12,9 @@ rg [*options*] --files [*<*path*> ...*]
|
||||
|
||||
rg [*options*] --type-list
|
||||
|
||||
rg --help
|
||||
rg [*options*] --help
|
||||
|
||||
rg --version
|
||||
rg [*options*] --version
|
||||
|
||||
# DESCRIPTION
|
||||
|
||||
@@ -58,7 +58,8 @@ the raw speed of grep.
|
||||
: Suppress line numbers.
|
||||
|
||||
-q, --quiet
|
||||
: Do not print anything to stdout.
|
||||
: Do not print anything to stdout. If a match is found in a file, stop
|
||||
searching that file.
|
||||
|
||||
-r, --replace *ARG*
|
||||
: Replace every match with the string given. Capture group indices (e.g., $5)
|
||||
@@ -110,10 +111,17 @@ the raw speed of grep.
|
||||
--files
|
||||
: Print each file that would be searched (but don't search).
|
||||
|
||||
-l, --files-with-matches
|
||||
: Only show path of each file with matches.
|
||||
|
||||
-H, --with-filename
|
||||
: Prefix each match with the file name that contains it. This is the
|
||||
default when more than one file is searched.
|
||||
|
||||
--no-filename
|
||||
: Never show the filename for a match. This is the default when
|
||||
one file is searched.
|
||||
|
||||
--heading
|
||||
: Show the file name above clusters of matches from each file.
|
||||
This is the default mode at a tty.
|
||||
@@ -137,15 +145,29 @@ the raw speed of grep.
|
||||
: Never use memory maps, even when they might be faster.
|
||||
|
||||
--no-ignore
|
||||
: Don't respect ignore files (.gitignore, .rgignore, etc.)
|
||||
: Don't respect ignore files (.gitignore, .ignore, etc.)
|
||||
This implies --no-ignore-parent.
|
||||
|
||||
--no-ignore-parent
|
||||
: Don't respect ignore files in parent directories.
|
||||
|
||||
--no-ignore-vcs
|
||||
: Don't respect version control ignore files (e.g., .gitignore).
|
||||
Note that .ignore files will continue to be respected.
|
||||
|
||||
--null
|
||||
: Whenever a file name is printed, follow it with a NUL byte.
|
||||
This includes printing filenames before matches, and when printing
|
||||
a list of matching files such as with --count, --files-with-matches
|
||||
and --files.
|
||||
|
||||
-p, --pretty
|
||||
: Alias for --color=always --heading -n.
|
||||
|
||||
-S, --smart-case
|
||||
: Search case insensitively if the pattern is all lowercase.
|
||||
Search case sensitively otherwise.
|
||||
|
||||
-j, --threads *ARG*
|
||||
: The number of threads to use. Defaults to the number of logical CPUs
|
||||
(capped at 6). [default: 0]
|
||||
@@ -164,8 +186,14 @@ the raw speed of grep.
|
||||
: Show all supported file types and their associated globs.
|
||||
|
||||
--type-add *ARG* ...
|
||||
: Add a new glob for a particular file type.
|
||||
Example: --type-add html:*.html,*.htm
|
||||
: Add a new glob for a particular file type. Only one glob can be added
|
||||
at a time. Multiple --type-add flags can be provided. Unless --type-clear
|
||||
is used, globs are added to any existing globs inside of ripgrep. Note that
|
||||
this must be passed to every invocation of rg.
|
||||
|
||||
Example: `--type-add html:*.html`
|
||||
|
||||
--type-clear *TYPE* ...
|
||||
: Clear the file type globs for TYPE.
|
||||
: Clear the file type globs previously defined for TYPE. This only clears
|
||||
the default type definitions that are found inside of ripgrep. Note
|
||||
that this must be passed to every invocation of rg.
|
||||
|
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "grep"
|
||||
version = "0.1.2" #:version
|
||||
version = "0.1.3" #:version
|
||||
authors = ["Andrew Gallant <jamslam@gmail.com>"]
|
||||
description = """
|
||||
Fast line oriented regex searching as a library.
|
||||
|
@@ -8,7 +8,6 @@ Note that this implementation is incredibly suspicious. We need something more
|
||||
principled.
|
||||
*/
|
||||
use std::cmp;
|
||||
use std::iter;
|
||||
|
||||
use regex::bytes::Regex;
|
||||
use syntax::{
|
||||
@@ -181,8 +180,6 @@ fn repeat_range_literals<F: FnMut(&Expr, &mut Literals)>(
|
||||
lits: &mut Literals,
|
||||
mut f: F,
|
||||
) {
|
||||
use syntax::Expr::*;
|
||||
|
||||
if min == 0 {
|
||||
// This is a bit conservative. If `max` is set, then we could
|
||||
// treat this as a finite set of alternations. For now, we
|
||||
@@ -190,8 +187,12 @@ fn repeat_range_literals<F: FnMut(&Expr, &mut Literals)>(
|
||||
lits.cut();
|
||||
} else {
|
||||
let n = cmp::min(lits.limit_size(), min as usize);
|
||||
let es = iter::repeat(e.clone()).take(n).collect();
|
||||
f(&Concat(es), lits);
|
||||
// We only extract literals from a single repetition, even though
|
||||
// we could do more. e.g., `a{3}` will have `a` extracted instead of
|
||||
// `aaa`. The reason is that inner literal extraction can't be unioned
|
||||
// across repetitions. e.g., extracting `foofoofoo` from `(\w+foo){3}`
|
||||
// is wrong.
|
||||
f(e, lits);
|
||||
if n < min as usize {
|
||||
lits.cut();
|
||||
}
|
||||
|
@@ -52,6 +52,7 @@ pub struct GrepBuilder {
|
||||
#[derive(Clone, Debug)]
|
||||
struct Options {
|
||||
case_insensitive: bool,
|
||||
case_smart: bool,
|
||||
line_terminator: u8,
|
||||
size_limit: usize,
|
||||
dfa_size_limit: usize,
|
||||
@@ -61,6 +62,7 @@ impl Default for Options {
|
||||
fn default() -> Options {
|
||||
Options {
|
||||
case_insensitive: false,
|
||||
case_smart: false,
|
||||
line_terminator: b'\n',
|
||||
size_limit: 10 * (1 << 20),
|
||||
dfa_size_limit: 10 * (1 << 20),
|
||||
@@ -98,6 +100,18 @@ impl GrepBuilder {
|
||||
self
|
||||
}
|
||||
|
||||
/// Whether to enable smart case search or not (disabled by default).
|
||||
///
|
||||
/// Smart case uses case insensitive search if the regex is contains all
|
||||
/// lowercase literal characters. Otherwise, a case sensitive search is
|
||||
/// used instead.
|
||||
///
|
||||
/// Enabling the case_insensitive flag overrides this.
|
||||
pub fn case_smart(mut self, yes: bool) -> GrepBuilder {
|
||||
self.opts.case_smart = yes;
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the approximate size limit of the compiled regular expression.
|
||||
///
|
||||
/// This roughly corresponds to the number of bytes occupied by a
|
||||
@@ -148,8 +162,11 @@ impl GrepBuilder {
|
||||
/// Creates a new regex from the given expression with the current
|
||||
/// configuration.
|
||||
fn regex(&self, expr: &Expr) -> Result<Regex> {
|
||||
let casei =
|
||||
self.opts.case_insensitive
|
||||
|| (self.opts.case_smart && !has_uppercase_literal(expr));
|
||||
RegexBuilder::new(&expr.to_string())
|
||||
.case_insensitive(self.opts.case_insensitive)
|
||||
.case_insensitive(casei)
|
||||
.multi_line(true)
|
||||
.unicode(true)
|
||||
.size_limit(self.opts.size_limit)
|
||||
@@ -274,6 +291,23 @@ impl<'b, 's> Iterator for Iter<'b, 's> {
|
||||
}
|
||||
}
|
||||
|
||||
fn has_uppercase_literal(expr: &Expr) -> bool {
|
||||
use syntax::Expr::*;
|
||||
match *expr {
|
||||
Literal { ref chars, casei } => {
|
||||
casei || chars.iter().any(|c| c.is_uppercase())
|
||||
}
|
||||
LiteralBytes { ref bytes, casei } => {
|
||||
casei || bytes.iter().any(|&b| b'A' <= b && b <= b'Z')
|
||||
}
|
||||
Group { ref e, .. } => has_uppercase_literal(e),
|
||||
Repeat { ref e, .. } => has_uppercase_literal(e),
|
||||
Concat(ref es) => es.iter().any(has_uppercase_literal),
|
||||
Alternate(ref es) => es.iter().any(has_uppercase_literal),
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
#![allow(unused_imports)]
|
||||
|
@@ -1,15 +1,15 @@
|
||||
require 'formula'
|
||||
class Ripgrep < Formula
|
||||
version '0.1.15'
|
||||
version '0.2.0'
|
||||
desc "Search tool like grep and The Silver Searcher."
|
||||
homepage "https://github.com/BurntSushi/ripgrep"
|
||||
|
||||
if Hardware::CPU.is_64_bit?
|
||||
url "https://github.com/BurntSushi/ripgrep/releases/download/#{version}/ripgrep-#{version}-x86_64-apple-darwin.tar.gz"
|
||||
sha256 "fc138cd57b533bd65739f3f695322e483fe648736358d853ddb9bcd26d84fdc5"
|
||||
sha256 "f55ef5dac04178bcae0d6c5ba2d09690d326e8c7c3f28e561025b04e1ab81d80"
|
||||
else
|
||||
url "https://github.com/BurntSushi/ripgrep/releases/download/#{version}/ripgrep-#{version}-i686-apple-darwin.tar.gz"
|
||||
sha256 "3ce1f12e49a463bc9dd4cfe2537aa9989a0dc81f7aa6f959ee0d0d82b5f768cb"
|
||||
sha256 "d901d55ccb48c19067f563d42652dfd8642bf50d28a40c0e2a4d3e866857a93b"
|
||||
end
|
||||
|
||||
def install
|
||||
|
118
src/args.rs
118
src/args.rs
@@ -39,10 +39,10 @@ Usage: rg [options] -e PATTERN ... [<path> ...]
|
||||
rg [options] <pattern> [<path> ...]
|
||||
rg [options] --files [<path> ...]
|
||||
rg [options] --type-list
|
||||
rg --help
|
||||
rg --version
|
||||
rg [options] --help
|
||||
rg [options] --version
|
||||
|
||||
rg combines the usability of The Silver Searcher with the raw speed of grep.
|
||||
rg recursively searches your current directory for a regex pattern.
|
||||
|
||||
Common options:
|
||||
-a, --text Search binary files as if they were text.
|
||||
@@ -65,7 +65,8 @@ Common options:
|
||||
-n, --line-number Show line numbers (1-based). This is enabled
|
||||
by default at a tty.
|
||||
-N, --no-line-number Suppress line numbers.
|
||||
-q, --quiet Do not print anything to stdout.
|
||||
-q, --quiet Do not print anything to stdout. If a match is
|
||||
found in a file, stop searching that file.
|
||||
-r, --replace ARG Replace every match with the string given.
|
||||
Capture group indices (e.g., $5) and names
|
||||
(e.g., $foo) are supported.
|
||||
@@ -110,10 +111,17 @@ Less common options:
|
||||
--files
|
||||
Print each file that would be searched (but don't search).
|
||||
|
||||
-l, --files-with-matches
|
||||
Only show path of each file with matches.
|
||||
|
||||
-H, --with-filename
|
||||
Prefix each match with the file name that contains it. This is the
|
||||
default when more than one file is searched.
|
||||
|
||||
--no-filename
|
||||
Never show the filename for a match. This is the default when
|
||||
one file is searched.
|
||||
|
||||
--heading
|
||||
Show the file name above clusters of matches from each file.
|
||||
This is the default mode at a tty.
|
||||
@@ -137,15 +145,29 @@ Less common options:
|
||||
Never use memory maps, even when they might be faster.
|
||||
|
||||
--no-ignore
|
||||
Don't respect ignore files (.gitignore, .rgignore, etc.)
|
||||
Don't respect ignore files (.gitignore, .ignore, etc.)
|
||||
This implies --no-ignore-parent.
|
||||
|
||||
--no-ignore-parent
|
||||
Don't respect ignore files in parent directories.
|
||||
|
||||
--no-ignore-vcs
|
||||
Don't respect version control ignore files (e.g., .gitignore).
|
||||
Note that .ignore files will continue to be respected.
|
||||
|
||||
--null
|
||||
Whenever a file name is printed, follow it with a NUL byte.
|
||||
This includes printing filenames before matches, and when printing
|
||||
a list of matching files such as with --count, --files-with-matches
|
||||
and --files.
|
||||
|
||||
-p, --pretty
|
||||
Alias for --color=always --heading -n.
|
||||
|
||||
-S, --smart-case
|
||||
Search case insensitively if the pattern is all lowercase.
|
||||
Search case sensitively otherwise.
|
||||
|
||||
-j, --threads ARG
|
||||
The number of threads to use. Defaults to the number of logical CPUs
|
||||
(capped at 6). [default: 0]
|
||||
@@ -163,11 +185,17 @@ File type management options:
|
||||
Show all supported file types and their associated globs.
|
||||
|
||||
--type-add ARG ...
|
||||
Add a new glob for a particular file type.
|
||||
Example: --type-add html:*.html,*.htm
|
||||
Add a new glob for a particular file type. Only one glob can be added
|
||||
at a time. Multiple type-add flags can be provided. Unless type-clear
|
||||
is used, globs are added to any existing globs inside of ripgrep. Note
|
||||
that this must be passed to every invocation of rg.
|
||||
|
||||
Example: `--type-add html:*.html`
|
||||
|
||||
--type-clear TYPE ...
|
||||
Clear the file type globs for TYPE.
|
||||
Clear the file type globs previously defined for TYPE. This only clears
|
||||
the default type definitions that are found inside of ripgrep. Note
|
||||
that this must be passed to every invocation of rg.
|
||||
";
|
||||
|
||||
/// RawArgs are the args as they are parsed from Docopt. They aren't used
|
||||
@@ -183,6 +211,7 @@ pub struct RawArgs {
|
||||
flag_context: usize,
|
||||
flag_context_separator: String,
|
||||
flag_count: bool,
|
||||
flag_files_with_matches: bool,
|
||||
flag_debug: bool,
|
||||
flag_files: bool,
|
||||
flag_follow: bool,
|
||||
@@ -197,12 +226,16 @@ pub struct RawArgs {
|
||||
flag_no_heading: bool,
|
||||
flag_no_ignore: bool,
|
||||
flag_no_ignore_parent: bool,
|
||||
flag_no_ignore_vcs: bool,
|
||||
flag_no_line_number: bool,
|
||||
flag_no_mmap: bool,
|
||||
flag_no_filename: bool,
|
||||
flag_null: bool,
|
||||
flag_pretty: bool,
|
||||
flag_quiet: bool,
|
||||
flag_regexp: Vec<String>,
|
||||
flag_replace: Option<String>,
|
||||
flag_smart_case: bool,
|
||||
flag_text: bool,
|
||||
flag_threads: usize,
|
||||
flag_type: Vec<String>,
|
||||
@@ -227,6 +260,7 @@ pub struct Args {
|
||||
column: bool,
|
||||
context_separator: Vec<u8>,
|
||||
count: bool,
|
||||
files_with_matches: bool,
|
||||
eol: u8,
|
||||
files: bool,
|
||||
follow: bool,
|
||||
@@ -241,6 +275,8 @@ pub struct Args {
|
||||
mmap: bool,
|
||||
no_ignore: bool,
|
||||
no_ignore_parent: bool,
|
||||
no_ignore_vcs: bool,
|
||||
null: bool,
|
||||
quiet: bool,
|
||||
replace: Option<Vec<u8>>,
|
||||
text: bool,
|
||||
@@ -259,7 +295,8 @@ impl RawArgs {
|
||||
if self.arg_path.is_empty() {
|
||||
if atty::on_stdin()
|
||||
|| self.flag_files
|
||||
|| self.flag_type_list {
|
||||
|| self.flag_type_list
|
||||
|| !atty::stdin_is_readable() {
|
||||
vec![Path::new("./").to_path_buf()]
|
||||
} else {
|
||||
vec![Path::new("-").to_path_buf()]
|
||||
@@ -283,6 +320,9 @@ impl RawArgs {
|
||||
} else if cfg!(windows) {
|
||||
// On Windows, memory maps appear faster than read calls. Neat.
|
||||
true
|
||||
} else if cfg!(darwin) {
|
||||
// On Mac, memory maps appear to suck. Neat.
|
||||
false
|
||||
} else {
|
||||
// If we're only searching a few paths and all of them are
|
||||
// files, then memory maps are probably faster.
|
||||
@@ -317,16 +357,20 @@ impl RawArgs {
|
||||
self.flag_color == "always"
|
||||
};
|
||||
let eol = b'\n';
|
||||
|
||||
let mut with_filename = self.flag_with_filename;
|
||||
if !with_filename {
|
||||
with_filename = paths.len() > 1 || paths[0].is_dir();
|
||||
}
|
||||
with_filename = with_filename && !self.flag_no_filename;
|
||||
|
||||
let mut btypes = TypesBuilder::new();
|
||||
btypes.add_defaults();
|
||||
try!(self.add_types(&mut btypes));
|
||||
let types = try!(btypes.build());
|
||||
let grep = try!(
|
||||
GrepBuilder::new(&pattern)
|
||||
.case_smart(self.flag_smart_case)
|
||||
.case_insensitive(self.flag_ignore_case)
|
||||
.line_terminator(eol)
|
||||
.build()
|
||||
@@ -343,6 +387,7 @@ impl RawArgs {
|
||||
column: self.flag_column,
|
||||
context_separator: unescape(&self.flag_context_separator),
|
||||
count: self.flag_count,
|
||||
files_with_matches: self.flag_files_with_matches,
|
||||
eol: eol,
|
||||
files: self.flag_files,
|
||||
follow: self.flag_follow,
|
||||
@@ -359,6 +404,10 @@ impl RawArgs {
|
||||
no_ignore_parent:
|
||||
// --no-ignore implies --no-ignore-parent
|
||||
self.flag_no_ignore_parent || no_ignore,
|
||||
no_ignore_vcs:
|
||||
// --no-ignore implies --no-ignore-vcs
|
||||
self.flag_no_ignore_vcs || no_ignore,
|
||||
null: self.flag_null,
|
||||
quiet: self.flag_quiet,
|
||||
replace: self.flag_replace.clone().map(|s| s.into_bytes()),
|
||||
text: text,
|
||||
@@ -451,7 +500,7 @@ impl Args {
|
||||
}
|
||||
}
|
||||
}
|
||||
let raw: RawArgs =
|
||||
let mut raw: RawArgs =
|
||||
Docopt::new(USAGE)
|
||||
.and_then(|d| d.argv(argv).version(Some(version())).decode())
|
||||
.unwrap_or_else(|e| e.exit());
|
||||
@@ -466,6 +515,13 @@ impl Args {
|
||||
errored!("failed to initialize logger: {}", err);
|
||||
}
|
||||
|
||||
// *sigh*... If --files is given, then the first path ends up in
|
||||
// pattern.
|
||||
if raw.flag_files {
|
||||
if !raw.arg_pattern.is_empty() {
|
||||
raw.arg_path.insert(0, raw.arg_pattern.clone());
|
||||
}
|
||||
}
|
||||
raw.to_args().map_err(From::from)
|
||||
}
|
||||
|
||||
@@ -506,6 +562,7 @@ impl Args {
|
||||
.heading(self.heading)
|
||||
.line_per_match(self.line_per_match)
|
||||
.quiet(self.quiet)
|
||||
.null(self.null)
|
||||
.with_filename(self.with_filename);
|
||||
if let Some(ref rep) = self.replace {
|
||||
p = p.replace(rep.clone());
|
||||
@@ -517,14 +574,23 @@ impl Args {
|
||||
/// to the writer given.
|
||||
pub fn out(&self) -> Out {
|
||||
let mut out = Out::new(self.color);
|
||||
if self.heading && !self.count {
|
||||
out = out.file_separator(b"".to_vec());
|
||||
} else if self.before_context > 0 || self.after_context > 0 {
|
||||
out = out.file_separator(self.context_separator.clone());
|
||||
if let Some(filesep) = self.file_separator() {
|
||||
out = out.file_separator(filesep);
|
||||
}
|
||||
out
|
||||
}
|
||||
|
||||
/// Retrieve the configured file separator.
|
||||
pub fn file_separator(&self) -> Option<Vec<u8>> {
|
||||
if self.heading && !self.count && !self.files_with_matches {
|
||||
Some(b"".to_vec())
|
||||
} else if self.before_context > 0 || self.after_context > 0 {
|
||||
Some(self.context_separator.clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a new buffer for use with searching.
|
||||
#[cfg(not(windows))]
|
||||
pub fn outbuf(&self) -> ColoredTerminal<term::TerminfoTerminal<Vec<u8>>> {
|
||||
@@ -571,6 +637,7 @@ impl Args {
|
||||
.after_context(self.after_context)
|
||||
.before_context(self.before_context)
|
||||
.count(self.count)
|
||||
.files_with_matches(self.files_with_matches)
|
||||
.eol(self.eol)
|
||||
.line_number(self.line_number)
|
||||
.invert_match(self.invert_match)
|
||||
@@ -589,6 +656,7 @@ impl Args {
|
||||
) -> BufferSearcher<'a, W> {
|
||||
BufferSearcher::new(printer, grep, path, buf)
|
||||
.count(self.count)
|
||||
.files_with_matches(self.files_with_matches)
|
||||
.eol(self.eol)
|
||||
.line_number(self.line_number)
|
||||
.invert_match(self.invert_match)
|
||||
@@ -615,14 +683,20 @@ impl Args {
|
||||
pub fn walker(&self, path: &Path) -> Result<walk::Iter> {
|
||||
let wd = WalkDir::new(path).follow_links(self.follow);
|
||||
let mut ig = Ignore::new();
|
||||
ig.ignore_hidden(!self.hidden);
|
||||
ig.no_ignore(self.no_ignore);
|
||||
ig.add_types(self.types.clone());
|
||||
if !self.no_ignore_parent {
|
||||
try!(ig.push_parents(path));
|
||||
}
|
||||
if let Some(ref overrides) = self.glob_overrides {
|
||||
ig.add_override(overrides.clone());
|
||||
// Only register ignore rules if this is a directory. If it's a file,
|
||||
// then it was explicitly given by the end user, so we always search
|
||||
// it.
|
||||
if path.is_dir() {
|
||||
ig.ignore_hidden(!self.hidden);
|
||||
ig.no_ignore(self.no_ignore);
|
||||
ig.no_ignore_vcs(self.no_ignore_vcs);
|
||||
ig.add_types(self.types.clone());
|
||||
if !self.no_ignore_parent {
|
||||
try!(ig.push_parents(path));
|
||||
}
|
||||
if let Some(ref overrides) = self.glob_overrides {
|
||||
ig.add_override(overrides.clone());
|
||||
}
|
||||
}
|
||||
Ok(walk::Iter::new(ig, wd))
|
||||
}
|
||||
|
44
src/atty.rs
44
src/atty.rs
@@ -4,30 +4,58 @@ from (or to) a terminal. Windows and Unix do this differently, so implement
|
||||
both here.
|
||||
*/
|
||||
|
||||
#[cfg(unix)]
|
||||
pub fn stdin_is_readable() -> bool {
|
||||
use std::fs::File;
|
||||
use std::os::unix::fs::FileTypeExt;
|
||||
use std::os::unix::io::{FromRawFd, IntoRawFd};
|
||||
use libc;
|
||||
|
||||
let file = unsafe { File::from_raw_fd(libc::STDIN_FILENO) };
|
||||
let md = file.metadata();
|
||||
let _ = file.into_raw_fd();
|
||||
let ft = match md {
|
||||
Err(_) => return false,
|
||||
Ok(md) => md.file_type(),
|
||||
};
|
||||
ft.is_file() || ft.is_fifo()
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
pub fn stdin_is_readable() -> bool {
|
||||
// ???
|
||||
true
|
||||
}
|
||||
|
||||
/// Returns true if there is a tty on stdin.
|
||||
#[cfg(unix)]
|
||||
pub fn on_stdin() -> bool {
|
||||
use libc;
|
||||
0 < unsafe { libc::isatty(libc::STDIN_FILENO) }
|
||||
}
|
||||
|
||||
/// Returns true if there is a tty on stdout.
|
||||
#[cfg(unix)]
|
||||
pub fn on_stdout() -> bool {
|
||||
use libc;
|
||||
0 < unsafe { libc::isatty(libc::STDOUT_FILENO) }
|
||||
}
|
||||
|
||||
/// Returns true if there is a tty on stdin.
|
||||
#[cfg(windows)]
|
||||
pub fn on_stdin() -> bool {
|
||||
use kernel32;
|
||||
use winapi;
|
||||
|
||||
unsafe {
|
||||
let fd = winapi::winbase::STD_INPUT_HANDLE;
|
||||
let mut out = 0;
|
||||
kernel32::GetConsoleMode(kernel32::GetStdHandle(fd), &mut out) != 0
|
||||
}
|
||||
// BUG: https://github.com/BurntSushi/ripgrep/issues/19
|
||||
// It's not clear to me how to determine whether there is a tty on stdin.
|
||||
// Checking GetConsoleMode(GetStdHandle(stdin)) != 0 appears to report
|
||||
// that stdin is a pipe, even if it's not in a cygwin terminal, for
|
||||
// example.
|
||||
//
|
||||
// To fix this, we just assume there is always a tty on stdin. If Windows
|
||||
// users need to search stdin, they'll have to pass -. Ug.
|
||||
true
|
||||
}
|
||||
|
||||
/// Returns true if there is a tty on stdout.
|
||||
#[cfg(windows)]
|
||||
pub fn on_stdout() -> bool {
|
||||
use kernel32;
|
||||
|
@@ -9,7 +9,7 @@ The motivation for this submodule is performance and portability:
|
||||
2. We could shell out to a `git` sub-command like ls-files or status, but it
|
||||
seems better to not rely on the existence of external programs for a search
|
||||
tool. Besides, we need to implement this logic anyway to support things like
|
||||
an .rgignore file.
|
||||
an .ignore file.
|
||||
|
||||
The key implementation detail here is that a single gitignore file is compiled
|
||||
into a single RegexSet, which can be used to report which globs match a
|
||||
@@ -31,7 +31,7 @@ use std::path::{Path, PathBuf};
|
||||
use regex;
|
||||
|
||||
use glob;
|
||||
use pathutil::strip_prefix;
|
||||
use pathutil::{is_file_name, strip_prefix};
|
||||
|
||||
/// Represents an error that can occur when parsing a gitignore file.
|
||||
#[derive(Debug)]
|
||||
@@ -115,7 +115,17 @@ impl Gitignore {
|
||||
if let Some(p) = strip_prefix("./", path) {
|
||||
path = p;
|
||||
}
|
||||
if let Some(p) = strip_prefix(&self.root, path) {
|
||||
// Strip any common prefix between the candidate path and the root
|
||||
// of the gitignore, to make sure we get relative matching right.
|
||||
// BUT, a file name might not have any directory components to it,
|
||||
// in which case, we don't want to accidentally strip any part of the
|
||||
// file name.
|
||||
if !is_file_name(path) {
|
||||
if let Some(p) = strip_prefix(&self.root, path) {
|
||||
path = p;
|
||||
}
|
||||
}
|
||||
if let Some(p) = strip_prefix("/", path) {
|
||||
path = p;
|
||||
}
|
||||
self.matched_stripped(path, is_dir)
|
||||
@@ -225,9 +235,10 @@ impl GitignoreBuilder {
|
||||
/// The path given should be the path at which the globs for this gitignore
|
||||
/// file should be matched.
|
||||
pub fn new<P: AsRef<Path>>(root: P) -> GitignoreBuilder {
|
||||
let root = strip_prefix("./", root.as_ref()).unwrap_or(root.as_ref());
|
||||
GitignoreBuilder {
|
||||
builder: glob::SetBuilder::new(),
|
||||
root: root.as_ref().to_path_buf(),
|
||||
root: root.to_path_buf(),
|
||||
patterns: vec![],
|
||||
}
|
||||
}
|
||||
@@ -272,6 +283,12 @@ impl GitignoreBuilder {
|
||||
from: P,
|
||||
mut line: &str,
|
||||
) -> Result<(), Error> {
|
||||
if line.starts_with("#") {
|
||||
return Ok(());
|
||||
}
|
||||
if !line.ends_with("\\ ") {
|
||||
line = line.trim_right();
|
||||
}
|
||||
if line.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
@@ -284,24 +301,15 @@ impl GitignoreBuilder {
|
||||
};
|
||||
let mut opts = glob::MatchOptions::default();
|
||||
let has_slash = line.chars().any(|c| c == '/');
|
||||
// If the line starts with an escaped '!', then remove the escape.
|
||||
// Otherwise, if it starts with an unescaped '!', then this is a
|
||||
// whitelist pattern.
|
||||
match line.chars().nth(0) {
|
||||
Some('#') => return Ok(()),
|
||||
Some('\\') => {
|
||||
match line.chars().nth(1) {
|
||||
Some('!') | Some('#') => {
|
||||
line = &line[1..];
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
Some('!') => {
|
||||
let is_absolute = line.chars().nth(0).unwrap() == '/';
|
||||
if line.starts_with("\\!") || line.starts_with("\\#") {
|
||||
line = &line[1..];
|
||||
} else {
|
||||
if line.starts_with("!") {
|
||||
pat.whitelist = true;
|
||||
line = &line[1..];
|
||||
}
|
||||
Some('/') => {
|
||||
if line.starts_with("/") {
|
||||
// `man gitignore` says that if a glob starts with a slash,
|
||||
// then the glob can only match the beginning of a path
|
||||
// (relative to the location of gitignore). We achieve this by
|
||||
@@ -309,7 +317,6 @@ impl GitignoreBuilder {
|
||||
opts.require_literal_separator = true;
|
||||
line = &line[1..];
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
// If it ends with a slash, then this should only match directories,
|
||||
// but the slash should otherwise not be used while globbing.
|
||||
@@ -320,14 +327,25 @@ impl GitignoreBuilder {
|
||||
}
|
||||
}
|
||||
// If there is a literal slash, then we note that so that globbing
|
||||
// doesn't let wildcards match slashes. Otherwise, we need to let
|
||||
// the pattern match anywhere, so we add a `**/` prefix to achieve
|
||||
// that behavior.
|
||||
// doesn't let wildcards match slashes.
|
||||
pat.pat = line.to_string();
|
||||
if has_slash {
|
||||
opts.require_literal_separator = true;
|
||||
} else {
|
||||
pat.pat = format!("**/{}", pat.pat);
|
||||
}
|
||||
// If there was a leading slash, then this is a pattern that must
|
||||
// match the entire path name. Otherwise, we should let it match
|
||||
// anywhere, so use a **/ prefix.
|
||||
if !is_absolute {
|
||||
// ... but only if we don't already have a **/ prefix.
|
||||
if !pat.pat.starts_with("**/") {
|
||||
pat.pat = format!("**/{}", pat.pat);
|
||||
}
|
||||
}
|
||||
// If the pattern ends with `/**`, then we should only match everything
|
||||
// inside a directory, but not the directory itself. Standard globs
|
||||
// will match the directory. So we add `/*` to force the issue.
|
||||
if pat.pat.ends_with("/**") {
|
||||
pat.pat = format!("{}/*", pat.pat);
|
||||
}
|
||||
try!(self.builder.add_with(&pat.pat, &opts));
|
||||
self.patterns.push(pat);
|
||||
@@ -393,10 +411,14 @@ mod tests {
|
||||
ignored!(ig24, ROOT, "target", "grep/target");
|
||||
ignored!(ig25, ROOT, "Cargo.lock", "./tabwriter-bin/Cargo.lock");
|
||||
ignored!(ig26, ROOT, "/foo/bar/baz", "./foo/bar/baz");
|
||||
ignored!(ig27, ROOT, "foo/", "xyz/foo", true);
|
||||
ignored!(ig28, ROOT, "src/*.rs", "src/grep/src/main.rs");
|
||||
ignored!(ig29, "./src", "/llvm/", "./src/llvm", true);
|
||||
ignored!(ig30, ROOT, "node_modules/ ", "node_modules", true);
|
||||
|
||||
not_ignored!(ignot1, ROOT, "amonths", "months");
|
||||
not_ignored!(ignot2, ROOT, "monthsa", "months");
|
||||
not_ignored!(ignot3, ROOT, "src/*.rs", "src/grep/src/main.rs");
|
||||
not_ignored!(ignot3, ROOT, "/src/*.rs", "src/grep/src/main.rs");
|
||||
not_ignored!(ignot4, ROOT, "/*.c", "mozilla-sha1/sha1.c");
|
||||
not_ignored!(ignot5, ROOT, "/src/*.rs", "src/grep/src/main.rs");
|
||||
not_ignored!(ignot6, ROOT, "*.rs\n!src/main.rs", "src/main.rs");
|
||||
@@ -406,4 +428,11 @@ mod tests {
|
||||
not_ignored!(ignot10, ROOT, "**/foo/bar", "foo/src/bar");
|
||||
not_ignored!(ignot11, ROOT, "#foo", "#foo");
|
||||
not_ignored!(ignot12, ROOT, "\n\n\n", "foo");
|
||||
not_ignored!(ignot13, ROOT, "foo/**", "foo", true);
|
||||
|
||||
// See: https://github.com/BurntSushi/ripgrep/issues/106
|
||||
#[test]
|
||||
fn regression_106() {
|
||||
Gitignore::from_str("/", " ").unwrap();
|
||||
}
|
||||
}
|
||||
|
185
src/glob.rs
185
src/glob.rs
@@ -43,12 +43,19 @@ use regex::bytes::RegexSet;
|
||||
|
||||
use pathutil::file_name;
|
||||
|
||||
lazy_static! {
|
||||
static ref FILE_SEPARATORS: String = regex::quote(r"/\");
|
||||
}
|
||||
|
||||
/// Represents an error that can occur when parsing a glob pattern.
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
pub enum Error {
|
||||
InvalidRecursive,
|
||||
UnclosedClass,
|
||||
InvalidRange(char, char),
|
||||
UnopenedAlternates,
|
||||
UnclosedAlternates,
|
||||
NestedAlternates,
|
||||
}
|
||||
|
||||
impl StdError for Error {
|
||||
@@ -63,6 +70,17 @@ impl StdError for Error {
|
||||
Error::InvalidRange(_, _) => {
|
||||
"invalid character range"
|
||||
}
|
||||
Error::UnopenedAlternates => {
|
||||
"unopened alternate group; missing '{' \
|
||||
(maybe escape '}' with '[}]'?)"
|
||||
}
|
||||
Error::UnclosedAlternates => {
|
||||
"unclosed alternate group; missing '}' \
|
||||
(maybe escape '{' with '[{]'?)"
|
||||
}
|
||||
Error::NestedAlternates => {
|
||||
"nested alternate groups are not allowed"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -70,7 +88,11 @@ impl StdError for Error {
|
||||
impl fmt::Display for Error {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match *self {
|
||||
Error::InvalidRecursive | Error::UnclosedClass => {
|
||||
Error::InvalidRecursive
|
||||
| Error::UnclosedClass
|
||||
| Error::UnopenedAlternates
|
||||
| Error::UnclosedAlternates
|
||||
| Error::NestedAlternates => {
|
||||
write!(f, "{}", self.description())
|
||||
}
|
||||
Error::InvalidRange(s, e) => {
|
||||
@@ -322,7 +344,7 @@ impl SetBuilder {
|
||||
///
|
||||
/// It cannot be used directly to match file paths, but it can be converted
|
||||
/// to a regular expression string.
|
||||
#[derive(Clone, Debug, Default)]
|
||||
#[derive(Clone, Debug, Default, Eq, PartialEq)]
|
||||
pub struct Pattern {
|
||||
tokens: Vec<Token>,
|
||||
}
|
||||
@@ -350,6 +372,7 @@ enum Token {
|
||||
negated: bool,
|
||||
ranges: Vec<(char, char)>,
|
||||
},
|
||||
Alternates(Vec<Pattern>),
|
||||
}
|
||||
|
||||
impl Pattern {
|
||||
@@ -358,13 +381,19 @@ impl Pattern {
|
||||
/// If the pattern is not a valid glob, then an error is returned.
|
||||
pub fn new(pat: &str) -> Result<Pattern, Error> {
|
||||
let mut p = Parser {
|
||||
p: Pattern::default(),
|
||||
stack: vec![Pattern::default()],
|
||||
chars: pat.chars().peekable(),
|
||||
prev: None,
|
||||
cur: None,
|
||||
};
|
||||
try!(p.parse());
|
||||
Ok(p.p)
|
||||
if p.stack.is_empty() {
|
||||
Err(Error::UnopenedAlternates)
|
||||
} else if p.stack.len() > 1 {
|
||||
Err(Error::UnclosedAlternates)
|
||||
} else {
|
||||
Ok(p.stack.pop().unwrap())
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns an extension if this pattern exclusively matches it.
|
||||
@@ -506,7 +535,6 @@ impl Pattern {
|
||||
/// regular expression and will represent the matching semantics of this
|
||||
/// glob pattern and the options given.
|
||||
pub fn to_regex_with(&self, options: &MatchOptions) -> String {
|
||||
let seps = regex::quote(r"/\");
|
||||
let mut re = String::new();
|
||||
re.push_str("(?-u)");
|
||||
if options.case_insensitive {
|
||||
@@ -520,7 +548,20 @@ impl Pattern {
|
||||
re.push('$');
|
||||
return re;
|
||||
}
|
||||
for tok in &self.tokens {
|
||||
self.tokens_to_regex(options, &self.tokens, &mut re);
|
||||
re.push('$');
|
||||
re
|
||||
}
|
||||
|
||||
fn tokens_to_regex(
|
||||
&self,
|
||||
options: &MatchOptions,
|
||||
tokens: &[Token],
|
||||
re: &mut String,
|
||||
) {
|
||||
let seps = &*FILE_SEPARATORS;
|
||||
|
||||
for tok in tokens {
|
||||
match *tok {
|
||||
Token::Literal(c) => {
|
||||
re.push_str(®ex::quote(&c.to_string()));
|
||||
@@ -566,15 +607,22 @@ impl Pattern {
|
||||
}
|
||||
re.push(']');
|
||||
}
|
||||
Token::Alternates(ref patterns) => {
|
||||
let mut parts = vec![];
|
||||
for pat in patterns {
|
||||
let mut altre = String::new();
|
||||
self.tokens_to_regex(options, &pat.tokens, &mut altre);
|
||||
parts.push(altre);
|
||||
}
|
||||
re.push_str(&parts.join("|"));
|
||||
}
|
||||
}
|
||||
}
|
||||
re.push('$');
|
||||
re
|
||||
}
|
||||
}
|
||||
|
||||
struct Parser<'a> {
|
||||
p: Pattern,
|
||||
stack: Vec<Pattern>,
|
||||
chars: iter::Peekable<str::Chars<'a>>,
|
||||
prev: Option<char>,
|
||||
cur: Option<char>,
|
||||
@@ -584,44 +632,101 @@ impl<'a> Parser<'a> {
|
||||
fn parse(&mut self) -> Result<(), Error> {
|
||||
while let Some(c) = self.bump() {
|
||||
match c {
|
||||
'?' => self.p.tokens.push(Token::Any),
|
||||
'?' => try!(self.push_token(Token::Any)),
|
||||
'*' => try!(self.parse_star()),
|
||||
'[' => try!(self.parse_class()),
|
||||
c => self.p.tokens.push(Token::Literal(c)),
|
||||
'{' => try!(self.push_alternate()),
|
||||
'}' => try!(self.pop_alternate()),
|
||||
',' => try!(self.parse_comma()),
|
||||
c => try!(self.push_token(Token::Literal(c))),
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn push_alternate(&mut self) -> Result<(), Error> {
|
||||
if self.stack.len() > 1 {
|
||||
return Err(Error::NestedAlternates);
|
||||
}
|
||||
Ok(self.stack.push(Pattern::default()))
|
||||
}
|
||||
|
||||
fn pop_alternate(&mut self) -> Result<(), Error> {
|
||||
let mut alts = vec![];
|
||||
while self.stack.len() >= 2 {
|
||||
alts.push(self.stack.pop().unwrap());
|
||||
}
|
||||
self.push_token(Token::Alternates(alts))
|
||||
}
|
||||
|
||||
fn push_token(&mut self, tok: Token) -> Result<(), Error> {
|
||||
match self.stack.last_mut() {
|
||||
None => Err(Error::UnopenedAlternates),
|
||||
Some(ref mut pat) => Ok(pat.tokens.push(tok)),
|
||||
}
|
||||
}
|
||||
|
||||
fn pop_token(&mut self) -> Result<Token, Error> {
|
||||
match self.stack.last_mut() {
|
||||
None => Err(Error::UnopenedAlternates),
|
||||
Some(ref mut pat) => Ok(pat.tokens.pop().unwrap()),
|
||||
}
|
||||
}
|
||||
|
||||
fn have_tokens(&self) -> Result<bool, Error> {
|
||||
match self.stack.last() {
|
||||
None => Err(Error::UnopenedAlternates),
|
||||
Some(ref pat) => Ok(!pat.tokens.is_empty()),
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_comma(&mut self) -> Result<(), Error> {
|
||||
// If we aren't inside a group alternation, then don't
|
||||
// treat commas specially. Otherwise, we need to start
|
||||
// a new alternate.
|
||||
if self.stack.len() <= 1 {
|
||||
self.push_token(Token::Literal(','))
|
||||
} else {
|
||||
Ok(self.stack.push(Pattern::default()))
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_star(&mut self) -> Result<(), Error> {
|
||||
let prev = self.prev;
|
||||
if self.chars.peek() != Some(&'*') {
|
||||
self.p.tokens.push(Token::ZeroOrMore);
|
||||
try!(self.push_token(Token::ZeroOrMore));
|
||||
return Ok(());
|
||||
}
|
||||
assert!(self.bump() == Some('*'));
|
||||
if self.p.tokens.is_empty() {
|
||||
self.p.tokens.push(Token::RecursivePrefix);
|
||||
if !try!(self.have_tokens()) {
|
||||
try!(self.push_token(Token::RecursivePrefix));
|
||||
let next = self.bump();
|
||||
if !next.is_none() && next != Some('/') {
|
||||
return Err(Error::InvalidRecursive);
|
||||
}
|
||||
return Ok(());
|
||||
}
|
||||
self.p.tokens.pop().unwrap();
|
||||
try!(self.pop_token());
|
||||
if prev != Some('/') {
|
||||
return Err(Error::InvalidRecursive);
|
||||
if self.stack.len() <= 1
|
||||
|| (prev != Some(',') && prev != Some('{')) {
|
||||
return Err(Error::InvalidRecursive);
|
||||
}
|
||||
}
|
||||
let next = self.bump();
|
||||
if next.is_none() {
|
||||
self.p.tokens.push(Token::RecursiveSuffix);
|
||||
return Ok(());
|
||||
match self.chars.peek() {
|
||||
None => {
|
||||
assert!(self.bump().is_none());
|
||||
self.push_token(Token::RecursiveSuffix)
|
||||
}
|
||||
Some(&',') | Some(&'}') if self.stack.len() >= 2 => {
|
||||
self.push_token(Token::RecursiveSuffix)
|
||||
}
|
||||
Some(&'/') => {
|
||||
assert!(self.bump() == Some('/'));
|
||||
self.push_token(Token::RecursiveZeroOrMore)
|
||||
}
|
||||
_ => Err(Error::InvalidRecursive),
|
||||
}
|
||||
if next != Some('/') {
|
||||
return Err(Error::InvalidRecursive);
|
||||
}
|
||||
self.p.tokens.push(Token::RecursiveZeroOrMore);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn parse_class(&mut self) -> Result<(), Error> {
|
||||
@@ -691,11 +796,10 @@ impl<'a> Parser<'a> {
|
||||
// it as a literal.
|
||||
ranges.push(('-', '-'));
|
||||
}
|
||||
self.p.tokens.push(Token::Class {
|
||||
self.push_token(Token::Class {
|
||||
negated: negated,
|
||||
ranges: ranges,
|
||||
});
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
fn bump(&mut self) -> Option<char> {
|
||||
@@ -934,16 +1038,6 @@ mod tests {
|
||||
baseliteral!(lit6, "[ab]", false);
|
||||
baseliteral!(lit7, "?", false);
|
||||
|
||||
/*
|
||||
issuffix!(suf1, "", false);
|
||||
issuffix!(suf2, "a", true);
|
||||
issuffix!(suf3, "ab", true);
|
||||
issuffix!(suf4, "*ab", true);
|
||||
issuffix!(suf5, "*.ab", true);
|
||||
issuffix!(suf6, "?.ab", true);
|
||||
issuffix!(suf7, "ab*", false);
|
||||
*/
|
||||
|
||||
matches!(match1, "a", "a");
|
||||
matches!(match2, "a*b", "a_b");
|
||||
matches!(match3, "a*b*c", "abc");
|
||||
@@ -975,6 +1069,7 @@ mod tests {
|
||||
matches!(matchrec20, "**/.*", "abc/.abc");
|
||||
matches!(matchrec21, ".*/**", ".abc");
|
||||
matches!(matchrec22, ".*/**", ".abc/abc");
|
||||
matches!(matchnot23, "foo/**", "foo");
|
||||
|
||||
matches!(matchrange1, "a[0-9]b", "a0b");
|
||||
matches!(matchrange2, "a[0-9]b", "a9b");
|
||||
@@ -1004,6 +1099,20 @@ mod tests {
|
||||
matches!(matchcasei3, "aBcDeFg", "ABCDEFG", CASEI);
|
||||
matches!(matchcasei4, "aBcDeFg", "AbCdEfG", CASEI);
|
||||
|
||||
matches!(matchalt1, "a,b", "a,b");
|
||||
matches!(matchalt2, ",", ",");
|
||||
matches!(matchalt3, "{a,b}", "a");
|
||||
matches!(matchalt4, "{a,b}", "b");
|
||||
matches!(matchalt5, "{**/src/**,foo}", "abc/src/bar");
|
||||
matches!(matchalt6, "{**/src/**,foo}", "foo");
|
||||
matches!(matchalt7, "{[}],foo}", "}");
|
||||
matches!(matchalt8, "{foo}", "foo");
|
||||
matches!(matchalt9, "{}", "");
|
||||
matches!(matchalt10, "{,}", "");
|
||||
matches!(matchalt11, "{*.foo,*.bar,*.wat}", "test.foo");
|
||||
matches!(matchalt12, "{*.foo,*.bar,*.wat}", "test.bar");
|
||||
matches!(matchalt13, "{*.foo,*.bar,*.wat}", "test.wat");
|
||||
|
||||
matches!(matchslash1, "abc/def", "abc/def", SLASHLIT);
|
||||
nmatches!(matchslash2, "abc?def", "abc/def", SLASHLIT);
|
||||
nmatches!(matchslash2_win, "abc?def", "abc\\def", SLASHLIT);
|
||||
|
130
src/ignore.rs
130
src/ignore.rs
@@ -5,7 +5,7 @@ whether a *single* file path should be searched or not.
|
||||
In general, there are two ways to ignore a particular file:
|
||||
|
||||
1. Specify an ignore rule in some "global" configuration, such as a
|
||||
$HOME/.rgignore or on the command line.
|
||||
$HOME/.ignore or on the command line.
|
||||
2. A specific ignore file (like .gitignore) found during directory traversal.
|
||||
|
||||
The `IgnoreDir` type handles ignore patterns for any one particular directory
|
||||
@@ -14,16 +14,18 @@ of `IgnoreDir`s for use during directory traversal.
|
||||
*/
|
||||
|
||||
use std::error::Error as StdError;
|
||||
use std::ffi::OsString;
|
||||
use std::fmt;
|
||||
use std::io;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use gitignore::{self, Gitignore, GitignoreBuilder, Match, Pattern};
|
||||
use pathutil::is_hidden;
|
||||
use pathutil::{file_name, is_hidden};
|
||||
use types::Types;
|
||||
|
||||
const IGNORE_NAMES: &'static [&'static str] = &[
|
||||
".gitignore",
|
||||
".ignore",
|
||||
".rgignore",
|
||||
];
|
||||
|
||||
@@ -77,7 +79,9 @@ impl From<gitignore::Error> for Error {
|
||||
pub struct Ignore {
|
||||
/// A stack of ignore patterns at each directory level of traversal.
|
||||
/// A directory that contributes no ignore patterns is `None`.
|
||||
stack: Vec<Option<IgnoreDir>>,
|
||||
stack: Vec<IgnoreDir>,
|
||||
/// A stack of parent directories above the root of the current search.
|
||||
parent_stack: Vec<IgnoreDir>,
|
||||
/// A set of override globs that are always checked first. A match (whether
|
||||
/// it's whitelist or blacklist) trumps anything in stack.
|
||||
overrides: Overrides,
|
||||
@@ -85,9 +89,11 @@ pub struct Ignore {
|
||||
types: Types,
|
||||
/// Whether to ignore hidden files or not.
|
||||
ignore_hidden: bool,
|
||||
/// When true, don't look at .gitignore or .agignore files for ignore
|
||||
/// When true, don't look at .gitignore or .ignore files for ignore
|
||||
/// rules.
|
||||
no_ignore: bool,
|
||||
/// When true, don't look at .gitignore files for ignore rules.
|
||||
no_ignore_vcs: bool,
|
||||
}
|
||||
|
||||
impl Ignore {
|
||||
@@ -95,10 +101,12 @@ impl Ignore {
|
||||
pub fn new() -> Ignore {
|
||||
Ignore {
|
||||
stack: vec![],
|
||||
parent_stack: vec![],
|
||||
overrides: Overrides::new(None),
|
||||
types: Types::empty(),
|
||||
ignore_hidden: true,
|
||||
no_ignore: false,
|
||||
no_ignore_vcs: true,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -114,6 +122,12 @@ impl Ignore {
|
||||
self
|
||||
}
|
||||
|
||||
/// When set, VCS ignore files are ignored.
|
||||
pub fn no_ignore_vcs(&mut self, yes: bool) -> &mut Ignore {
|
||||
self.no_ignore_vcs = yes;
|
||||
self
|
||||
}
|
||||
|
||||
/// Add a set of globs that overrides all other match logic.
|
||||
pub fn add_override(&mut self, gi: Gitignore) -> &mut Ignore {
|
||||
self.overrides = Overrides::new(Some(gi));
|
||||
@@ -138,10 +152,13 @@ impl Ignore {
|
||||
let mut path = &*path;
|
||||
let mut saw_git = path.join(".git").is_dir();
|
||||
let mut ignore_names = IGNORE_NAMES.to_vec();
|
||||
if self.no_ignore_vcs {
|
||||
ignore_names.retain(|&name| name != ".gitignore");
|
||||
}
|
||||
let mut ignore_dir_results = vec![];
|
||||
while let Some(parent) = path.parent() {
|
||||
if self.no_ignore {
|
||||
ignore_dir_results.push(Ok(None));
|
||||
ignore_dir_results.push(Ok(IgnoreDir::empty(parent)));
|
||||
} else {
|
||||
if saw_git {
|
||||
ignore_names.retain(|&name| name != ".gitignore");
|
||||
@@ -156,7 +173,7 @@ impl Ignore {
|
||||
}
|
||||
|
||||
for ignore_dir_result in ignore_dir_results.into_iter().rev() {
|
||||
try!(self.push_ignore_dir(ignore_dir_result));
|
||||
self.parent_stack.push(try!(ignore_dir_result));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
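The hunk above that walks parent directories prunes `.gitignore` from the candidate ignore-file names when `--no-ignore-vcs` is set (and again once a `.git` directory has been seen higher up). A small sketch of just that filtering step, with `ignore_names` as an invented helper:

```rust
const IGNORE_NAMES: &[&str] = &[".gitignore", ".ignore", ".rgignore"];

/// Return the ignore-file names to look for in a directory. When VCS
/// ignore files are disabled, `.gitignore` is dropped from the list,
/// mirroring the `retain` call in the hunk above.
fn ignore_names(no_ignore_vcs: bool) -> Vec<&'static str> {
    let mut names = IGNORE_NAMES.to_vec();
    if no_ignore_vcs {
        names.retain(|&name| name != ".gitignore");
    }
    names
}

fn main() {
    assert_eq!(ignore_names(false), vec![".gitignore", ".ignore", ".rgignore"]);
    assert_eq!(ignore_names(true), vec![".ignore", ".rgignore"]);
}
```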
@@ -167,10 +184,13 @@ impl Ignore {
|
||||
/// stack (and therefore should be popped).
|
||||
pub fn push<P: AsRef<Path>>(&mut self, path: P) -> Result<(), Error> {
|
||||
if self.no_ignore {
|
||||
self.stack.push(None);
|
||||
return Ok(());
|
||||
self.stack.push(IgnoreDir::empty(path));
|
||||
Ok(())
|
||||
} else if self.no_ignore_vcs {
|
||||
self.push_ignore_dir(IgnoreDir::without_vcs(path))
|
||||
} else {
|
||||
self.push_ignore_dir(IgnoreDir::new(path))
|
||||
}
|
||||
self.push_ignore_dir(IgnoreDir::new(path))
|
||||
}
|
||||
|
||||
/// Pushes the result of building a directory matcher on to the stack.
|
||||
@@ -178,7 +198,7 @@ impl Ignore {
|
||||
/// If the result given contains an error, then it is returned.
|
||||
pub fn push_ignore_dir(
|
||||
&mut self,
|
||||
result: Result<Option<IgnoreDir>, Error>,
|
||||
result: Result<IgnoreDir, Error>,
|
||||
) -> Result<(), Error> {
|
||||
match result {
|
||||
Ok(id) => {
|
||||
@@ -187,7 +207,7 @@ impl Ignore {
|
||||
}
|
||||
Err(err) => {
|
||||
// Don't leave the stack in an inconsistent state.
|
||||
self.stack.push(None);
|
||||
self.stack.push(IgnoreDir::empty("error"));
|
||||
Err(err)
|
||||
}
|
||||
}
|
||||
@@ -207,12 +227,9 @@ impl Ignore {
|
||||
if let Some(is_ignored) = self.ignore_match(path, mat) {
|
||||
return is_ignored;
|
||||
}
|
||||
if self.ignore_hidden && is_hidden(&path) {
|
||||
debug!("{} ignored because it is hidden", path.display());
|
||||
return true;
|
||||
}
|
||||
let mut whitelisted = false;
|
||||
if !self.no_ignore {
|
||||
for id in self.stack.iter().rev().filter_map(|id| id.as_ref()) {
|
||||
for id in self.stack.iter().rev() {
|
||||
let mat = id.matched(path, is_dir);
|
||||
if let Some(is_ignored) = self.ignore_match(path, mat) {
|
||||
if is_ignored {
|
||||
@@ -220,13 +237,43 @@ impl Ignore {
|
||||
}
|
||||
// If this path is whitelisted by an ignore, then
|
||||
// fallthrough and let the file type matcher have a say.
|
||||
whitelisted = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
// If the file has been whitelisted, then we have to stop checking
|
||||
// parent directories. The only thing that can override a whitelist
|
||||
// at this point is a type filter.
|
||||
if !whitelisted {
|
||||
let mut path = path.to_path_buf();
|
||||
for id in self.parent_stack.iter().rev() {
|
||||
if let Some(ref dirname) = id.name {
|
||||
path = Path::new(dirname).join(path);
|
||||
}
|
||||
let mat = id.matched(&*path, is_dir);
|
||||
if let Some(is_ignored) = self.ignore_match(&*path, mat) {
|
||||
if is_ignored {
|
||||
return true;
|
||||
}
|
||||
// If this path is whitelisted by an ignore, then
|
||||
// fallthrough and let the file type matcher have a
|
||||
// say.
|
||||
whitelisted = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
let mat = self.types.matched(path, is_dir);
|
||||
if let Some(is_ignored) = self.ignore_match(path, mat) {
|
||||
return is_ignored;
|
||||
if is_ignored {
|
||||
return true;
|
||||
}
|
||||
whitelisted = true;
|
||||
}
|
||||
if !whitelisted && self.ignore_hidden && is_hidden(&path) {
|
||||
debug!("{} ignored because it is hidden", path.display());
|
||||
return true;
|
||||
}
|
||||
false
|
||||
}
|
||||
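The rewritten `ignored` logic above consults matchers in a fixed order: overrides, then ignore files from directories under the search root, then ignore files from parent directories (only if nothing was whitelisted yet), then the file-type filter, with the hidden-file check applying only when no whitelist fired. A compressed sketch of that precedence; `Match` here is a stand-in for ripgrep's richer match type and the flat `matchers` slice replaces the real override/stack/parent structure:

```rust
/// Outcome of consulting one matcher: an explicit ignore, an explicit
/// whitelist, or no opinion.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Match {
    Ignored,
    Whitelisted,
    None,
}

/// Decide whether a path is ignored given matchers in precedence order.
/// The first matcher with an opinion wins; a whitelist stops further
/// ignore checks but still lets the type filter have a say, and the
/// hidden check only applies when nothing whitelisted the path.
fn ignored(matchers: &[Match], type_filter: Match, hidden: bool, ignore_hidden: bool) -> bool {
    let mut whitelisted = false;
    for &m in matchers {
        match m {
            Match::Ignored => return true,
            Match::Whitelisted => {
                whitelisted = true;
                break;
            }
            Match::None => {}
        }
    }
    match type_filter {
        Match::Ignored => return true,
        Match::Whitelisted => whitelisted = true,
        Match::None => {}
    }
    !whitelisted && ignore_hidden && hidden
}

fn main() {
    // A whitelist in a child directory stops parent ignores from applying,
    // and also disables the hidden-file check.
    let matchers = [Match::Whitelisted, Match::Ignored];
    assert!(!ignored(&matchers, Match::None, true, true));
}
```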
@@ -256,9 +303,12 @@ impl Ignore {
|
||||
|
||||
/// IgnoreDir represents a set of ignore patterns retrieved from a single
|
||||
/// directory.
|
||||
#[derive(Debug)]
|
||||
pub struct IgnoreDir {
|
||||
/// The path to this directory as given.
|
||||
path: PathBuf,
|
||||
/// The directory name, if one exists.
|
||||
name: Option<OsString>,
|
||||
/// A single accumulation of glob patterns for this directory, matched
|
||||
/// using gitignore semantics.
|
||||
///
|
||||
@@ -272,13 +322,27 @@ pub struct IgnoreDir {
|
||||
|
||||
impl IgnoreDir {
|
||||
/// Create a new matcher for the given directory.
|
||||
///
|
||||
/// If no ignore glob patterns could be found in the directory then `None`
|
||||
/// is returned.
|
||||
pub fn new<P: AsRef<Path>>(path: P) -> Result<Option<IgnoreDir>, Error> {
|
||||
pub fn new<P: AsRef<Path>>(path: P) -> Result<IgnoreDir, Error> {
|
||||
IgnoreDir::with_ignore_names(path, IGNORE_NAMES.iter())
|
||||
}
|
||||
|
||||
/// Create a new matcher for the given directory.
|
||||
///
|
||||
/// Don't respect VCS ignore files.
|
||||
pub fn without_vcs<P: AsRef<Path>>(path: P) -> Result<IgnoreDir, Error> {
|
||||
let names = IGNORE_NAMES.iter().filter(|name| **name != ".gitignore");
|
||||
IgnoreDir::with_ignore_names(path, names)
|
||||
}
|
||||
|
||||
/// Create a new IgnoreDir that never matches anything with the given path.
|
||||
pub fn empty<P: AsRef<Path>>(path: P) -> IgnoreDir {
|
||||
IgnoreDir {
|
||||
path: path.as_ref().to_path_buf(),
|
||||
name: file_name(path.as_ref()).map(|s| s.to_os_string()),
|
||||
gi: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a new matcher for the given directory using only the ignore
|
||||
/// patterns found in the file names given.
|
||||
///
|
||||
@@ -291,12 +355,9 @@ impl IgnoreDir {
|
||||
pub fn with_ignore_names<P: AsRef<Path>, S, I>(
|
||||
path: P,
|
||||
names: I,
|
||||
) -> Result<Option<IgnoreDir>, Error>
|
||||
) -> Result<IgnoreDir, Error>
|
||||
where P: AsRef<Path>, S: AsRef<str>, I: Iterator<Item=S> {
|
||||
let mut id = IgnoreDir {
|
||||
path: path.as_ref().to_path_buf(),
|
||||
gi: None,
|
||||
};
|
||||
let mut id = IgnoreDir::empty(path);
|
||||
let mut ok = false;
|
||||
let mut builder = GitignoreBuilder::new(&id.path);
|
||||
// The ordering here is important. Later globs have higher precedence.
|
||||
@@ -304,11 +365,10 @@ impl IgnoreDir {
|
||||
ok = builder.add_path(id.path.join(name.as_ref())).is_ok() || ok;
|
||||
}
|
||||
if !ok {
|
||||
Ok(None)
|
||||
} else {
|
||||
id.gi = Some(try!(builder.build()));
|
||||
Ok(Some(id))
|
||||
return Ok(id);
|
||||
}
|
||||
id.gi = Some(try!(builder.build()));
|
||||
Ok(id)
|
||||
}
|
||||
|
||||
/// Returns true if and only if the given file path should be ignored
|
||||
@@ -359,10 +419,6 @@ impl Overrides {
|
||||
/// Match::None (and interpreting non-matches as ignored) unless is_dir
|
||||
/// is true.
|
||||
pub fn matched<P: AsRef<Path>>(&self, path: P, is_dir: bool) -> Match {
|
||||
// File types don't apply to directories.
|
||||
if is_dir {
|
||||
return Match::None;
|
||||
}
|
||||
let path = path.as_ref();
|
||||
self.gi.as_ref()
|
||||
.map(|gi| {
|
||||
@@ -394,6 +450,9 @@ mod tests {
|
||||
let gi = builder.build().unwrap();
|
||||
let id = IgnoreDir {
|
||||
path: Path::new($root).to_path_buf(),
|
||||
name: Path::new($root).file_name().map(|s| {
|
||||
s.to_os_string()
|
||||
}),
|
||||
gi: Some(gi),
|
||||
};
|
||||
assert!(id.matched($path, false).is_ignored());
|
||||
@@ -411,6 +470,9 @@ mod tests {
|
||||
let gi = builder.build().unwrap();
|
||||
let id = IgnoreDir {
|
||||
path: Path::new($root).to_path_buf(),
|
||||
name: Path::new($root).file_name().map(|s| {
|
||||
s.to_os_string()
|
||||
}),
|
||||
gi: Some(gi),
|
||||
};
|
||||
assert!(!id.matched($path, false).is_ignored());
|
||||
src/main.rs | 78
@@ -23,11 +23,12 @@ extern crate winapi;
|
||||
use std::error::Error;
|
||||
use std::fs::File;
|
||||
use std::io;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::path::Path;
|
||||
use std::process;
|
||||
use std::result;
|
||||
use std::sync::{Arc, Mutex};
|
||||
use std::thread;
|
||||
use std::cmp;
|
||||
|
||||
use deque::{Stealer, Stolen};
|
||||
use grep::Grep;
|
||||
@@ -87,14 +88,17 @@ fn main() {
|
||||
fn run(args: Args) -> Result<u64> {
|
||||
let args = Arc::new(args);
|
||||
let paths = args.paths();
|
||||
let threads = cmp::max(1, args.threads() - 1);
|
||||
let isone =
|
||||
paths.len() == 1 && (paths[0] == Path::new("-") || paths[0].is_file());
|
||||
if args.files() {
|
||||
return run_files(args.clone());
|
||||
}
|
||||
if args.type_list() {
|
||||
return run_types(args.clone());
|
||||
}
|
||||
if paths.len() == 1 && (paths[0] == Path::new("-") || paths[0].is_file()) {
|
||||
return run_one(args.clone(), &paths[0]);
|
||||
if threads == 1 || isone {
|
||||
return run_one_thread(args.clone());
|
||||
}
|
||||
|
||||
let out = Arc::new(Mutex::new(args.out()));
|
||||
@@ -102,7 +106,7 @@ fn run(args: Args) -> Result<u64> {
|
||||
|
||||
let workq = {
|
||||
let (workq, stealer) = deque::new();
|
||||
for _ in 0..args.threads() {
|
||||
for _ in 0..threads {
|
||||
let worker = MultiWorker {
|
||||
chan_work: stealer.clone(),
|
||||
out: out.clone(),
|
||||
@@ -145,22 +149,52 @@ fn run(args: Args) -> Result<u64> {
|
||||
Ok(match_count)
|
||||
}
|
||||
|
||||
fn run_one(args: Arc<Args>, path: &Path) -> Result<u64> {
|
||||
fn run_one_thread(args: Arc<Args>) -> Result<u64> {
|
||||
let mut worker = Worker {
|
||||
args: args.clone(),
|
||||
inpbuf: args.input_buffer(),
|
||||
grep: args.grep(),
|
||||
match_count: 0,
|
||||
};
|
||||
let term = args.stdout();
|
||||
let mut printer = args.printer(term);
|
||||
let work =
|
||||
if path == Path::new("-") {
|
||||
WorkReady::Stdin
|
||||
let paths = args.paths();
|
||||
let mut term = args.stdout();
|
||||
|
||||
let mut paths_searched: u64 = 0;
|
||||
for p in paths {
|
||||
if p == Path::new("-") {
|
||||
paths_searched += 1;
|
||||
let mut printer = args.printer(&mut term);
|
||||
if worker.match_count > 0 {
|
||||
if let Some(sep) = args.file_separator() {
|
||||
printer = printer.file_separator(sep);
|
||||
}
|
||||
}
|
||||
worker.do_work(&mut printer, WorkReady::Stdin);
|
||||
} else {
|
||||
WorkReady::PathFile(path.to_path_buf(), try!(File::open(path)))
|
||||
};
|
||||
worker.do_work(&mut printer, work);
|
||||
for ent in try!(args.walker(p)) {
|
||||
paths_searched += 1;
|
||||
let mut printer = args.printer(&mut term);
|
||||
if worker.match_count > 0 {
|
||||
if let Some(sep) = args.file_separator() {
|
||||
printer = printer.file_separator(sep);
|
||||
}
|
||||
}
|
||||
let file = match File::open(ent.path()) {
|
||||
Ok(file) => file,
|
||||
Err(err) => {
|
||||
eprintln!("{}: {}", ent.path().display(), err);
|
||||
continue;
|
||||
}
|
||||
};
|
||||
worker.do_work(&mut printer, WorkReady::DirFile(ent, file));
|
||||
}
|
||||
}
|
||||
}
|
||||
if !paths.is_empty() && paths_searched == 0 {
|
||||
eprintln!("No files were searched, which means ripgrep probably \
|
||||
applied a filter you didn't expect. \
|
||||
Try running again with --debug.");
|
||||
}
|
||||
Ok(worker.match_count)
|
||||
}
|
||||
|
||||
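`run_one_thread` above walks every path with a single worker and, when a file cannot be opened, reports the error on stderr and keeps going instead of aborting the whole search. A stripped-down sketch of that shape, with `search` as a placeholder for the real per-file work:

```rust
use std::fs::File;
use std::io::Read;
use std::path::Path;

/// Stand-in for the real per-file search; it just counts bytes here.
fn search(_path: &Path, mut file: File) -> u64 {
    let mut buf = Vec::new();
    let _ = file.read_to_end(&mut buf);
    buf.len() as u64
}

/// Search every path, skipping (but reporting) files that cannot be
/// opened, the way the single-threaded path above does.
fn run_one_thread(paths: &[&Path]) -> u64 {
    let mut total = 0;
    for path in paths {
        let file = match File::open(path) {
            Ok(file) => file,
            Err(err) => {
                // Report and continue rather than aborting the search.
                eprintln!("{}: {}", path.display(), err);
                continue;
            }
        };
        total += search(path, file);
    }
    total
}

fn main() {
    let total = run_one_thread(&[Path::new("Cargo.toml"), Path::new("does-not-exist")]);
    println!("searched {} bytes", total);
}
```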
@@ -202,7 +236,6 @@ enum Work {
|
||||
enum WorkReady {
|
||||
Stdin,
|
||||
DirFile(DirEntry, File),
|
||||
PathFile(PathBuf, File),
|
||||
}
|
||||
|
||||
struct MultiWorker {
|
||||
@@ -277,17 +310,6 @@ impl Worker {
|
||||
self.search(printer, path, file)
|
||||
}
|
||||
}
|
||||
WorkReady::PathFile(path, file) => {
|
||||
let mut path = &*path;
|
||||
if let Some(p) = strip_prefix("./", path) {
|
||||
path = p;
|
||||
}
|
||||
if self.args.mmap() {
|
||||
self.search_mmap(printer, path, &file)
|
||||
} else {
|
||||
self.search(printer, path, file)
|
||||
}
|
||||
}
|
||||
};
|
||||
match result {
|
||||
Ok(count) => {
|
||||
@@ -322,7 +344,11 @@ impl Worker {
|
||||
) -> Result<u64> {
|
||||
if try!(file.metadata()).len() == 0 {
|
||||
// Opening a memory map with an empty file results in an error.
|
||||
return Ok(0);
|
||||
// However, this may not actually be an empty file! For example,
|
||||
// /proc/cpuinfo reports itself as an empty file, but it can
|
||||
// produce data when it's read from. Therefore, we fall back to
|
||||
// regular read calls.
|
||||
return self.search(printer, path, file);
|
||||
}
|
||||
let mmap = try!(Mmap::open(file, Protection::Read));
|
||||
Ok(self.args.searcher_buffer(
|
||||
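The `search_mmap` hunk above stops treating a zero-length file as matchless: special files such as /proc/cpuinfo report a length of zero yet still produce data when read, so the searcher falls back to ordinary reads instead of memory-mapping. The decision in isolation (`search_reader` and `search_mmap` are placeholder names, and the mmap body is elided):

```rust
use std::fs::File;
use std::io::{self, Read};
use std::path::Path;

/// Placeholder for the buffered, read(2)-based search.
fn search_reader(mut file: File) -> io::Result<u64> {
    let mut buf = Vec::new();
    file.read_to_end(&mut buf)?;
    Ok(buf.len() as u64)
}

/// Placeholder for the memory-map based search. A real implementation
/// would map the file; only the length check below matters here.
fn search_mmap(file: File) -> io::Result<u64> {
    search_reader(file)
}

fn search(path: &Path) -> io::Result<u64> {
    let file = File::open(path)?;
    if file.metadata()?.len() == 0 {
        // Zero length according to metadata does not necessarily mean
        // empty (e.g. /proc/cpuinfo), so use plain reads instead of mmap.
        return search_reader(file);
    }
    search_mmap(file)
}

fn main() -> io::Result<()> {
    println!("{} bytes", search(Path::new("Cargo.toml"))?);
    Ok(())
}
```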
src/out.rs | 59
@@ -48,8 +48,6 @@ impl Out {
|
||||
|
||||
/// If set, the separator is printed between matches from different files.
|
||||
/// By default, no separator is printed.
|
||||
///
|
||||
/// If sep is empty, then no file separator is printed.
|
||||
pub fn file_separator(mut self, sep: Vec<u8>) -> Out {
|
||||
self.file_separator = Some(sep);
|
||||
self
|
||||
@@ -317,3 +315,60 @@ impl<T: Terminal + Send> term::Terminal for ColoredTerminal<T> {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: Terminal + Send> term::Terminal for &'a mut ColoredTerminal<T> {
|
||||
type Output = T::Output;
|
||||
|
||||
fn fg(&mut self, fg: term::color::Color) -> term::Result<()> {
|
||||
(**self).fg(fg)
|
||||
}
|
||||
|
||||
fn bg(&mut self, bg: term::color::Color) -> term::Result<()> {
|
||||
(**self).bg(bg)
|
||||
}
|
||||
|
||||
fn attr(&mut self, attr: term::Attr) -> term::Result<()> {
|
||||
(**self).attr(attr)
|
||||
}
|
||||
|
||||
fn supports_attr(&self, attr: term::Attr) -> bool {
|
||||
(**self).supports_attr(attr)
|
||||
}
|
||||
|
||||
fn reset(&mut self) -> term::Result<()> {
|
||||
(**self).reset()
|
||||
}
|
||||
|
||||
fn supports_reset(&self) -> bool {
|
||||
(**self).supports_reset()
|
||||
}
|
||||
|
||||
fn supports_color(&self) -> bool {
|
||||
(**self).supports_color()
|
||||
}
|
||||
|
||||
fn cursor_up(&mut self) -> term::Result<()> {
|
||||
(**self).cursor_up()
|
||||
}
|
||||
|
||||
fn delete_line(&mut self) -> term::Result<()> {
|
||||
(**self).delete_line()
|
||||
}
|
||||
|
||||
fn carriage_return(&mut self) -> term::Result<()> {
|
||||
(**self).carriage_return()
|
||||
}
|
||||
|
||||
fn get_ref(&self) -> &Self::Output {
|
||||
(**self).get_ref()
|
||||
}
|
||||
|
||||
fn get_mut(&mut self) -> &mut Self::Output {
|
||||
(**self).get_mut()
|
||||
}
|
||||
|
||||
fn into_inner(self) -> Self::Output {
|
||||
// Good golly miss molly...
|
||||
unimplemented!()
|
||||
}
|
||||
}
|
||||
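The new `impl<'a, T: Terminal + Send> term::Terminal for &'a mut ColoredTerminal<T>` above simply forwards every method through `(**self)`, so a mutable borrow of the terminal can be handed to a printer without giving up the terminal itself. The same pattern in miniature, with a made-up `Emit` trait rather than the `term` crate's API:

```rust
/// A tiny stand-in trait; the real code forwards the `term::Terminal` API.
trait Emit {
    fn emit(&mut self, msg: &str);
    fn supports_color(&self) -> bool;
}

struct Console {
    color: bool,
}

impl Emit for Console {
    fn emit(&mut self, msg: &str) {
        println!("{}", msg);
    }
    fn supports_color(&self) -> bool {
        self.color
    }
}

/// Forward the trait through a mutable reference, exactly like the
/// `&'a mut ColoredTerminal<T>` impl above: each method dereferences
/// twice to reach the underlying value.
impl<'a, T: Emit> Emit for &'a mut T {
    fn emit(&mut self, msg: &str) {
        (**self).emit(msg)
    }
    fn supports_color(&self) -> bool {
        (**self).supports_color()
    }
}

fn print_with<E: Emit>(mut out: E, msg: &str) {
    if out.supports_color() {
        out.emit(&format!("\x1b[32m{}\x1b[0m", msg));
    } else {
        out.emit(msg);
    }
}

fn main() {
    let mut console = Console { color: false };
    // Lend out `&mut console`; the delegating impl makes it an `Emit`.
    print_with(&mut console, "hello");
    // Still usable afterwards because we only lent a borrow.
    console.emit("still here");
}
```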
|
@@ -98,3 +98,21 @@ pub fn is_hidden<P: AsRef<Path>>(path: P) -> bool {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns true if this file path is just a file name. i.e., Its parent is
|
||||
/// the empty string.
|
||||
#[cfg(unix)]
|
||||
pub fn is_file_name<P: AsRef<Path>>(path: P) -> bool {
|
||||
use std::os::unix::ffi::OsStrExt;
|
||||
use memchr::memchr;
|
||||
|
||||
let path = path.as_ref().as_os_str().as_bytes();
|
||||
memchr(b'/', path).is_none()
|
||||
}
|
||||
|
||||
/// Returns true if this file path is just a file name. i.e., Its parent is
|
||||
/// the empty string.
|
||||
#[cfg(not(unix))]
|
||||
pub fn is_file_name<P: AsRef<Path>>(path: P) -> bool {
|
||||
path.as_ref().parent().map(|p| p.as_os_str().is_empty()).unwrap_or(false)
|
||||
}
|
||||
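The `is_file_name` helpers above answer "is this path a bare file name?" — on Unix by scanning the raw bytes for `/` with `memchr`, elsewhere by checking that the parent component is empty. A usage-style sketch of the portable variant (it deliberately skips the Unix byte-level fast path):

```rust
use std::path::Path;

/// True if `path` has no directory component, i.e. its parent is empty.
fn is_file_name<P: AsRef<Path>>(path: P) -> bool {
    path.as_ref()
        .parent()
        .map(|p| p.as_os_str().is_empty())
        .unwrap_or(false)
}

fn main() {
    assert!(is_file_name(".gitignore"));
    assert!(!is_file_name("src/ignore.rs"));
    // The root has no parent at all, so it is not a bare file name.
    assert!(!is_file_name("/"));
    println!("ok");
}
```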
|
@@ -4,6 +4,7 @@ use regex::bytes::Regex;
|
||||
use term::{Attr, Terminal};
|
||||
use term::color;
|
||||
|
||||
use pathutil::strip_prefix;
|
||||
use types::FileTypeDef;
|
||||
|
||||
/// Printer encapsulates all output logic for searching.
|
||||
@@ -24,6 +25,8 @@ pub struct Printer<W> {
|
||||
/// printed via the match directly, but occasionally we need to insert them
|
||||
/// ourselves (for example, to print a context separator).
|
||||
eol: u8,
|
||||
/// A file separator to show before any matches are printed.
|
||||
file_separator: Option<Vec<u8>>,
|
||||
/// Whether to show file name as a heading or not.
|
||||
///
|
||||
/// N.B. If with_filename is false, then this setting has no effect.
|
||||
@@ -32,6 +35,9 @@ pub struct Printer<W> {
|
||||
line_per_match: bool,
|
||||
/// Whether to suppress all output.
|
||||
quiet: bool,
|
||||
/// Whether to print NUL bytes after a file path instead of new lines
|
||||
/// or `:`.
|
||||
null: bool,
|
||||
/// A string to use as a replacement of each match in a matching line.
|
||||
replace: Option<Vec<u8>>,
|
||||
/// Whether to prefix each match with the corresponding file name.
|
||||
@@ -47,9 +53,11 @@ impl<W: Terminal + Send> Printer<W> {
|
||||
column: false,
|
||||
context_separator: "--".to_string().into_bytes(),
|
||||
eol: b'\n',
|
||||
file_separator: None,
|
||||
heading: false,
|
||||
line_per_match: false,
|
||||
quiet: false,
|
||||
null: false,
|
||||
replace: None,
|
||||
with_filename: false,
|
||||
}
|
||||
@@ -74,6 +82,13 @@ impl<W: Terminal + Send> Printer<W> {
|
||||
self
|
||||
}
|
||||
|
||||
/// If set, the separator is printed before any matches. By default, no
|
||||
/// separator is printed.
|
||||
pub fn file_separator(mut self, sep: Vec<u8>) -> Printer<W> {
|
||||
self.file_separator = Some(sep);
|
||||
self
|
||||
}
|
||||
|
||||
/// Whether to show file name as a heading or not.
|
||||
///
|
||||
/// N.B. If with_filename is false, then this setting has no effect.
|
||||
@@ -88,6 +103,13 @@ impl<W: Terminal + Send> Printer<W> {
|
||||
self
|
||||
}
|
||||
|
||||
/// Whether to cause NUL bytes to follow file paths instead of other
|
||||
/// visual separators (like `:`, `-` and `\n`).
|
||||
pub fn null(mut self, yes: bool) -> Printer<W> {
|
||||
self.null = yes;
|
||||
self
|
||||
}
|
||||
|
||||
/// When set, all output is suppressed.
|
||||
pub fn quiet(mut self, yes: bool) -> Printer<W> {
|
||||
self.quiet = yes;
|
||||
@@ -115,6 +137,11 @@ impl<W: Terminal + Send> Printer<W> {
|
||||
self.has_printed
|
||||
}
|
||||
|
||||
/// Returns true if the printer has been configured to be quiet.
|
||||
pub fn is_quiet(&self) -> bool {
|
||||
self.quiet
|
||||
}
|
||||
|
||||
/// Flushes the underlying writer and returns it.
|
||||
pub fn into_inner(mut self) -> W {
|
||||
let _ = self.wtr.flush();
|
||||
@@ -138,15 +165,24 @@ impl<W: Terminal + Send> Printer<W> {
|
||||
|
||||
/// Prints the given path.
|
||||
pub fn path<P: AsRef<Path>>(&mut self, path: P) {
|
||||
self.write(path.as_ref().to_string_lossy().as_bytes());
|
||||
self.write_eol();
|
||||
let path = strip_prefix("./", path.as_ref()).unwrap_or(path.as_ref());
|
||||
self.write_path(path);
|
||||
if self.null {
|
||||
self.write(b"\x00");
|
||||
} else {
|
||||
self.write_eol();
|
||||
}
|
||||
}
|
||||
|
||||
/// Prints the given path and a count of the number of matches found.
|
||||
pub fn path_count<P: AsRef<Path>>(&mut self, path: P, count: u64) {
|
||||
if self.with_filename {
|
||||
self.write(path.as_ref().to_string_lossy().as_bytes());
|
||||
self.write(b":");
|
||||
self.write_path(path);
|
||||
if self.null {
|
||||
self.write(b"\x00");
|
||||
} else {
|
||||
self.write(b":");
|
||||
}
|
||||
}
|
||||
self.write(count.to_string().as_bytes());
|
||||
self.write_eol();
|
||||
@@ -179,7 +215,7 @@ impl<W: Terminal + Send> Printer<W> {
|
||||
let column =
|
||||
if self.column {
|
||||
Some(re.find(&buf[start..end])
|
||||
.map(|(s, _)| s + 1).unwrap_or(0) as u64)
|
||||
.map(|(s, _)| s).unwrap_or(0) as u64)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
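The `--column` fix above keeps the match offset 0-based (`.map(|(s, _)| s)`) and adds 1 only when the value is printed, so a match at the start of a line reports column 1 rather than 2. The arithmetic in isolation, with `find_offset` standing in for the regex search:

```rust
/// 0-based byte offset of the first occurrence of `needle` in `line`,
/// standing in for the `Regex::find` call in the printer.
fn find_offset(line: &str, needle: &str) -> Option<u64> {
    line.find(needle).map(|s| s as u64)
}

/// Render the column the way the fixed printer does: keep the offset
/// 0-based internally and add 1 only at output time.
fn column_display(line: &str, needle: &str) -> u64 {
    let zero_based = find_offset(line, needle).unwrap_or(0);
    zero_based + 1
}

fn main() {
    // "test" starts at byte offset 2, so --column should report 3.
    assert_eq!(column_display("zztest", "test"), 3);
    // A match at the very start of the line is column 1, not 2.
    assert_eq!(column_display("test", "test"), 1);
    println!("ok");
}
```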
@@ -204,16 +240,21 @@ impl<W: Terminal + Send> Printer<W> {
|
||||
column: Option<u64>,
|
||||
) {
|
||||
if self.heading && self.with_filename && !self.has_printed {
|
||||
self.write_file_sep();
|
||||
self.write_heading(path.as_ref());
|
||||
} else if !self.heading && self.with_filename {
|
||||
self.write(path.as_ref().to_string_lossy().as_bytes());
|
||||
self.write(b":");
|
||||
self.write_path(path.as_ref());
|
||||
if self.null {
|
||||
self.write(b"\x00");
|
||||
} else {
|
||||
self.write(b":");
|
||||
}
|
||||
}
|
||||
if let Some(line_number) = line_number {
|
||||
self.line_number(line_number, b':');
|
||||
}
|
||||
if let Some(c) = column {
|
||||
self.write(c.to_string().as_bytes());
|
||||
self.write((c + 1).to_string().as_bytes());
|
||||
self.write(b":");
|
||||
}
|
||||
if self.replace.is_some() {
|
||||
@@ -254,10 +295,15 @@ impl<W: Terminal + Send> Printer<W> {
|
||||
line_number: Option<u64>,
|
||||
) {
|
||||
if self.heading && self.with_filename && !self.has_printed {
|
||||
self.write_file_sep();
|
||||
self.write_heading(path.as_ref());
|
||||
} else if !self.heading && self.with_filename {
|
||||
self.write(path.as_ref().to_string_lossy().as_bytes());
|
||||
self.write(b"-");
|
||||
self.write_path(path.as_ref());
|
||||
if self.null {
|
||||
self.write(b"\x00");
|
||||
} else {
|
||||
self.write(b"-");
|
||||
}
|
||||
}
|
||||
if let Some(line_number) = line_number {
|
||||
self.line_number(line_number, b'-');
|
||||
@@ -273,8 +319,12 @@ impl<W: Terminal + Send> Printer<W> {
|
||||
let _ = self.wtr.fg(color::BRIGHT_GREEN);
|
||||
let _ = self.wtr.attr(Attr::Bold);
|
||||
}
|
||||
self.write(path.as_ref().to_string_lossy().as_bytes());
|
||||
self.write_eol();
|
||||
self.write_path(path.as_ref());
|
||||
if self.null {
|
||||
self.write(b"\x00");
|
||||
} else {
|
||||
self.write_eol();
|
||||
}
|
||||
if self.wtr.supports_color() {
|
||||
let _ = self.wtr.reset();
|
||||
}
|
||||
@@ -292,6 +342,19 @@ impl<W: Terminal + Send> Printer<W> {
|
||||
self.write(&[sep]);
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn write_path<P: AsRef<Path>>(&mut self, path: P) {
|
||||
use std::os::unix::ffi::OsStrExt;
|
||||
|
||||
let path = path.as_ref().as_os_str().as_bytes();
|
||||
self.write(path);
|
||||
}
|
||||
|
||||
#[cfg(not(unix))]
|
||||
fn write_path<P: AsRef<Path>>(&mut self, path: P) {
|
||||
self.write(path.as_ref().to_string_lossy().as_bytes());
|
||||
}
|
||||
|
||||
fn write(&mut self, buf: &[u8]) {
|
||||
if self.quiet {
|
||||
return;
|
||||
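With `--null`, every place the printer would normally follow a path with `\n`, `:` or `-` emits a single NUL byte instead, which keeps the output unambiguous for odd file names and pairs naturally with tools such as `xargs -0`. A reduced sketch of that branch, writing to any `io::Write` sink:

```rust
use std::io::{self, Write};
use std::path::Path;

/// Write `path` followed by either a NUL byte or the usual separator,
/// mirroring the `if self.null { ... } else { ... }` branches above.
fn write_path<W: Write>(wtr: &mut W, path: &Path, null: bool, sep: u8) -> io::Result<()> {
    wtr.write_all(path.to_string_lossy().as_bytes())?;
    if null {
        wtr.write_all(b"\x00")
    } else {
        wtr.write_all(&[sep])
    }
}

fn main() -> io::Result<()> {
    let mut out = Vec::new();
    write_path(&mut out, Path::new("sherlock"), true, b'\n')?;
    write_path(&mut out, Path::new("watson"), true, b'\n')?;
    // Two paths, each terminated by a NUL byte instead of a newline.
    assert_eq!(out, b"sherlock\x00watson\x00".to_vec());
    Ok(())
}
```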
@@ -304,4 +367,15 @@ impl<W: Terminal + Send> Printer<W> {
|
||||
let eol = self.eol;
|
||||
self.write(&[eol]);
|
||||
}
|
||||
|
||||
fn write_file_sep(&mut self) {
|
||||
if self.quiet {
|
||||
return;
|
||||
}
|
||||
if let Some(ref sep) = self.file_separator {
|
||||
self.has_printed = true;
|
||||
let _ = self.wtr.write_all(sep);
|
||||
let _ = self.wtr.write_all(b"\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -53,6 +53,14 @@ impl<'a, W: Send + Terminal> BufferSearcher<'a, W> {
|
||||
self
|
||||
}
|
||||
|
||||
/// If enabled, searching will print the path instead of each match.
|
||||
///
|
||||
/// Disabled by default.
|
||||
pub fn files_with_matches(mut self, yes: bool) -> Self {
|
||||
self.opts.files_with_matches = yes;
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the end-of-line byte used by this searcher.
|
||||
pub fn eol(mut self, eol: u8) -> Self {
|
||||
self.opts.eol = eol;
|
||||
@@ -96,6 +104,9 @@ impl<'a, W: Send + Terminal> BufferSearcher<'a, W> {
|
||||
self.print_match(m.start(), m.end());
|
||||
}
|
||||
last_end = m.end();
|
||||
if self.printer.is_quiet() || self.opts.files_with_matches {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if self.opts.invert_match {
|
||||
let upto = self.buf.len();
|
||||
@@ -104,13 +115,16 @@ impl<'a, W: Send + Terminal> BufferSearcher<'a, W> {
|
||||
if self.opts.count && self.match_count > 0 {
|
||||
self.printer.path_count(self.path, self.match_count);
|
||||
}
|
||||
if self.opts.files_with_matches && self.match_count > 0 {
|
||||
self.printer.path(self.path);
|
||||
}
|
||||
self.match_count
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn print_match(&mut self, start: usize, end: usize) {
|
||||
self.match_count += 1;
|
||||
if self.opts.count {
|
||||
if self.opts.skip_matches() {
|
||||
return;
|
||||
}
|
||||
self.count_lines(start);
|
||||
@@ -237,6 +251,14 @@ and exhibited clearly, with a label attached.\
|
||||
assert_eq!(out, "/baz.rs:2\n");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn files_with_matches() {
|
||||
let (count, out) = search(
|
||||
"Sherlock", SHERLOCK, |s| s.files_with_matches(true));
|
||||
assert_eq!(1, count);
|
||||
assert_eq!(out, "/baz.rs\n");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invert_match() {
|
||||
let (count, out) = search(
|
||||
|
@@ -80,6 +80,7 @@ pub struct Options {
|
||||
pub after_context: usize,
|
||||
pub before_context: usize,
|
||||
pub count: bool,
|
||||
pub files_with_matches: bool,
|
||||
pub eol: u8,
|
||||
pub invert_match: bool,
|
||||
pub line_number: bool,
|
||||
@@ -92,12 +93,22 @@ impl Default for Options {
|
||||
after_context: 0,
|
||||
before_context: 0,
|
||||
count: false,
|
||||
files_with_matches: false,
|
||||
eol: b'\n',
|
||||
invert_match: false,
|
||||
line_number: false,
|
||||
text: false,
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl Options {
|
||||
/// Both --count and --files-with-matches options imply that we should not
|
||||
/// display matches at all.
|
||||
pub fn skip_matches(&self) -> bool {
|
||||
return self.count || self.files_with_matches;
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, R: io::Read, W: Terminal + Send> Searcher<'a, R, W> {
|
||||
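`Options::skip_matches` above centralizes the rule that both `--count` and `--files-with-matches` suppress printing of individual matches, so the printing paths ask one question instead of two. A tiny sketch of that gating:

```rust
#[derive(Default)]
struct Options {
    count: bool,
    files_with_matches: bool,
}

impl Options {
    /// Both --count and --files-with-matches imply that individual
    /// matches should not be displayed.
    fn skip_matches(&self) -> bool {
        self.count || self.files_with_matches
    }
}

fn print_match(opts: &Options, line: &str) {
    if opts.skip_matches() {
        return;
    }
    println!("{}", line);
}

fn main() {
    let quiet_ish = Options { files_with_matches: true, ..Options::default() };
    print_match(&quiet_ish, "this line is suppressed");
    print_match(&Options::default(), "this line is printed");
}
```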
@@ -158,6 +169,14 @@ impl<'a, R: io::Read, W: Terminal + Send> Searcher<'a, R, W> {
|
||||
self
|
||||
}
|
||||
|
||||
/// If enabled, searching will print the path instead of each match.
|
||||
///
|
||||
/// Disabled by default.
|
||||
pub fn files_with_matches(mut self, yes: bool) -> Self {
|
||||
self.opts.files_with_matches = yes;
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the end-of-line byte used by this searcher.
|
||||
pub fn eol(mut self, eol: u8) -> Self {
|
||||
self.opts.eol = eol;
|
||||
@@ -193,7 +212,7 @@ impl<'a, R: io::Read, W: Terminal + Send> Searcher<'a, R, W> {
|
||||
self.line_count = if self.opts.line_number { Some(0) } else { None };
|
||||
self.last_match = Match::default();
|
||||
self.after_context_remaining = 0;
|
||||
loop {
|
||||
while !self.terminate() {
|
||||
let upto = self.inp.lastnl;
|
||||
self.print_after_context(upto);
|
||||
if !try!(self.fill()) {
|
||||
@@ -202,7 +221,7 @@ impl<'a, R: io::Read, W: Terminal + Send> Searcher<'a, R, W> {
|
||||
if !self.opts.text && self.inp.is_binary {
|
||||
break;
|
||||
}
|
||||
while self.inp.pos < self.inp.lastnl {
|
||||
while !self.terminate() && self.inp.pos < self.inp.lastnl {
|
||||
let matched = self.grep.read_match(
|
||||
&mut self.last_match,
|
||||
&mut self.inp.buf[..self.inp.lastnl],
|
||||
@@ -234,12 +253,22 @@ impl<'a, R: io::Read, W: Terminal + Send> Searcher<'a, R, W> {
|
||||
}
|
||||
}
|
||||
}
|
||||
if self.opts.count && self.match_count > 0 {
|
||||
self.printer.path_count(self.path, self.match_count);
|
||||
if self.match_count > 0 {
|
||||
if self.opts.count {
|
||||
self.printer.path_count(self.path, self.match_count);
|
||||
} else if self.opts.files_with_matches {
|
||||
self.printer.path(self.path);
|
||||
}
|
||||
}
|
||||
Ok(self.match_count)
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn terminate(&self) -> bool {
|
||||
self.match_count > 0
|
||||
&& (self.printer.is_quiet() || self.opts.files_with_matches)
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn fill(&mut self) -> Result<bool, Error> {
|
||||
let mut keep = self.inp.lastnl;
|
||||
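The new `terminate` check above lets the streaming searcher stop after the first match when `--quiet` or `--files-with-matches` is in effect, since neither mode needs anything beyond the fact that a match exists. A loop-shaped sketch of that early exit:

```rust
struct Searcher {
    match_count: u64,
    quiet: bool,
    files_with_matches: bool,
}

impl Searcher {
    /// True once further searching cannot change the output.
    fn terminate(&self) -> bool {
        self.match_count > 0 && (self.quiet || self.files_with_matches)
    }

    /// Count matching lines, bailing out early when possible.
    fn run(&mut self, lines: &[&str], needle: &str) -> u64 {
        for line in lines {
            if self.terminate() {
                break;
            }
            if line.contains(needle) {
                self.match_count += 1;
            }
        }
        self.match_count
    }
}

fn main() {
    let mut s = Searcher { match_count: 0, quiet: false, files_with_matches: true };
    // Stops after the first hit even though a later line also matches.
    assert_eq!(s.run(&["foo", "Sherlock", "Sherlock again"], "Sherlock"), 1);
    println!("ok");
}
```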
@@ -281,7 +310,7 @@ impl<'a, R: io::Read, W: Terminal + Send> Searcher<'a, R, W> {
|
||||
|
||||
#[inline(always)]
|
||||
fn print_before_context(&mut self, upto: usize) {
|
||||
if self.opts.count || self.opts.before_context == 0 {
|
||||
if self.opts.skip_matches() || self.opts.before_context == 0 {
|
||||
return;
|
||||
}
|
||||
let start = self.last_printed;
|
||||
@@ -304,7 +333,7 @@ impl<'a, R: io::Read, W: Terminal + Send> Searcher<'a, R, W> {
|
||||
|
||||
#[inline(always)]
|
||||
fn print_after_context(&mut self, upto: usize) {
|
||||
if self.opts.count || self.after_context_remaining == 0 {
|
||||
if self.opts.skip_matches() || self.after_context_remaining == 0 {
|
||||
return;
|
||||
}
|
||||
let start = self.last_printed;
|
||||
@@ -322,7 +351,7 @@ impl<'a, R: io::Read, W: Terminal + Send> Searcher<'a, R, W> {
|
||||
#[inline(always)]
|
||||
fn print_match(&mut self, start: usize, end: usize) {
|
||||
self.match_count += 1;
|
||||
if self.opts.count {
|
||||
if self.opts.skip_matches() {
|
||||
return;
|
||||
}
|
||||
self.print_separator(start);
|
||||
@@ -503,10 +532,6 @@ impl InputBuffer {
|
||||
if self.first && is_binary(&self.buf[self.end..self.end + n]) {
|
||||
self.is_binary = true;
|
||||
}
|
||||
if self.is_binary {
|
||||
replace_buf(
|
||||
&mut self.buf[self.end..self.end + n], b'\x00', self.eol);
|
||||
}
|
||||
self.first = false;
|
||||
// We assume that reading 0 bytes means we've hit EOF.
|
||||
if n == 0 {
|
||||
@@ -629,6 +654,7 @@ pub fn count_lines(buf: &[u8], eol: u8) -> u64 {
|
||||
}
|
||||
|
||||
/// Replaces a with b in buf.
|
||||
#[allow(dead_code)]
|
||||
fn replace_buf(buf: &mut [u8], a: u8, b: u8) {
|
||||
if a == b {
|
||||
return;
|
||||
@@ -970,7 +996,7 @@ fn main() {
|
||||
let text = "Sherlock\n\x00Holmes\n";
|
||||
let (count, out) = search("Sherlock|Holmes", text, |s| s.text(true));
|
||||
assert_eq!(2, count);
|
||||
assert_eq!(out, "/baz.rs:Sherlock\n/baz.rs:Holmes\n");
|
||||
assert_eq!(out, "/baz.rs:Sherlock\n/baz.rs:\x00Holmes\n");
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -992,6 +1018,14 @@ fn main() {
|
||||
assert_eq!(out, "/baz.rs:2\n");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn files_with_matches() {
|
||||
let (count, out) = search_smallcap(
|
||||
"Sherlock", SHERLOCK, |s| s.files_with_matches(true));
|
||||
assert_eq!(1, count);
|
||||
assert_eq!(out, "/baz.rs\n");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invert_match() {
|
||||
let (count, out) = search_smallcap(
|
||||
src/types.rs | 14
@@ -18,7 +18,7 @@ const TYPE_EXTENSIONS: &'static [(&'static str, &'static [&'static str])] = &[
|
||||
("awk", &["*.awk"]),
|
||||
("c", &["*.c", "*.h", "*.H"]),
|
||||
("cbor", &["*.cbor"]),
|
||||
("clojure", &["*.clj", "*.cljs"]),
|
||||
("clojure", &["*.clj", "*.cljc", "*.cljs", "*.cljx"]),
|
||||
("cmake", &["CMakeLists.txt"]),
|
||||
("coffeescript", &["*.coffee"]),
|
||||
("cpp", &[
|
||||
@@ -36,12 +36,15 @@ const TYPE_EXTENSIONS: &'static [(&'static str, &'static [&'static str])] = &[
|
||||
"*.f", "*.F", "*.f77", "*.F77", "*.pfo",
|
||||
"*.f90", "*.F90", "*.f95", "*.F95",
|
||||
]),
|
||||
("fsharp", &["*.fs", "*.fsx", "*.fsi"]),
|
||||
("go", &["*.go"]),
|
||||
("groovy", &["*.groovy"]),
|
||||
("haskell", &["*.hs", "*.lhs"]),
|
||||
("html", &["*.htm", "*.html"]),
|
||||
("java", &["*.java"]),
|
||||
("js", &["*.js"]),
|
||||
("js", &[
|
||||
"*.js", "*.jsx", "*.vue",
|
||||
]),
|
||||
("json", &["*.json"]),
|
||||
("jsonl", &["*.jsonl"]),
|
||||
("lisp", &["*.el", "*.jl", "*.lisp", "*.lsp", "*.sc", "*.scm"]),
|
||||
@@ -52,6 +55,7 @@ const TYPE_EXTENSIONS: &'static [(&'static str, &'static [&'static str])] = &[
|
||||
("matlab", &["*.m"]),
|
||||
("mk", &["mkfile"]),
|
||||
("ml", &["*.ml"]),
|
||||
("nim", &["*.nim"]),
|
||||
("objc", &["*.h", "*.m"]),
|
||||
("objcpp", &["*.h", "*.mm"]),
|
||||
("ocaml", &["*.ml", "*.mli", "*.mll", "*.mly"]),
|
||||
@@ -59,17 +63,21 @@ const TYPE_EXTENSIONS: &'static [(&'static str, &'static [&'static str])] = &[
|
||||
("php", &["*.php", "*.php3", "*.php4", "*.php5", "*.phtml"]),
|
||||
("py", &["*.py"]),
|
||||
("readme", &["README*", "*README"]),
|
||||
("rr", &["*.R"]),
|
||||
("r", &["*.R", "*.r", "*.Rmd", "*.Rnw"]),
|
||||
("rst", &["*.rst"]),
|
||||
("ruby", &["*.rb"]),
|
||||
("rust", &["*.rs"]),
|
||||
("scala", &["*.scala"]),
|
||||
("sh", &["*.bash", "*.csh", "*.ksh", "*.sh", "*.tcsh"]),
|
||||
("sql", &["*.sql"]),
|
||||
("sv", &["*.v", "*.vg", "*.sv", "*.svh", "*.h"]),
|
||||
("swift", &["*.swift"]),
|
||||
("tex", &["*.tex", "*.cls", "*.sty"]),
|
||||
("ts", &["*.ts", "*.tsx"]),
|
||||
("txt", &["*.txt"]),
|
||||
("toml", &["*.toml", "Cargo.lock"]),
|
||||
("vala", &["*.vala"]),
|
||||
("vb", &["*.vb"]),
|
||||
("vimscript", &["*.vim"]),
|
||||
("xml", &["*.xml"]),
|
||||
("yacc", &["*.y"]),
|
||||
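The type table above maps a type name to a set of globs (for example `("ts", &["*.ts", "*.tsx"])`), and a `--type` filter then restricts the search to files matching any of them. A simplified sketch of that lookup, using plain suffix checks in place of real glob matching:

```rust
/// A few entries in the spirit of TYPE_EXTENSIONS; suffix patterns only,
/// so this is a simplification of the real glob-based table.
const TYPES: &[(&str, &[&str])] = &[
    ("ts", &[".ts", ".tsx"]),
    ("r", &[".R", ".r", ".Rmd", ".Rnw"]),
    ("clojure", &[".clj", ".cljc", ".cljs", ".cljx"]),
];

/// Does `path` belong to the named type? Unknown types match nothing.
fn matches_type(type_name: &str, path: &str) -> bool {
    TYPES
        .iter()
        .find(|&&(name, _)| name == type_name)
        .map(|&(_, suffixes)| suffixes.iter().any(|&s| path.ends_with(s)))
        .unwrap_or(false)
}

fn main() {
    assert!(matches_type("ts", "src/app.tsx"));
    assert!(matches_type("r", "analysis.Rmd"));
    assert!(!matches_type("ts", "src/app.js"));
    println!("ok");
}
```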
tests/tests.rs | 307
@@ -34,6 +34,33 @@ macro_rules! sherlock {
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! clean {
|
||||
($name:ident, $query:expr, $path:expr, $fun:expr) => {
|
||||
#[test]
|
||||
fn $name() {
|
||||
let wd = WorkDir::new(stringify!($name));
|
||||
let mut cmd = wd.command();
|
||||
cmd.arg($query).arg($path);
|
||||
$fun(wd, cmd);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
fn path(unix: &str) -> String {
|
||||
if cfg!(windows) {
|
||||
unix.replace("/", "\\")
|
||||
} else {
|
||||
unix.to_string()
|
||||
}
|
||||
}
|
||||
|
||||
fn sort_lines(lines: &str) -> String {
|
||||
let mut lines: Vec<String> =
|
||||
lines.trim().lines().map(|s| s.to_owned()).collect();
|
||||
lines.sort();
|
||||
format!("{}\n", lines.join("\n"))
|
||||
}
|
||||
|
||||
sherlock!(single_file, |wd: WorkDir, mut cmd| {
|
||||
let lines: String = wd.stdout(&mut cmd);
|
||||
let expected = "\
|
||||
@@ -118,7 +145,11 @@ be, to a very large extent, the result of luck. Sherlock Holmes
|
||||
foo
|
||||
Sherlock Holmes lives on Baker Street.
|
||||
";
|
||||
assert!(lines == expected1 || lines == expected2);
|
||||
if lines != expected1 {
|
||||
assert_eq!(lines, expected2);
|
||||
} else {
|
||||
assert_eq!(lines, expected1);
|
||||
}
|
||||
});
|
||||
|
||||
sherlock!(inverted, |wd: WorkDir, mut cmd: Command| {
|
||||
@@ -280,6 +311,20 @@ sherlock!(glob_negate, "Sherlock", ".", |wd: WorkDir, mut cmd: Command| {
|
||||
assert_eq!(lines, "file.py:Sherlock\n");
|
||||
});
|
||||
|
||||
sherlock!(count, "Sherlock", ".", |wd: WorkDir, mut cmd: Command| {
|
||||
cmd.arg("--count");
|
||||
let lines: String = wd.stdout(&mut cmd);
|
||||
let expected = "sherlock:2\n";
|
||||
assert_eq!(lines, expected);
|
||||
});
|
||||
|
||||
sherlock!(files_with_matches, "Sherlock", ".", |wd: WorkDir, mut cmd: Command| {
|
||||
cmd.arg("--files-with-matches");
|
||||
let lines: String = wd.stdout(&mut cmd);
|
||||
let expected = "sherlock\n";
|
||||
assert_eq!(lines, expected);
|
||||
});
|
||||
|
||||
sherlock!(after_context, |wd: WorkDir, mut cmd: Command| {
|
||||
cmd.arg("-A").arg("1");
|
||||
let lines: String = wd.stdout(&mut cmd);
|
||||
@@ -377,6 +422,11 @@ sherlock!(ignore_git, "Sherlock", ".", |wd: WorkDir, mut cmd: Command| {
|
||||
wd.assert_err(&mut cmd);
|
||||
});
|
||||
|
||||
sherlock!(ignore_generic, "Sherlock", ".", |wd: WorkDir, mut cmd: Command| {
|
||||
wd.create(".ignore", "sherlock\n");
|
||||
wd.assert_err(&mut cmd);
|
||||
});
|
||||
|
||||
sherlock!(ignore_ripgrep, "Sherlock", ".", |wd: WorkDir, mut cmd: Command| {
|
||||
wd.create(".rgignore", "sherlock\n");
|
||||
wd.assert_err(&mut cmd);
|
||||
@@ -510,19 +560,11 @@ sherlock!(symlink_follow, "Sherlock", ".", |wd: WorkDir, mut cmd: Command| {
|
||||
cmd.current_dir(wd.path().join("foo/bar"));
|
||||
|
||||
let lines: String = wd.stdout(&mut cmd);
|
||||
if cfg!(windows) {
|
||||
let expected = "\
|
||||
baz\\sherlock:For the Doctor Watsons of this world, as opposed to the Sherlock
|
||||
baz\\sherlock:be, to a very large extent, the result of luck. Sherlock Holmes
|
||||
";
|
||||
assert_eq!(lines, expected);
|
||||
} else {
|
||||
let expected = "\
|
||||
let expected = "\
|
||||
baz/sherlock:For the Doctor Watsons of this world, as opposed to the Sherlock
|
||||
baz/sherlock:be, to a very large extent, the result of luck. Sherlock Holmes
|
||||
";
|
||||
assert_eq!(lines, expected);
|
||||
}
|
||||
assert_eq!(lines, path(expected));
|
||||
});
|
||||
|
||||
sherlock!(unrestricted1, "Sherlock", ".", |wd: WorkDir, mut cmd: Command| {
|
||||
@@ -556,7 +598,7 @@ sherlock!(unrestricted3, "foo", ".", |wd: WorkDir, mut cmd: Command| {
|
||||
cmd.arg("-uuu");
|
||||
|
||||
let lines: String = wd.stdout(&mut cmd);
|
||||
assert_eq!(lines, "file:foo\nfile:foo\n");
|
||||
assert_eq!(lines, "file:foo\x00bar\nfile:foo\x00baz\n");
|
||||
});
|
||||
|
||||
// On Windows, this test uses memory maps, so the NUL bytes don't get replaced.
|
||||
@@ -574,10 +616,217 @@ sherlock!(vimgrep, "Sherlock|Watson", ".", |wd: WorkDir, mut cmd: Command| {
|
||||
|
||||
let lines: String = wd.stdout(&mut cmd);
|
||||
let expected = "\
|
||||
sherlock:1:15:For the Doctor Watsons of this world, as opposed to the Sherlock
|
||||
sherlock:1:56:For the Doctor Watsons of this world, as opposed to the Sherlock
|
||||
sherlock:3:48:be, to a very large extent, the result of luck. Sherlock Holmes
|
||||
sherlock:5:11:but Doctor Watson has to have it taken out for him and dusted,
|
||||
sherlock:1:16:For the Doctor Watsons of this world, as opposed to the Sherlock
|
||||
sherlock:1:57:For the Doctor Watsons of this world, as opposed to the Sherlock
|
||||
sherlock:3:49:be, to a very large extent, the result of luck. Sherlock Holmes
|
||||
sherlock:5:12:but Doctor Watson has to have it taken out for him and dusted,
|
||||
";
|
||||
assert_eq!(lines, expected);
|
||||
});
|
||||
|
||||
// See: https://github.com/BurntSushi/ripgrep/issues/16
|
||||
clean!(regression_16, "xyz", ".", |wd: WorkDir, mut cmd: Command| {
|
||||
wd.create(".gitignore", "ghi/");
|
||||
wd.create_dir("ghi");
|
||||
wd.create_dir("def/ghi");
|
||||
wd.create("ghi/toplevel.txt", "xyz");
|
||||
wd.create("def/ghi/subdir.txt", "xyz");
|
||||
wd.assert_err(&mut cmd);
|
||||
});
|
||||
|
||||
// See: https://github.com/BurntSushi/ripgrep/issues/25
|
||||
clean!(regression_25, "test", ".", |wd: WorkDir, mut cmd: Command| {
|
||||
wd.create(".gitignore", "/llvm/");
|
||||
wd.create_dir("src/llvm");
|
||||
wd.create("src/llvm/foo", "test");
|
||||
|
||||
let lines: String = wd.stdout(&mut cmd);
|
||||
let expected = path("src/llvm/foo:test\n");
|
||||
assert_eq!(lines, expected);
|
||||
|
||||
cmd.current_dir(wd.path().join("src"));
|
||||
let lines: String = wd.stdout(&mut cmd);
|
||||
let expected = path("llvm/foo:test\n");
|
||||
assert_eq!(lines, expected);
|
||||
});
|
||||
|
||||
// See: https://github.com/BurntSushi/ripgrep/issues/30
|
||||
clean!(regression_30, "test", ".", |wd: WorkDir, mut cmd: Command| {
|
||||
if cfg!(windows) {
|
||||
wd.create(".gitignore", "vendor/**\n!vendor\\manifest");
|
||||
} else {
|
||||
wd.create(".gitignore", "vendor/**\n!vendor/manifest");
|
||||
}
|
||||
wd.create_dir("vendor");
|
||||
wd.create("vendor/manifest", "test");
|
||||
cmd.arg("--debug");
|
||||
|
||||
let lines: String = wd.stdout(&mut cmd);
|
||||
let expected = path("vendor/manifest:test\n");
|
||||
assert_eq!(lines, expected);
|
||||
});
|
||||
|
||||
// See: https://github.com/BurntSushi/ripgrep/issues/49
|
||||
clean!(regression_49, "xyz", ".", |wd: WorkDir, mut cmd: Command| {
|
||||
wd.create(".gitignore", "foo/bar");
|
||||
wd.create_dir("test/foo/bar");
|
||||
wd.create("test/foo/bar/baz", "test");
|
||||
wd.assert_err(&mut cmd);
|
||||
});
|
||||
|
||||
// See: https://github.com/BurntSushi/ripgrep/issues/50
|
||||
clean!(regression_50, "xyz", ".", |wd: WorkDir, mut cmd: Command| {
|
||||
wd.create(".gitignore", "XXX/YYY/");
|
||||
wd.create_dir("abc/def/XXX/YYY");
|
||||
wd.create_dir("ghi/XXX/YYY");
|
||||
wd.create("abc/def/XXX/YYY/bar", "test");
|
||||
wd.create("ghi/XXX/YYY/bar", "test");
|
||||
wd.assert_err(&mut cmd);
|
||||
});
|
||||
|
||||
// See: https://github.com/BurntSushi/ripgrep/issues/65
|
||||
clean!(regression_65, "xyz", ".", |wd: WorkDir, mut cmd: Command| {
|
||||
wd.create(".gitignore", "a/");
|
||||
wd.create_dir("a");
|
||||
wd.create("a/foo", "xyz");
|
||||
wd.create("a/bar", "xyz");
|
||||
wd.assert_err(&mut cmd);
|
||||
});
|
||||
|
||||
// See: https://github.com/BurntSushi/ripgrep/issues/67
|
||||
clean!(regression_67, "test", ".", |wd: WorkDir, mut cmd: Command| {
|
||||
wd.create(".gitignore", "/*\n!/dir");
|
||||
wd.create_dir("dir");
|
||||
wd.create_dir("foo");
|
||||
wd.create("foo/bar", "test");
|
||||
wd.create("dir/bar", "test");
|
||||
|
||||
let lines: String = wd.stdout(&mut cmd);
|
||||
assert_eq!(lines, path("dir/bar:test\n"));
|
||||
});
|
||||
|
||||
// See: https://github.com/BurntSushi/ripgrep/issues/90
|
||||
clean!(regression_90, "test", ".", |wd: WorkDir, mut cmd: Command| {
|
||||
wd.create(".gitignore", "!.foo");
|
||||
wd.create(".foo", "test");
|
||||
|
||||
let lines: String = wd.stdout(&mut cmd);
|
||||
assert_eq!(lines, ".foo:test\n");
|
||||
});
|
||||
|
||||
// See: https://github.com/BurntSushi/ripgrep/issues/93
|
||||
clean!(regression_93, r"(\d{1,3}\.){3}\d{1,3}", ".",
|
||||
|wd: WorkDir, mut cmd: Command| {
|
||||
wd.create("foo", "192.168.1.1");
|
||||
|
||||
let lines: String = wd.stdout(&mut cmd);
|
||||
assert_eq!(lines, "foo:192.168.1.1\n");
|
||||
});
|
||||
|
||||
// See: https://github.com/BurntSushi/ripgrep/issues/99
|
||||
clean!(regression_99, "test", ".",
|
||||
|wd: WorkDir, mut cmd: Command| {
|
||||
wd.create("foo1", "test");
|
||||
wd.create("foo2", "zzz");
|
||||
wd.create("bar", "test");
|
||||
cmd.arg("-j1").arg("--heading");
|
||||
|
||||
let lines: String = wd.stdout(&mut cmd);
|
||||
assert_eq!(sort_lines(&lines), sort_lines("bar\ntest\n\nfoo1\ntest\n"));
|
||||
});
|
||||
|
||||
// See: https://github.com/BurntSushi/ripgrep/issues/105
|
||||
clean!(regression_105_part1, "test", ".", |wd: WorkDir, mut cmd: Command| {
|
||||
wd.create("foo", "zztest");
|
||||
cmd.arg("--vimgrep");
|
||||
|
||||
let lines: String = wd.stdout(&mut cmd);
|
||||
assert_eq!(lines, "foo:1:3:zztest\n");
|
||||
});
|
||||
|
||||
// See: https://github.com/BurntSushi/ripgrep/issues/105
|
||||
clean!(regression_105_part2, "test", ".", |wd: WorkDir, mut cmd: Command| {
|
||||
wd.create("foo", "zztest");
|
||||
cmd.arg("--column");
|
||||
|
||||
let lines: String = wd.stdout(&mut cmd);
|
||||
assert_eq!(lines, "foo:3:zztest\n");
|
||||
});
|
||||
|
||||
// See: https://github.com/BurntSushi/ripgrep/issues/20
|
||||
sherlock!(feature_20, "Sherlock", ".", |wd: WorkDir, mut cmd: Command| {
|
||||
cmd.arg("--no-filename");
|
||||
|
||||
let lines: String = wd.stdout(&mut cmd);
|
||||
let expected = "\
|
||||
For the Doctor Watsons of this world, as opposed to the Sherlock
|
||||
be, to a very large extent, the result of luck. Sherlock Holmes
|
||||
";
|
||||
assert_eq!(lines, expected);
|
||||
});
|
||||
|
||||
// See: https://github.com/BurntSushi/ripgrep/issues/68
|
||||
clean!(feature_68, "test", ".", |wd: WorkDir, mut cmd: Command| {
|
||||
wd.create(".gitignore", "foo");
|
||||
wd.create(".ignore", "bar");
|
||||
wd.create("foo", "test");
|
||||
wd.create("bar", "test");
|
||||
cmd.arg("--no-ignore-vcs");
|
||||
|
||||
let lines: String = wd.stdout(&mut cmd);
|
||||
assert_eq!(lines, "foo:test\n");
|
||||
});
|
||||
|
||||
// See: https://github.com/BurntSushi/ripgrep/issues/70
|
||||
sherlock!(feature_70, "sherlock", ".", |wd: WorkDir, mut cmd: Command| {
|
||||
cmd.arg("--smart-case");
|
||||
|
||||
let lines: String = wd.stdout(&mut cmd);
|
||||
let expected = "\
|
||||
sherlock:For the Doctor Watsons of this world, as opposed to the Sherlock
|
||||
sherlock:be, to a very large extent, the result of luck. Sherlock Holmes
|
||||
";
|
||||
assert_eq!(lines, expected);
|
||||
});
|
||||
|
||||
// See: https://github.com/BurntSushi/ripgrep/issues/89
|
||||
sherlock!(feature_89_files_with_matches, "Sherlock", ".",
|
||||
|wd: WorkDir, mut cmd: Command| {
|
||||
cmd.arg("--null").arg("--files-with-matches");
|
||||
|
||||
let lines: String = wd.stdout(&mut cmd);
|
||||
assert_eq!(lines, "sherlock\x00");
|
||||
});
|
||||
|
||||
// See: https://github.com/BurntSushi/ripgrep/issues/89
|
||||
sherlock!(feature_89_count, "Sherlock", ".",
|
||||
|wd: WorkDir, mut cmd: Command| {
|
||||
cmd.arg("--null").arg("--count");
|
||||
|
||||
let lines: String = wd.stdout(&mut cmd);
|
||||
assert_eq!(lines, "sherlock\x002\n");
|
||||
});
|
||||
|
||||
// See: https://github.com/BurntSushi/ripgrep/issues/89
|
||||
sherlock!(feature_89_files, "NADA", ".",
|
||||
|wd: WorkDir, mut cmd: Command| {
|
||||
cmd.arg("--null").arg("--files");
|
||||
|
||||
let lines: String = wd.stdout(&mut cmd);
|
||||
assert_eq!(lines, "sherlock\x00");
|
||||
});
|
||||
|
||||
// See: https://github.com/BurntSushi/ripgrep/issues/89
|
||||
sherlock!(feature_89_match, "Sherlock", ".",
|
||||
|wd: WorkDir, mut cmd: Command| {
|
||||
cmd.arg("--null").arg("-C1");
|
||||
|
||||
let lines: String = wd.stdout(&mut cmd);
|
||||
let expected = "\
|
||||
sherlock\x00For the Doctor Watsons of this world, as opposed to the Sherlock
|
||||
sherlock\x00Holmeses, success in the province of detective work must always
|
||||
sherlock\x00be, to a very large extent, the result of luck. Sherlock Holmes
|
||||
sherlock\x00can extract a clew from a wisp of straw or a flake of cigar ash;
|
||||
";
|
||||
assert_eq!(lines, expected);
|
||||
});
|
||||
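Several tests above (for example `regression_99`) compare output through `sort_lines`, because a directory walk may visit files in either order; sorting both sides makes the assertion order-independent. The helper lifted into a self-contained example:

```rust
/// Sort the lines of a blob of output so tests don't depend on the order
/// in which files were visited (same idea as the helper in tests/tests.rs).
fn sort_lines(lines: &str) -> String {
    let mut lines: Vec<String> = lines.trim().lines().map(|s| s.to_owned()).collect();
    lines.sort();
    format!("{}\n", lines.join("\n"))
}

fn main() {
    let got = "foo1\ntest\n\nbar\ntest\n";
    let want = "bar\ntest\n\nfoo1\ntest\n";
    // Different orderings, same set of lines.
    assert_eq!(sort_lines(got), sort_lines(want));
    println!("ok");
}
```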
@@ -619,7 +868,7 @@ fn binary_search_no_mmap() {
|
||||
let mut cmd = wd.command();
|
||||
cmd.arg("-a").arg("--no-mmap").arg("foo").arg("file");
|
||||
let lines: String = wd.stdout(&mut cmd);
|
||||
assert_eq!(lines, "foo\nfoo\n");
|
||||
assert_eq!(lines, "foo\x00bar\nfoo\x00baz\n");
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -632,13 +881,23 @@ fn files() {
|
||||
let mut cmd = wd.command();
|
||||
cmd.arg("--files");
|
||||
let lines: String = wd.stdout(&mut cmd);
|
||||
if cfg!(windows) {
|
||||
assert!(lines == "./dir\\file\n./file\n"
|
||||
|| lines == "./file\n./dir\\file\n");
|
||||
} else {
|
||||
assert!(lines == "./file\n./dir/file\n"
|
||||
|| lines == "./dir/file\n./file\n");
|
||||
}
|
||||
assert!(lines == path("file\ndir/file\n")
|
||||
|| lines == path("dir/file\nfile\n"));
|
||||
}
|
||||
|
||||
// See: https://github.com/BurntSushi/ripgrep/issues/64
|
||||
#[test]
|
||||
fn regression_64() {
|
||||
let wd = WorkDir::new("regression_64");
|
||||
wd.create_dir("dir");
|
||||
wd.create_dir("foo");
|
||||
wd.create("dir/abc", "");
|
||||
wd.create("foo/abc", "");
|
||||
|
||||
let mut cmd = wd.command();
|
||||
cmd.arg("--files").arg("foo");
|
||||
let lines: String = wd.stdout(&mut cmd);
|
||||
assert_eq!(lines, path("foo/abc\n"));
|
||||
}
|
||||
|
||||
#[test]
|
||||