Mirror of https://github.com/BurntSushi/ripgrep.git, synced 2025-05-19 01:30:21 -07:00
edition: manual changes
This is mostly just about removing 'extern crate' everywhere and fixing the fallout.
commit e824531e38 (parent af54069c51)
Cargo.lock (generated): 4 lines changed
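For context: in the Rust 2018 edition, crates listed in `Cargo.toml` are in scope automatically, so `extern crate` declarations (and `#[macro_use]` imports of macros such as `debug!`) become unnecessary; macros can instead be invoked through their crate path, e.g. `log::debug!`. The sketch below illustrates the before/after pattern this commit applies across the workspace; it is not code from the diff itself, and `env_logger` appears only to make the example runnable.

```rust
// Rust 2015 style (what this commit removes from each crate root):
//
//     #[macro_use]
//     extern crate log;
//
//     fn search() {
//         debug!("starting search");
//     }
//
// Rust 2018 style (what this commit switches to): no `extern crate`,
// the macro is reached through its crate path instead. Assumes `log`
// and `env_logger` are declared in Cargo.toml.

fn search() {
    log::debug!("starting search");
}

fn main() {
    env_logger::init();
    search();
}
```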
@@ -224,7 +224,6 @@ dependencies = [
  "grep-regex",
  "grep-searcher",
  "serde",
- "serde_derive",
  "serde_json",
  "termcolor",
 ]
@@ -500,6 +499,9 @@ name = "serde"
 version = "1.0.125"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "558dc50e1a5a5fa7112ca2ce4effcb321b0300c0d4ccf0776a9f60cd89031171"
+dependencies = [
+ "serde_derive",
+]
 
 [[package]]
 name = "serde_derive"
@@ -29,9 +29,3 @@ Add this to your `Cargo.toml`:
 [dependencies]
 grep-cli = "0.1"
 ```
-
-and this to your crate root:
-
-```rust
-extern crate grep_cli;
-```
@@ -230,7 +230,7 @@ impl DecompressionReaderBuilder {
         match self.command_builder.build(&mut cmd) {
             Ok(cmd_reader) => Ok(DecompressionReader { rdr: Ok(cmd_reader) }),
             Err(err) => {
-                debug!(
+                log::debug!(
                     "{}: error spawning command '{:?}': {} \
                      (falling back to uncompressed reader)",
                     path.display(),
@@ -479,7 +479,7 @@ fn default_decompression_commands() -> Vec<DecompressionCommand> {
         let bin = match resolve_binary(Path::new(args[0])) {
             Ok(bin) => bin,
             Err(err) => {
-                debug!("{}", err);
+                log::debug!("{}", err);
                 return;
             }
         };
@@ -88,7 +88,7 @@ impl From<ParseSizeError> for io::Error {
 ///
 /// Additional suffixes may be added over time.
 pub fn parse_human_readable_size(size: &str) -> Result<u64, ParseSizeError> {
-    lazy_static! {
+    lazy_static::lazy_static! {
         // Normally I'd just parse something this simple by hand to avoid the
         // regex dep, but we bring regex in any way for glob matching, so might
         // as well use it.
@@ -158,13 +158,6 @@ error message is crafted that typically tells the user how to fix the problem.
 
 #![deny(missing_docs)]
 
-use atty;
-
-#[macro_use]
-extern crate lazy_static;
-#[macro_use]
-extern crate log;
-
 mod decompress;
 mod escape;
 mod human;
@@ -254,7 +254,7 @@ impl CommandReader {
 impl Drop for CommandReader {
     fn drop(&mut self) {
         if let Err(error) = self.close() {
-            warn!("{}", error);
+            log::warn!("{}", error);
         }
     }
 }
@@ -22,12 +22,6 @@ Add this to your `Cargo.toml`:
 globset = "0.3"
 ```
 
-and this to your crate root:
-
-```rust
-extern crate globset;
-```
-
 ### Features
 
 * `serde1`: Enables implementing Serde traits on the `Glob` type.
@@ -4,9 +4,6 @@ tool itself, see the benchsuite directory.
 */
 #![feature(test)]
 
-use glob;
-
-
 extern crate test;
 
 use globset::{Candidate, Glob, GlobMatcher, GlobSet, GlobSetBuilder};
@@ -103,16 +103,6 @@ or to enable case insensitive matching.
 
 #![deny(missing_docs)]
 
-
-
-use fnv;
-#[macro_use]
-extern crate log;
-use regex;
-
-#[cfg(feature = "serde1")]
-extern crate serde;
-
 use std::borrow::Cow;
 use std::collections::{BTreeMap, HashMap};
 use std::error::Error as StdError;
@@ -423,12 +413,12 @@ impl GlobSet {
                     required_exts.add(i, ext, p.regex().to_owned());
                 }
                 MatchStrategy::Regex => {
-                    debug!("glob converted to regex: {:?}", p);
+                    log::debug!("glob converted to regex: {:?}", p);
                     regexes.add(i, p.regex().to_owned());
                 }
             }
         }
-        debug!(
+        log::debug!(
             "built glob set; {} literals, {} basenames, {} extensions, \
              {} prefixes, {} suffixes, {} required extensions, {} regexes",
             lits.0.len(),
@@ -556,7 +546,11 @@ impl GlobSetMatchStrategy {
         }
     }
 
-    fn matches_into(&self, candidate: &Candidate<'_>, matches: &mut Vec<usize>) {
+    fn matches_into(
+        &self,
+        candidate: &Candidate<'_>,
+        matches: &mut Vec<usize>,
+    ) {
         use self::GlobSetMatchStrategy::*;
         match *self {
             Literal(ref s) => s.matches_into(candidate, matches),
@@ -587,7 +581,11 @@ impl LiteralStrategy {
     }
 
     #[inline(never)]
-    fn matches_into(&self, candidate: &Candidate<'_>, matches: &mut Vec<usize>) {
+    fn matches_into(
+        &self,
+        candidate: &Candidate<'_>,
+        matches: &mut Vec<usize>,
+    ) {
         if let Some(hits) = self.0.get(candidate.path.as_bytes()) {
             matches.extend(hits);
         }
@@ -614,7 +612,11 @@ impl BasenameLiteralStrategy {
     }
 
     #[inline(never)]
-    fn matches_into(&self, candidate: &Candidate<'_>, matches: &mut Vec<usize>) {
+    fn matches_into(
+        &self,
+        candidate: &Candidate<'_>,
+        matches: &mut Vec<usize>,
+    ) {
         if candidate.basename.is_empty() {
             return;
         }
@@ -644,7 +646,11 @@ impl ExtensionStrategy {
     }
 
     #[inline(never)]
-    fn matches_into(&self, candidate: &Candidate<'_>, matches: &mut Vec<usize>) {
+    fn matches_into(
+        &self,
+        candidate: &Candidate<'_>,
+        matches: &mut Vec<usize>,
+    ) {
         if candidate.ext.is_empty() {
             return;
         }
@@ -672,7 +678,11 @@ impl PrefixStrategy {
         false
     }
 
-    fn matches_into(&self, candidate: &Candidate<'_>, matches: &mut Vec<usize>) {
+    fn matches_into(
+        &self,
+        candidate: &Candidate<'_>,
+        matches: &mut Vec<usize>,
+    ) {
         let path = candidate.path_prefix(self.longest);
         for m in self.matcher.find_overlapping_iter(path) {
             if m.start() == 0 {
@@ -700,7 +710,11 @@ impl SuffixStrategy {
         false
     }
 
-    fn matches_into(&self, candidate: &Candidate<'_>, matches: &mut Vec<usize>) {
+    fn matches_into(
+        &self,
+        candidate: &Candidate<'_>,
+        matches: &mut Vec<usize>,
+    ) {
         let path = candidate.path_suffix(self.longest);
         for m in self.matcher.find_overlapping_iter(path) {
             if m.end() == path.len() {
@@ -732,7 +746,11 @@ impl RequiredExtensionStrategy {
     }
 
     #[inline(never)]
-    fn matches_into(&self, candidate: &Candidate<'_>, matches: &mut Vec<usize>) {
+    fn matches_into(
+        &self,
+        candidate: &Candidate<'_>,
+        matches: &mut Vec<usize>,
+    ) {
         if candidate.ext.is_empty() {
             return;
         }
@@ -757,7 +775,11 @@ impl RegexSetStrategy {
         self.matcher.is_match(candidate.path.as_bytes())
     }
 
-    fn matches_into(&self, candidate: &Candidate<'_>, matches: &mut Vec<usize>) {
+    fn matches_into(
+        &self,
+        candidate: &Candidate<'_>,
+        matches: &mut Vec<usize>,
+    ) {
         for i in self.matcher.matches(candidate.path.as_bytes()) {
             matches.push(self.map[i]);
         }
@@ -26,12 +26,6 @@ Add this to your `Cargo.toml`:
 grep = "0.2"
 ```
 
-and this to your crate root:
-
-```rust
-extern crate grep;
-```
-
 
 ### Features
 
@@ -22,12 +22,6 @@ Add this to your `Cargo.toml`:
 ignore = "0.4"
 ```
 
-and this to your crate root:
-
-```rust
-extern crate ignore;
-```
-
 ### Example
 
 This example shows the most basic usage of this crate. This code will
@@ -1,7 +1,3 @@
-extern crate crossbeam_channel as channel;
-use ignore;
-use walkdir;
-
 use std::env;
 use std::io::{self, Write};
 use std::path::Path;
@@ -14,7 +10,7 @@ fn main() {
     let mut path = env::args().nth(1).unwrap();
     let mut parallel = false;
     let mut simple = false;
-    let (tx, rx) = channel::bounded::<DirEntry>(100);
+    let (tx, rx) = crossbeam_channel::bounded::<DirEntry>(100);
     if path == "parallel" {
         path = env::args().nth(2).unwrap();
         parallel = true;
@@ -581,7 +581,7 @@ impl IgnoreBuilder {
                 .unwrap();
             let (gi, err) = builder.build_global();
             if let Some(err) = err {
-                debug!("{}", err);
+                log::debug!("{}", err);
             }
             gi
         };
@@ -592,7 +592,7 @@ fn parse_excludes_file(data: &[u8]) -> Option<PathBuf> {
     // N.B. This is the lazy approach, and isn't technically correct, but
     // probably works in more circumstances. I guess we would ideally have
     // a full INI parser. Yuck.
-    lazy_static! {
+    lazy_static::lazy_static! {
         static ref RE: Regex =
             Regex::new(r"(?im)^\s*excludesfile\s*=\s*(.+)\s*$").unwrap();
     };
@@ -46,19 +46,6 @@ See the documentation for `WalkBuilder` for many other options.
 
 #![deny(missing_docs)]
 
-
-#[macro_use]
-extern crate lazy_static;
-#[macro_use]
-extern crate log;
-
-
-
-
-use walkdir;
-#[cfg(windows)]
-extern crate winapi_util;
-
 use std::error;
 use std::fmt;
 use std::io;
@@ -427,7 +427,7 @@ impl TypesBuilder {
     /// If `name` is `all` or otherwise contains any character that is not a
     /// Unicode letter or number, then an error is returned.
     pub fn add(&mut self, name: &str, glob: &str) -> Result<(), Error> {
-        lazy_static! {
+        lazy_static::lazy_static! {
             static ref RE: Regex = Regex::new(r"^[\pL\pN]+$").unwrap();
         };
         if name == "all" || !RE.is_match(name) {
@@ -1725,7 +1725,7 @@ fn skip_filesize(
 
     if let Some(fs) = filesize {
         if fs > max_filesize {
-            debug!("ignoring {}: {} bytes", path.display(), fs);
+            log::debug!("ignoring {}: {} bytes", path.display(), fs);
             true
         } else {
             false
@@ -1738,10 +1738,10 @@ fn skip_filesize(
 fn should_skip_entry(ig: &Ignore, dent: &DirEntry) -> bool {
     let m = ig.matched_dir_entry(dent);
     if m.is_ignore() {
-        debug!("ignoring {}: {:?}", dent.path().display(), m);
+        log::debug!("ignoring {}: {:?}", dent.path().display(), m);
         true
     } else if m.is_whitelist() {
-        debug!("whitelisting {}: {:?}", dent.path().display(), m);
+        log::debug!("whitelisting {}: {:?}", dent.path().display(), m);
         false
    } else {
         false
@@ -27,9 +27,3 @@ Add this to your `Cargo.toml`:
 [dependencies]
 grep-matcher = "0.1"
 ```
-
-and this to your crate root:
-
-```rust
-extern crate grep_matcher;
-```
@@ -30,9 +30,3 @@ Add this to your `Cargo.toml`:
 [dependencies]
 grep-pcre2 = "0.1"
 ```
-
-and this to your crate root:
-
-```rust
-extern crate grep_pcre2;
-```
@@ -16,7 +16,7 @@ edition = "2018"
 
 [features]
 default = ["serde1"]
-serde1 = ["base64", "serde", "serde_derive", "serde_json"]
+serde1 = ["base64", "serde", "serde_json"]
 
 [dependencies]
 base64 = { version = "0.13.0", optional = true }
@@ -24,8 +24,7 @@ bstr = "0.2.0"
 grep-matcher = { version = "0.1.2", path = "../matcher" }
 grep-searcher = { version = "0.1.4", path = "../searcher" }
 termcolor = "1.0.4"
-serde = { version = "1.0.77", optional = true }
-serde_derive = { version = "1.0.77", optional = true }
+serde = { version = "1.0.77", optional = true, features = ["derive"] }
 serde_json = { version = "1.0.27", optional = true }
 
 [dev-dependencies]
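The `serde_derive` dependency can be dropped here because enabling serde's `derive` feature re-exports the same derive macros from the `serde` crate itself, which is what allows the `derive(serde::Serialize)` attribute used further down on the printer's `Stats` type. A minimal sketch of the pattern, assuming `serde` (with the `derive` feature) and `serde_json` are in `Cargo.toml`; the struct and fields are illustrative, not taken from grep-printer:

```rust
// Cargo.toml (sketch):
//
//     [dependencies]
//     serde = { version = "1", features = ["derive"] }
//     serde_json = "1"

// With the `derive` feature enabled, the derive macro is reachable as
// `serde::Serialize`; no separate `serde_derive` import is needed.
#[derive(Debug, serde::Serialize)]
struct Stats {
    searches: u64,
    matches: u64,
}

fn main() {
    let stats = Stats { searches: 3, matches: 42 };
    // serde_json is used only to show the derived impl in action.
    println!("{}", serde_json::to_string(&stats).unwrap());
}
```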
@@ -26,9 +26,3 @@ Add this to your `Cargo.toml`:
 [dependencies]
 grep-printer = "0.1"
 ```
-
-and this to your crate root:
-
-```rust
-extern crate grep_printer;
-```
@@ -147,9 +147,6 @@ pub struct ColorSpecs {
 /// A `UserColorSpec` can also be converted to a `termcolor::ColorSpec`:
 ///
 /// ```rust
-/// extern crate grep_printer;
-/// extern crate termcolor;
-///
 /// # fn main() {
 /// use termcolor::{Color, ColorSpec};
 /// use grep_printer::UserColorSpec;
@@ -27,10 +27,6 @@ contain matches.
 This example shows how to create a "standard" printer and execute a search.
 
 ```
-extern crate grep_regex;
-extern crate grep_printer;
-extern crate grep_searcher;
-
 use std::error::Error;
 
 use grep_regex::RegexMatcher;
@@ -68,20 +64,6 @@ fn example() -> Result<(), Box<Error>> {
 
 #![deny(missing_docs)]
 
-#[cfg(feature = "serde1")]
-extern crate base64;
-
-
-
-
-
-#[cfg(feature = "serde1")]
-#[macro_use]
-extern crate serde_derive;
-#[cfg(feature = "serde1")]
-extern crate serde_json;
-
-
 pub use crate::color::{
     default_color_specs, ColorError, ColorSpecs, UserColorSpec,
 };
@@ -8,7 +8,7 @@ use crate::util::NiceDuration;
 /// When statistics are reported by a printer, they correspond to all searches
 /// executed with that printer.
 #[derive(Clone, Debug, Default, PartialEq, Eq)]
-#[cfg_attr(feature = "serde1", derive(Serialize))]
+#[cfg_attr(feature = "serde1", derive(serde::Serialize))]
 pub struct Stats {
     elapsed: NiceDuration,
     searches: u64,
@@ -26,9 +26,3 @@ Add this to your `Cargo.toml`:
 [dependencies]
 grep-regex = "0.1"
 ```
-
-and this to your crate root:
-
-```rust
-extern crate grep_regex;
-```
@@ -1,18 +1,8 @@
 /*!
 An implementation of `grep-matcher`'s `Matcher` trait for Rust's regex engine.
 */
 
 #![deny(missing_docs)]
 
-
-
-
-#[macro_use]
-extern crate log;
-
-
-
-
 pub use crate::error::{Error, ErrorKind};
 pub use crate::matcher::{RegexCaptures, RegexMatcher, RegexMatcherBuilder};
-
@@ -55,7 +55,7 @@ impl LiteralSets {
 
         if !word {
             if self.prefixes.all_complete() && !self.prefixes.is_empty() {
-                debug!("literal prefixes detected: {:?}", self.prefixes);
+                log::debug!("literal prefixes detected: {:?}", self.prefixes);
                 // When this is true, the regex engine will do a literal scan,
                 // so we don't need to return anything. But we only do this
                 // if we aren't doing a word regex, since a word regex adds
@@ -106,7 +106,7 @@ impl LiteralSets {
             && !any_empty
             && !any_white
         {
-            debug!("required literals found: {:?}", req_lits);
+            log::debug!("required literals found: {:?}", req_lits);
             let alts: Vec<String> = req_lits
                 .into_iter()
                 .map(|x| util::bytes_to_regex(x))
@@ -149,27 +149,27 @@ impl LiteralSets {
         let lits = match (p_min_len, s_min_len) {
             (None, None) => return None,
             (Some(_), None) => {
-                debug!("prefix literals found");
+                log::debug!("prefix literals found");
                 self.prefixes.literals()
             }
             (None, Some(_)) => {
-                debug!("suffix literals found");
+                log::debug!("suffix literals found");
                 self.suffixes.literals()
             }
             (Some(p), Some(s)) => {
                 if p >= s {
-                    debug!("prefix literals found");
+                    log::debug!("prefix literals found");
                     self.prefixes.literals()
                 } else {
-                    debug!("suffix literals found");
+                    log::debug!("suffix literals found");
                     self.suffixes.literals()
                 }
             }
         };
 
-        debug!("prefix/suffix literals found: {:?}", lits);
+        log::debug!("prefix/suffix literals found: {:?}", lits);
         if has_only_whitespace(lits) {
-            debug!("dropping literals because one was whitespace");
+            log::debug!("dropping literals because one was whitespace");
             return None;
         }
         let alts: Vec<String> =
@@ -177,9 +177,9 @@ impl LiteralSets {
             // We're matching raw bytes, so disable Unicode mode.
             Some(format!("(?-u:{})", alts.join("|")))
         } else {
-            debug!("required literal found: {:?}", util::show_bytes(lit));
+            log::debug!("required literal found: {:?}", util::show_bytes(lit));
             if lit.chars().all(|c| c.is_whitespace()) {
-                debug!("dropping literal because one was whitespace");
+                log::debug!("dropping literal because one was whitespace");
                 return None;
             }
             Some(format!("(?-u:{})", util::bytes_to_regex(&lit)))
@@ -47,11 +47,11 @@ impl RegexMatcherBuilder {
         let fast_line_regex = chir.fast_line_regex()?;
         let non_matching_bytes = chir.non_matching_bytes();
         if let Some(ref re) = fast_line_regex {
-            debug!("extracted fast line regex: {:?}", re);
+            log::debug!("extracted fast line regex: {:?}", re);
         }
 
         let matcher = RegexMatcherImpl::new(&chir)?;
-        trace!("final regex: {:?}", matcher.regex());
+        log::trace!("final regex: {:?}", matcher.regex());
         Ok(RegexMatcher {
             config: self.config.clone(),
             matcher,
@@ -49,7 +49,7 @@ impl WordMatcher {
             expr.with_pattern(|pat| format!("^(?:{})$", pat))?.regex()?;
         let word_expr = expr.with_pattern(|pat| {
             let pat = format!(r"(?:(?m:^)|\W)({})(?:\W|(?m:$))", pat);
-            debug!("word regex: {:?}", pat);
+            log::debug!("word regex: {:?}", pat);
             pat
         })?;
         let regex = word_expr.regex()?;
@@ -28,9 +28,3 @@ Add this to your `Cargo.toml`:
 [dependencies]
 grep-searcher = "0.1"
 ```
-
-and this to your crate root:
-
-```rust
-extern crate grep_searcher;
-```
@@ -48,10 +48,6 @@ using the
 implementation of `Sink`.
 
 ```
-extern crate grep_matcher;
-extern crate grep_regex;
-extern crate grep_searcher;
-
 use std::error::Error;
 
 use grep_matcher::Matcher;
@@ -99,9 +95,6 @@ searches stdin.
 
 #![deny(missing_docs)]
 
-#[macro_use]
-extern crate log;
-
 pub use crate::lines::{LineIter, LineStep};
 pub use crate::searcher::{
     BinaryDetection, ConfigError, Encoding, MmapChoice, Searcher,
@@ -53,9 +53,9 @@ impl<'s, M: Matcher, S: Sink> Core<'s, M, S> {
         };
         if !core.searcher.multi_line_with_matcher(&core.matcher) {
             if core.is_line_by_line_fast() {
-                trace!("searcher core: will use fast line searcher");
+                log::trace!("searcher core: will use fast line searcher");
             } else {
-                trace!("searcher core: will use slow line searcher");
+                log::trace!("searcher core: will use slow line searcher");
             }
         }
         core
@@ -83,13 +83,13 @@ impl MmapChoice {
             Ok(mmap) => Some(mmap),
             Err(err) => {
                 if let Some(path) = path {
-                    debug!(
+                    log::debug!(
                         "{}: failed to open memory map: {}",
                         path.display(),
                         err
                     );
                 } else {
-                    debug!("failed to open memory map: {}", err);
+                    log::debug!("failed to open memory map: {}", err);
                 }
                 None
             }
@@ -659,16 +659,19 @@ impl Searcher {
         S: Sink,
     {
         if let Some(mmap) = self.config.mmap.open(file, path) {
-            trace!("{:?}: searching via memory map", path);
+            log::trace!("{:?}: searching via memory map", path);
             return self.search_slice(matcher, &mmap, write_to);
         }
         // Fast path for multi-line searches of files when memory maps are
         // not enabled. This pre-allocates a buffer roughly the size of the
        // file, which isn't possible when searching an arbitrary io::Read.
         if self.multi_line_with_matcher(&matcher) {
-            trace!("{:?}: reading entire file on to heap for mulitline", path);
+            log::trace!(
+                "{:?}: reading entire file on to heap for mulitline",
+                path
+            );
             self.fill_multi_line_buffer_from_file::<S>(file)?;
-            trace!("{:?}: searching via multiline strategy", path);
+            log::trace!("{:?}: searching via multiline strategy", path);
             MultiLine::new(
                 self,
                 matcher,
@@ -677,7 +680,7 @@ impl Searcher {
             )
             .run()
         } else {
-            trace!("{:?}: searching using generic reader", path);
+            log::trace!("{:?}: searching using generic reader", path);
             self.search_reader(matcher, file, write_to)
         }
     }
@@ -713,9 +716,11 @@ impl Searcher {
             .map_err(S::Error::error_io)?;
 
         if self.multi_line_with_matcher(&matcher) {
-            trace!("generic reader: reading everything to heap for multiline");
+            log::trace!(
+                "generic reader: reading everything to heap for multiline"
+            );
             self.fill_multi_line_buffer_from_reader::<_, S>(decoder)?;
-            trace!("generic reader: searching via multiline strategy");
+            log::trace!("generic reader: searching via multiline strategy");
             MultiLine::new(
                 self,
                 matcher,
@@ -726,7 +731,7 @@ impl Searcher {
         } else {
             let mut line_buffer = self.line_buffer.borrow_mut();
             let rdr = LineBufferReader::new(decoder, &mut *line_buffer);
-            trace!("generic reader: searching via roll buffer strategy");
+            log::trace!("generic reader: searching via roll buffer strategy");
             ReadByLine::new(self, matcher, rdr, write_to).run()
         }
     }
@@ -747,14 +752,16 @@ impl Searcher {
 
         // We can search the slice directly, unless we need to do transcoding.
         if self.slice_needs_transcoding(slice) {
-            trace!("slice reader: needs transcoding, using generic reader");
+            log::trace!(
+                "slice reader: needs transcoding, using generic reader"
+            );
             return self.search_reader(matcher, slice, write_to);
         }
         if self.multi_line_with_matcher(&matcher) {
-            trace!("slice reader: searching via multiline strategy");
+            log::trace!("slice reader: searching via multiline strategy");
             MultiLine::new(self, matcher, slice, write_to).run()
         } else {
-            trace!("slice reader: searching via slice-by-line strategy");
+            log::trace!("slice reader: searching via slice-by-line strategy");
             SliceByLine::new(self, matcher, slice, write_to).run()
         }
     }