diff -Nru rust-ignore-0.4.10/Cargo.lock rust-ignore-0.4.16/Cargo.lock --- rust-ignore-0.4.10/Cargo.lock 1970-01-01 00:00:00.000000000 +0000 +++ rust-ignore-0.4.16/Cargo.lock 2020-05-29 13:13:02.000000000 +0000 @@ -2,179 +2,192 @@ # It is not intended for manual editing. [[package]] name = "aho-corasick" -version = "0.7.6" +version = "0.7.10" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8716408b8bc624ed7f65d223ddb9ac2d044c0547b6fa4b0d554f3a9540496ada" dependencies = [ - "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "memchr", ] [[package]] +name = "autocfg" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8aac770f1885fd7e387acedd76065302551364496e46b3dd00860b2f8359b9d" + +[[package]] name = "bstr" -version = "0.2.6" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31accafdb70df7871592c058eca3985b71104e15ac32f64706022c58867da931" dependencies = [ - "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "memchr", ] [[package]] name = "cfg-if" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" [[package]] name = "crossbeam-channel" -version = "0.3.9" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cced8691919c02aac3cb0a1bc2e9b73d89e832bf9a06fc579d4e71b68a2da061" dependencies = [ - "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam-utils", + "maybe-uninit", ] [[package]] name = "crossbeam-utils" -version = "0.6.6" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg", + "cfg-if", + "lazy_static", ] [[package]] name = "fnv" -version = "1.0.6" +version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "globset" -version = "0.4.4" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ad1da430bd7281dde2576f44c84cc3f0f7b475e7202cd503042dff01a8c8120" dependencies = [ - "aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)", - "bstr 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", - "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "aho-corasick", + "bstr", + "fnv", + "log", + "regex", ] [[package]] name = "ignore" -version = "0.4.10" +version = "0.4.16" dependencies = [ - "crossbeam-channel 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", - "globset 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "same-file 1.0.5 
(registry+https://github.com/rust-lang/crates.io-index)", - "thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", - "walkdir 2.2.9 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi-util 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam-channel", + "crossbeam-utils", + "globset", + "lazy_static", + "log", + "memchr", + "regex", + "same-file", + "thread_local", + "walkdir", + "winapi-util", ] [[package]] name = "lazy_static" -version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "log" version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14b6052be84e6b71ab17edffc2eeabf5c2c3ae1fdb464aae35ac50c67a44e1f7" dependencies = [ - "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "cfg-if", ] [[package]] +name = "maybe-uninit" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00" + +[[package]] name = "memchr" -version = "2.2.1" +version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3728d817d99e5ac407411fa471ff9800a778d88a24685968b36824eaf4bee400" [[package]] name = "regex" -version = "1.2.1" +version = "1.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c3780fcf44b193bc4d09f36d2a3c87b251da4a046c87795a0d35f4f927ad8e6" dependencies = [ - "aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)", - "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "regex-syntax 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)", - "thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", + "aho-corasick", + "memchr", + "regex-syntax", + "thread_local", ] [[package]] name = "regex-syntax" -version = "0.6.11" +version = "0.6.18" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26412eb97c6b088a6997e05f69403a802a92d520de2f8e63c2b65f9e0f47c4e8" [[package]] name = "same-file" -version = "1.0.5" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" dependencies = [ - "winapi-util 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-util", ] [[package]] name = "thread_local" -version = "0.3.6" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d40c6d1b69745a6ec6fb1ca717914848da4b44ae29d9b3080cbee91d72a69b14" dependencies = [ - "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static", ] [[package]] name = "walkdir" -version = "2.2.9" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "777182bc735b6424e1a57516d35ed72cb8019d85c8c9bf536dccb3445c1a2f7d" dependencies = [ - "same-file 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi-util 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "same-file", + "winapi", + "winapi-util", ] [[package]] name = "winapi" -version = "0.3.7" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8093091eeb260906a183e6ae1abdba2ef5ef2257a21801128899c3fc699229c6" dependencies = [ - "winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", ] [[package]] name = "winapi-i686-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.2" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" dependencies = [ - "winapi 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi", ] [[package]] name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" - -[metadata] -"checksum aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "58fb5e95d83b38284460a5fda7d6470aa0b8844d283a0b614b8535e880800d2d" -"checksum bstr 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "e0a692f1c740e7e821ca71a22cf99b9b2322dfa94d10f71443befb1797b3946a" -"checksum cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "b486ce3ccf7ffd79fdeb678eac06a9e6c09fc88d33836340becb8fffe87c5e33" -"checksum crossbeam-channel 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "c8ec7fcd21571dc78f96cc96243cab8d8f035247c3efd16c687be154c3fa9efa" -"checksum crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)" = "04973fa96e96579258a5091af6003abde64af786b860f18622b82e026cca60e6" -"checksum fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2fad85553e09a6f881f739c29f0b00b0f01357c743266d478b68951ce23285f3" -"checksum globset 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "925aa2cac82d8834e2b2a4415b6f6879757fb5c0928fc445ae76461a12eed8f2" -"checksum lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bc5729f27f159ddd61f4df6228e827e86643d4d3e7c32183cb30a1c08f604a14" -"checksum log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "14b6052be84e6b71ab17edffc2eeabf5c2c3ae1fdb464aae35ac50c67a44e1f7" -"checksum memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "88579771288728879b57485cc7d6b07d648c9f0141eb955f8ab7f9d45394468e" -"checksum regex 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "88c3d9193984285d544df4a30c23a4e62ead42edf70a4452ceb76dac1ce05c26" -"checksum regex-syntax 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)" = "b143cceb2ca5e56d5671988ef8b15615733e7ee16cd348e064333b251b89343f" -"checksum same-file 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "585e8ddcedc187886a30fa705c47985c3fa88d06624095856b36ca0b82ff4421" -"checksum thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c6b53e329000edc2b34dbe8545fd20e55a333362d0a321909685a19bd28c3f1b" -"checksum walkdir 2.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "9658c94fa8b940eab2250bd5a457f9c48b748420d71293b165c8cdbe2f55f71e" -"checksum winapi 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)" = "f10e386af2b13e47c89e7236a7a14a086791a2b88ebad6df9bf42040195cf770" -"checksum winapi-i686-pc-windows-gnu 0.4.0 
(registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" -"checksum winapi-util 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7168bab6e1daee33b4557efd0e95d5ca70a03706d39fa5f3fe7a236f584b03c9" -"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" diff -Nru rust-ignore-0.4.10/Cargo.toml rust-ignore-0.4.16/Cargo.toml --- rust-ignore-0.4.10/Cargo.toml 1970-01-01 00:00:00.000000000 +0000 +++ rust-ignore-0.4.16/Cargo.toml 2020-05-29 13:13:02.000000000 +0000 @@ -12,24 +12,24 @@ [package] name = "ignore" -version = "0.4.10" +version = "0.4.16" authors = ["Andrew Gallant "] description = "A fast library for efficiently matching ignore files such as `.gitignore`\nagainst file paths.\n" -homepage = "https://github.com/BurntSushi/ripgrep/tree/master/ignore" +homepage = "https://github.com/BurntSushi/ripgrep/tree/master/crates/ignore" documentation = "https://docs.rs/ignore" readme = "README.md" keywords = ["glob", "ignore", "gitignore", "pattern", "file"] license = "Unlicense/MIT" -repository = "https://github.com/BurntSushi/ripgrep/tree/master/ignore" +repository = "https://github.com/BurntSushi/ripgrep/tree/master/crates/ignore" [lib] name = "ignore" bench = false -[dependencies.crossbeam-channel] -version = "0.3.6" +[dependencies.crossbeam-utils] +version = "0.7.0" [dependencies.globset] -version = "0.4.3" +version = "0.4.5" [dependencies.lazy_static] version = "1.1" @@ -47,10 +47,12 @@ version = "1.0.4" [dependencies.thread_local] -version = "0.3.6" +version = "1" [dependencies.walkdir] version = "2.2.7" +[dev-dependencies.crossbeam-channel] +version = "0.4.0" [features] simd-accel = ["globset/simd-accel"] diff -Nru rust-ignore-0.4.10/Cargo.toml.orig rust-ignore-0.4.16/Cargo.toml.orig --- rust-ignore-0.4.10/Cargo.toml.orig 2019-08-06 13:47:45.000000000 +0000 +++ rust-ignore-0.4.16/Cargo.toml.orig 2020-05-29 13:13:02.000000000 +0000 @@ -1,14 +1,14 @@ [package] name = "ignore" -version = "0.4.10" #:version +version = "0.4.16" #:version authors = ["Andrew Gallant "] description = """ A fast library for efficiently matching ignore files such as `.gitignore` against file paths. 
""" documentation = "https://docs.rs/ignore" -homepage = "https://github.com/BurntSushi/ripgrep/tree/master/ignore" -repository = "https://github.com/BurntSushi/ripgrep/tree/master/ignore" +homepage = "https://github.com/BurntSushi/ripgrep/tree/master/crates/ignore" +repository = "https://github.com/BurntSushi/ripgrep/tree/master/crates/ignore" readme = "README.md" keywords = ["glob", "ignore", "gitignore", "pattern", "file"] license = "Unlicense/MIT" @@ -18,18 +18,21 @@ bench = false [dependencies] -crossbeam-channel = "0.3.6" -globset = { version = "0.4.3", path = "../globset" } +crossbeam-utils = "0.7.0" +globset = { version = "0.4.5", path = "../globset" } lazy_static = "1.1" log = "0.4.5" memchr = "2.1" regex = "1.1" same-file = "1.0.4" -thread_local = "0.3.6" +thread_local = "1" walkdir = "2.2.7" [target.'cfg(windows)'.dependencies.winapi-util] version = "0.1.2" +[dev-dependencies] +crossbeam-channel = "0.4.0" + [features] simd-accel = ["globset/simd-accel"] diff -Nru rust-ignore-0.4.10/.cargo_vcs_info.json rust-ignore-0.4.16/.cargo_vcs_info.json --- rust-ignore-0.4.10/.cargo_vcs_info.json 1970-01-01 00:00:00.000000000 +0000 +++ rust-ignore-0.4.16/.cargo_vcs_info.json 2020-05-29 13:13:02.000000000 +0000 @@ -1,5 +1,5 @@ { "git": { - "sha1": "345124a7fa030cfa922fd9dc4675cdff769cd7a3" + "sha1": "72bdde677100d8384c89da0ef1d7136dda03d1f3" } } diff -Nru rust-ignore-0.4.10/debian/cargo-checksum.json rust-ignore-0.4.16/debian/cargo-checksum.json --- rust-ignore-0.4.10/debian/cargo-checksum.json 2019-08-08 03:57:32.000000000 +0000 +++ rust-ignore-0.4.16/debian/cargo-checksum.json 2020-06-20 08:34:40.000000000 +0000 @@ -1 +1 @@ -{"package":"0ec16832258409d571aaef8273f3c3cc5b060d784e159d1a0f3b0017308f84a7","files":{}} +{"package":"22dcbf2a4a289528dbef21686354904e1c694ac642610a9bff9e7df730d9ec72","files":{}} diff -Nru rust-ignore-0.4.10/debian/changelog rust-ignore-0.4.16/debian/changelog --- rust-ignore-0.4.10/debian/changelog 2019-08-08 03:57:32.000000000 +0000 +++ rust-ignore-0.4.16/debian/changelog 2020-06-20 08:34:40.000000000 +0000 @@ -1,3 +1,31 @@ +rust-ignore (0.4.16-2) unstable; urgency=medium + + * Team upload. + * Package ignore 0.4.16 from crates.io using debcargo 2.4.3 + + -- Sylvestre Ledru Sat, 20 Jun 2020 10:34:40 +0200 + +rust-ignore (0.4.16-1) unstable; urgency=medium + + * Team upload. + * Package ignore 0.4.16 from crates.io using debcargo 2.4.2 + + -- Sylvestre Ledru Mon, 01 Jun 2020 12:54:41 +0200 + +rust-ignore (0.4.15-1) unstable; urgency=medium + + * Team upload. + * Package ignore 0.4.15 from crates.io using debcargo 2.4.2 + + -- Sylvestre Ledru Sun, 10 May 2020 16:21:09 +0200 + +rust-ignore (0.4.13-1) unstable; urgency=medium + + * Team upload. 
+ * Package ignore 0.4.13 from crates.io using debcargo 2.4.0 + + -- Sylvestre Ledru Thu, 26 Mar 2020 10:22:00 +0100 + rust-ignore (0.4.10-1) unstable; urgency=medium * Package ignore 0.4.10 from crates.io using debcargo 2.4.0 diff -Nru rust-ignore-0.4.10/debian/control rust-ignore-0.4.16/debian/control --- rust-ignore-0.4.10/debian/control 2019-08-08 03:57:32.000000000 +0000 +++ rust-ignore-0.4.16/debian/control 2020-06-20 08:34:40.000000000 +0000 @@ -6,37 +6,37 @@ cargo:native , rustc:native , libstd-rust-dev , - librust-crossbeam-channel-0.3+default-dev (>= 0.3.6-~~) , - librust-globset-0.4+default-dev (>= 0.4.3-~~) , + librust-crossbeam-utils-0.7+default-dev , + librust-globset-0.4+default-dev (>= 0.4.5-~~) , librust-lazy-static-1+default-dev (>= 1.1-~~) , librust-log-0.4+default-dev (>= 0.4.5-~~) , librust-memchr-2+default-dev (>= 2.1-~~) , librust-regex-1+default-dev (>= 1.1-~~) , librust-same-file-1+default-dev (>= 1.0.4-~~) , - librust-thread-local-0.3+default-dev (>= 0.3.6-~~) , + librust-thread-local-1+default-dev , librust-walkdir-2+default-dev (>= 2.2.7-~~) , librust-winapi-util-0.1+default-dev (>= 0.1.2-~~) Maintainer: Debian Rust Maintainers Uploaders: Ximin Luo -Standards-Version: 4.2.0 +Standards-Version: 4.4.1 Vcs-Git: https://salsa.debian.org/rust-team/debcargo-conf.git [src/ignore] Vcs-Browser: https://salsa.debian.org/rust-team/debcargo-conf/tree/master/src/ignore -Homepage: https://github.com/BurntSushi/ripgrep/tree/master/ignore +Homepage: https://github.com/BurntSushi/ripgrep/tree/master/crates/ignore Package: librust-ignore-dev Architecture: any Multi-Arch: same Depends: ${misc:Depends}, - librust-crossbeam-channel-0.3+default-dev (>= 0.3.6-~~), - librust-globset-0.4+default-dev (>= 0.4.3-~~), + librust-crossbeam-utils-0.7+default-dev, + librust-globset-0.4+default-dev (>= 0.4.5-~~), librust-lazy-static-1+default-dev (>= 1.1-~~), librust-log-0.4+default-dev (>= 0.4.5-~~), librust-memchr-2+default-dev (>= 2.1-~~), librust-regex-1+default-dev (>= 1.1-~~), librust-same-file-1+default-dev (>= 1.0.4-~~), - librust-thread-local-0.3+default-dev (>= 0.3.6-~~), + librust-thread-local-1+default-dev, librust-walkdir-2+default-dev (>= 2.2.7-~~), librust-winapi-util-0.1+default-dev (>= 0.1.2-~~) Suggests: @@ -47,8 +47,8 @@ librust-ignore-0+default-dev (= ${binary:Version}), librust-ignore-0.4-dev (= ${binary:Version}), librust-ignore-0.4+default-dev (= ${binary:Version}), - librust-ignore-0.4.10-dev (= ${binary:Version}), - librust-ignore-0.4.10+default-dev (= ${binary:Version}) + librust-ignore-0.4.16-dev (= ${binary:Version}), + librust-ignore-0.4.16+default-dev (= ${binary:Version}) Description: Fast library for matching ignore files like .gitignore against file paths - Rust source code This package contains the source for the Rust ignore crate, packaged by debcargo for use with cargo and dh-cargo. @@ -59,11 +59,11 @@ Depends: ${misc:Depends}, librust-ignore-dev (= ${binary:Version}), - librust-globset-0.4+simd-accel-dev (>= 0.4.3-~~) + librust-globset-0.4+simd-accel-dev (>= 0.4.5-~~) Provides: librust-ignore-0+simd-accel-dev (= ${binary:Version}), librust-ignore-0.4+simd-accel-dev (= ${binary:Version}), - librust-ignore-0.4.10+simd-accel-dev (= ${binary:Version}) + librust-ignore-0.4.16+simd-accel-dev (= ${binary:Version}) Description: Fast library for matching ignore files like .gitignore against file paths - feature "simd-accel" This metapackage enables feature "simd-accel" for the Rust ignore crate, by pulling in any additional dependencies needed by that feature. 
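For orientation: the crate packaged here is the `ignore` library used by ripgrep, described in debian/control as a "fast library for matching ignore files like .gitignore against file paths". A minimal consumer looks roughly like the sketch below. This is illustrative only and not part of the Debian diff; it assumes the crate's public `ignore::WalkBuilder` API, which walks a directory tree while honouring `.gitignore`/`.ignore` rules.

    use ignore::WalkBuilder;

    fn main() {
        // Recursively walk "./", skipping anything matched by ignore files.
        for result in WalkBuilder::new("./").build() {
            match result {
                Ok(entry) => println!("{}", entry.path().display()),
                Err(err) => eprintln!("walk error: {}", err),
            }
        }
    }
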
diff -Nru rust-ignore-0.4.10/debian/copyright.debcargo.hint rust-ignore-0.4.16/debian/copyright.debcargo.hint --- rust-ignore-0.4.10/debian/copyright.debcargo.hint 2019-08-08 03:57:32.000000000 +0000 +++ rust-ignore-0.4.16/debian/copyright.debcargo.hint 2020-06-20 08:34:40.000000000 +0000 @@ -1,7 +1,7 @@ Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ Upstream-Name: ignore Upstream-Contact: Andrew Gallant -Source: https://github.com/BurntSushi/ripgrep/tree/master/ignore +Source: https://github.com/BurntSushi/ripgrep/tree/master/crates/ignore Files: * Copyright: FIXME (overlay) UNKNOWN-YEARS Andrew Gallant @@ -21,8 +21,8 @@ Files: debian/* Copyright: - 2018-2019 Debian Rust Maintainers - 2018-2019 Ximin Luo + 2018-2020 Debian Rust Maintainers + 2018-2020 Ximin Luo License: Unlicense or MIT License: MIT diff -Nru rust-ignore-0.4.10/debian/rules rust-ignore-0.4.16/debian/rules --- rust-ignore-0.4.10/debian/rules 2019-08-08 03:57:32.000000000 +0000 +++ rust-ignore-0.4.16/debian/rules 2020-06-20 08:34:40.000000000 +0000 @@ -1,6 +1,3 @@ #!/usr/bin/make -f %: dh $@ --buildsystem cargo - -override_dh_auto_test: - dh_auto_test -- test --all diff -Nru rust-ignore-0.4.10/debian/tests/control rust-ignore-0.4.16/debian/tests/control --- rust-ignore-0.4.10/debian/tests/control 2019-08-08 03:57:32.000000000 +0000 +++ rust-ignore-0.4.16/debian/tests/control 2020-06-20 08:34:40.000000000 +0000 @@ -1,11 +1,14 @@ -Test-Command: /usr/share/cargo/bin/cargo-auto-test ignore 0.4.10 --all-targets --all-features -Depends: dh-cargo (>= 18), @ +Test-Command: /usr/share/cargo/bin/cargo-auto-test ignore 0.4.16 --all-targets --all-features +Features: test-name=@ +Depends: dh-cargo (>= 18), librust-crossbeam-channel-0.4+default-dev, @ Restrictions: allow-stderr, skip-not-installable -Test-Command: /usr/share/cargo/bin/cargo-auto-test ignore 0.4.10 --all-targets --no-default-features -Depends: dh-cargo (>= 18), librust-ignore-dev +Test-Command: /usr/share/cargo/bin/cargo-auto-test ignore 0.4.16 --all-targets --no-default-features +Features: test-name=librust-ignore-dev +Depends: dh-cargo (>= 18), librust-crossbeam-channel-0.4+default-dev, @ Restrictions: allow-stderr, skip-not-installable -Test-Command: /usr/share/cargo/bin/cargo-auto-test ignore 0.4.10 --all-targets --features simd-accel -Depends: dh-cargo (>= 18), librust-ignore+simd-accel-dev +Test-Command: /usr/share/cargo/bin/cargo-auto-test ignore 0.4.16 --all-targets --features simd-accel +Features: test-name=librust-ignore+simd-accel-dev +Depends: dh-cargo (>= 18), librust-crossbeam-channel-0.4+default-dev, @ Restrictions: allow-stderr, skip-not-installable diff -Nru rust-ignore-0.4.10/debian/watch rust-ignore-0.4.16/debian/watch --- rust-ignore-0.4.10/debian/watch 2019-08-08 03:57:32.000000000 +0000 +++ rust-ignore-0.4.16/debian/watch 2020-06-20 08:34:40.000000000 +0000 @@ -2,4 +2,3 @@ opts=filenamemangle=s/.*\/(.*)\/download/ignore-$1\.tar\.gz/g,\ uversionmangle=s/(\d)[_\.\-\+]?((RC|rc|pre|dev|beta|alpha)\d*)$/$1~$2/ \ https://qa.debian.org/cgi-bin/fakeupstream.cgi?upstream=crates.io/ignore .*/crates/ignore/@ANY_VERSION@/download - diff -Nru rust-ignore-0.4.10/src/default_types.rs rust-ignore-0.4.16/src/default_types.rs --- rust-ignore-0.4.10/src/default_types.rs 1970-01-01 00:00:00.000000000 +0000 +++ rust-ignore-0.4.16/src/default_types.rs 2020-05-20 01:10:08.000000000 +0000 @@ -0,0 +1,249 @@ +/// This list represents the default file types that ripgrep ships with. 
In +/// general, any file format is fair game, although it should generally be +/// limited to reasonably popular open formats. For other cases, you can add +/// types to each invocation of ripgrep with the '--type-add' flag. +/// +/// If you would like to add or improve this list, please file a PR: +/// https://github.com/BurntSushi/ripgrep +/// +/// Please try to keep this list sorted lexicographically and wrapped to 79 +/// columns (inclusive). +#[rustfmt::skip] +pub const DEFAULT_TYPES: &[(&str, &[&str])] = &[ + ("agda", &["*.agda", "*.lagda"]), + ("aidl", &["*.aidl"]), + ("amake", &["*.mk", "*.bp"]), + ("asciidoc", &["*.adoc", "*.asc", "*.asciidoc"]), + ("asm", &["*.asm", "*.s", "*.S"]), + ("asp", &[ + "*.aspx", "*.aspx.cs", "*.aspx.cs", "*.ascx", "*.ascx.cs", "*.ascx.vb", + ]), + ("ats", &["*.ats", "*.dats", "*.sats", "*.hats"]), + ("avro", &["*.avdl", "*.avpr", "*.avsc"]), + ("awk", &["*.awk"]), + ("bazel", &["*.bzl", "WORKSPACE", "BUILD", "BUILD.bazel"]), + ("bitbake", &["*.bb", "*.bbappend", "*.bbclass", "*.conf", "*.inc"]), + ("brotli", &["*.br"]), + ("buildstream", &["*.bst"]), + ("bzip2", &["*.bz2", "*.tbz2"]), + ("c", &["*.[chH]", "*.[chH].in", "*.cats"]), + ("cabal", &["*.cabal"]), + ("cbor", &["*.cbor"]), + ("ceylon", &["*.ceylon"]), + ("clojure", &["*.clj", "*.cljc", "*.cljs", "*.cljx"]), + ("cmake", &["*.cmake", "CMakeLists.txt"]), + ("coffeescript", &["*.coffee"]), + ("config", &["*.cfg", "*.conf", "*.config", "*.ini"]), + ("coq", &["*.v"]), + ("cpp", &[ + "*.[ChH]", "*.cc", "*.[ch]pp", "*.[ch]xx", "*.hh", "*.inl", + "*.[ChH].in", "*.cc.in", "*.[ch]pp.in", "*.[ch]xx.in", "*.hh.in", + ]), + ("creole", &["*.creole"]), + ("crystal", &["Projectfile", "*.cr"]), + ("cs", &["*.cs"]), + ("csharp", &["*.cs"]), + ("cshtml", &["*.cshtml"]), + ("css", &["*.css", "*.scss"]), + ("csv", &["*.csv"]), + ("cython", &["*.pyx", "*.pxi", "*.pxd"]), + ("d", &["*.d"]), + ("dart", &["*.dart"]), + ("dhall", &["*.dhall"]), + ("diff", &["*.patch", "*.diff"]), + ("docker", &["*Dockerfile*"]), + ("ebuild", &["*.ebuild"]), + ("edn", &["*.edn"]), + ("elisp", &["*.el"]), + ("elixir", &["*.ex", "*.eex", "*.exs"]), + ("elm", &["*.elm"]), + ("erb", &["*.erb"]), + ("erlang", &["*.erl", "*.hrl"]), + ("fidl", &["*.fidl"]), + ("fish", &["*.fish"]), + ("fortran", &[ + "*.f", "*.F", "*.f77", "*.F77", "*.pfo", + "*.f90", "*.F90", "*.f95", "*.F95", + ]), + ("fsharp", &["*.fs", "*.fsx", "*.fsi"]), + ("gap", &["*.g", "*.gap", "*.gi", "*.gd", "*.tst"]), + ("gn", &["*.gn", "*.gni"]), + ("go", &["*.go"]), + ("gradle", &["*.gradle"]), + ("groovy", &["*.groovy", "*.gradle"]), + ("gzip", &["*.gz", "*.tgz"]), + ("h", &["*.h", "*.hpp"]), + ("haml", &["*.haml"]), + ("haskell", &["*.hs", "*.lhs", "*.cpphs", "*.c2hs", "*.hsc"]), + ("hbs", &["*.hbs"]), + ("hs", &["*.hs", "*.lhs"]), + ("html", &["*.htm", "*.html", "*.ejs"]), + ("idris", &["*.idr", "*.lidr"]), + ("java", &["*.java", "*.jsp", "*.jspx", "*.properties"]), + ("jinja", &["*.j2", "*.jinja", "*.jinja2"]), + ("jl", &["*.jl"]), + ("js", &["*.js", "*.jsx", "*.vue"]), + ("json", &["*.json", "composer.lock"]), + ("jsonl", &["*.jsonl"]), + ("julia", &["*.jl"]), + ("jupyter", &["*.ipynb", "*.jpynb"]), + ("k", &["*.k"]), + ("kotlin", &["*.kt", "*.kts"]), + ("less", &["*.less"]), + ("license", &[ + // General + "COPYING", "COPYING[.-]*", + "COPYRIGHT", "COPYRIGHT[.-]*", + "EULA", "EULA[.-]*", + "licen[cs]e", "licen[cs]e.*", + "LICEN[CS]E", "LICEN[CS]E[.-]*", "*[.-]LICEN[CS]E*", + "NOTICE", "NOTICE[.-]*", + "PATENTS", "PATENTS[.-]*", + "UNLICEN[CS]E", "UNLICEN[CS]E[.-]*", + // 
GPL (gpl.txt, etc.) + "agpl[.-]*", + "gpl[.-]*", + "lgpl[.-]*", + // Other license-specific (APACHE-2.0.txt, etc.) + "AGPL-*[0-9]*", + "APACHE-*[0-9]*", + "BSD-*[0-9]*", + "CC-BY-*", + "GFDL-*[0-9]*", + "GNU-*[0-9]*", + "GPL-*[0-9]*", + "LGPL-*[0-9]*", + "MIT-*[0-9]*", + "MPL-*[0-9]*", + "OFL-*[0-9]*", + ]), + ("lisp", &["*.el", "*.jl", "*.lisp", "*.lsp", "*.sc", "*.scm"]), + ("lock", &["*.lock", "package-lock.json"]), + ("log", &["*.log"]), + ("lua", &["*.lua"]), + ("lz4", &["*.lz4"]), + ("lzma", &["*.lzma"]), + ("m4", &["*.ac", "*.m4"]), + ("make", &[ + "[Gg][Nn][Uu]makefile", "[Mm]akefile", + "[Gg][Nn][Uu]makefile.am", "[Mm]akefile.am", + "[Gg][Nn][Uu]makefile.in", "[Mm]akefile.in", + "*.mk", "*.mak" + ]), + ("mako", &["*.mako", "*.mao"]), + ("man", &["*.[0-9lnpx]", "*.[0-9][cEFMmpSx]"]), + ("markdown", &["*.markdown", "*.md", "*.mdown", "*.mkdn"]), + ("matlab", &["*.m"]), + ("md", &["*.markdown", "*.md", "*.mdown", "*.mkdn"]), + ("meson", &["meson.build", "meson_options.txt"]), + ("mk", &["mkfile"]), + ("ml", &["*.ml"]), + ("msbuild", &[ + "*.csproj", "*.fsproj", "*.vcxproj", "*.proj", "*.props", "*.targets", + ]), + ("nim", &["*.nim", "*.nimf", "*.nimble", "*.nims"]), + ("nix", &["*.nix"]), + ("objc", &["*.h", "*.m"]), + ("objcpp", &["*.h", "*.mm"]), + ("ocaml", &["*.ml", "*.mli", "*.mll", "*.mly"]), + ("org", &["*.org", "*.org_archive"]), + ("pascal", &["*.pas", "*.dpr", "*.lpr", "*.pp", "*.inc"]), + ("pdf", &["*.pdf"]), + ("perl", &["*.perl", "*.pl", "*.PL", "*.plh", "*.plx", "*.pm", "*.t"]), + ("php", &["*.php", "*.php3", "*.php4", "*.php5", "*.phtml"]), + ("pod", &["*.pod"]), + ("postscript", &["*.eps", "*.ps"]), + ("protobuf", &["*.proto"]), + ("ps", &["*.cdxml", "*.ps1", "*.ps1xml", "*.psd1", "*.psm1"]), + ("puppet", &["*.erb", "*.pp", "*.rb"]), + ("purs", &["*.purs"]), + ("py", &["*.py"]), + ("qmake", &["*.pro", "*.pri", "*.prf"]), + ("qml", &["*.qml"]), + ("r", &["*.R", "*.r", "*.Rmd", "*.Rnw"]), + ("rdoc", &["*.rdoc"]), + ("readme", &["README*", "*README"]), + ("robot", &["*.robot"]), + ("rst", &["*.rst"]), + ("ruby", &["Gemfile", "*.gemspec", ".irbrc", "Rakefile", "*.rb"]), + ("rust", &["*.rs"]), + ("sass", &["*.sass", "*.scss"]), + ("scala", &["*.scala", "*.sbt"]), + ("sh", &[ + // Portable/misc. 
init files + ".login", ".logout", ".profile", "profile", + // bash-specific init files + ".bash_login", "bash_login", + ".bash_logout", "bash_logout", + ".bash_profile", "bash_profile", + ".bashrc", "bashrc", "*.bashrc", + // csh-specific init files + ".cshrc", "*.cshrc", + // ksh-specific init files + ".kshrc", "*.kshrc", + // tcsh-specific init files + ".tcshrc", + // zsh-specific init files + ".zshenv", "zshenv", + ".zlogin", "zlogin", + ".zlogout", "zlogout", + ".zprofile", "zprofile", + ".zshrc", "zshrc", + // Extensions + "*.bash", "*.csh", "*.ksh", "*.sh", "*.tcsh", "*.zsh", + ]), + ("slim", &["*.skim", "*.slim", "*.slime"]), + ("smarty", &["*.tpl"]), + ("sml", &["*.sml", "*.sig"]), + ("soy", &["*.soy"]), + ("spark", &["*.spark"]), + ("spec", &["*.spec"]), + ("sql", &["*.sql", "*.psql"]), + ("stylus", &["*.styl"]), + ("sv", &["*.v", "*.vg", "*.sv", "*.svh", "*.h"]), + ("svg", &["*.svg"]), + ("swift", &["*.swift"]), + ("swig", &["*.def", "*.i"]), + ("systemd", &[ + "*.automount", "*.conf", "*.device", "*.link", "*.mount", "*.path", + "*.scope", "*.service", "*.slice", "*.socket", "*.swap", "*.target", + "*.timer", + ]), + ("taskpaper", &["*.taskpaper"]), + ("tcl", &["*.tcl"]), + ("tex", &["*.tex", "*.ltx", "*.cls", "*.sty", "*.bib", "*.dtx", "*.ins"]), + ("textile", &["*.textile"]), + ("tf", &["*.tf"]), + ("thrift", &["*.thrift"]), + ("toml", &["*.toml", "Cargo.lock"]), + ("ts", &["*.ts", "*.tsx"]), + ("twig", &["*.twig"]), + ("txt", &["*.txt"]), + ("typoscript", &["*.typoscript", "*.ts"]), + ("vala", &["*.vala"]), + ("vb", &["*.vb"]), + ("verilog", &["*.v", "*.vh", "*.sv", "*.svh"]), + ("vhdl", &["*.vhd", "*.vhdl"]), + ("vim", &["*.vim"]), + ("vimscript", &["*.vim"]), + ("webidl", &["*.idl", "*.webidl", "*.widl"]), + ("wiki", &["*.mediawiki", "*.wiki"]), + ("xml", &[ + "*.xml", "*.xml.dist", "*.dtd", "*.xsl", "*.xslt", "*.xsd", "*.xjb", + "*.rng", "*.sch", "*.xhtml", + ]), + ("xz", &["*.xz", "*.txz"]), + ("yacc", &["*.y"]), + ("yaml", &["*.yaml", "*.yml"]), + ("z", &["*.Z"]), + ("zig", &["*.zig"]), + ("zsh", &[ + ".zshenv", "zshenv", + ".zlogin", "zlogin", + ".zlogout", "zlogout", + ".zprofile", "zprofile", + ".zshrc", "zshrc", + "*.zsh", + ]), + ("zstd", &["*.zst", "*.zstd"]), +]; diff -Nru rust-ignore-0.4.10/src/dir.rs rust-ignore-0.4.16/src/dir.rs --- rust-ignore-0.4.10/src/dir.rs 2019-08-06 13:40:52.000000000 +0000 +++ rust-ignore-0.4.16/src/dir.rs 2020-04-18 00:01:03.000000000 +0000 @@ -14,13 +14,15 @@ // well. use std::collections::HashMap; -use std::ffi::{OsString, OsStr}; +use std::ffi::{OsStr, OsString}; +use std::fs::{File, FileType}; +use std::io::{self, BufRead}; use std::path::{Path, PathBuf}; use std::sync::{Arc, RwLock}; use gitignore::{self, Gitignore, GitignoreBuilder}; -use pathutil::{is_hidden, strip_prefix}; use overrides::{self, Override}; +use pathutil::{is_hidden, strip_prefix}; use types::{self, Types}; use walk::DirEntry; use {Error, Match, PartialErrorBuilder}; @@ -76,6 +78,9 @@ git_exclude: bool, /// Whether to ignore files case insensitively ignore_case_insensitive: bool, + /// Whether a git repository must be present in order to apply any + /// git-related ignore rules. + require_git: bool, } /// Ignore is a matcher useful for recursively walking one or more directories. 
@@ -197,12 +202,11 @@ errs.maybe_push(err); igtmp.is_absolute_parent = true; igtmp.absolute_base = Some(absolute_base.clone()); - igtmp.has_git = - if self.0.opts.git_ignore { - parent.join(".git").exists() - } else { - false - }; + igtmp.has_git = if self.0.opts.git_ignore { + parent.join(".git").exists() + } else { + false + }; ig = Ignore(Arc::new(igtmp)); compiled.insert(parent.as_os_str().to_os_string(), ig.clone()); } @@ -227,65 +231,70 @@ /// Like add_child, but takes a full path and returns an IgnoreInner. fn add_child_path(&self, dir: &Path) -> (IgnoreInner, Option) { + let git_type = if self.0.opts.git_ignore || self.0.opts.git_exclude { + dir.join(".git").metadata().ok().map(|md| md.file_type()) + } else { + None + }; + let has_git = git_type.map(|_| true).unwrap_or(false); + let mut errs = PartialErrorBuilder::default(); - let custom_ig_matcher = - if self.0.custom_ignore_filenames.is_empty() { - Gitignore::empty() - } else { - let (m, err) = - create_gitignore( - &dir, - &self.0.custom_ignore_filenames, - self.0.opts.ignore_case_insensitive, - ); - errs.maybe_push(err); - m - }; - let ig_matcher = - if !self.0.opts.ignore { - Gitignore::empty() - } else { - let (m, err) = - create_gitignore( - &dir, - &[".ignore"], - self.0.opts.ignore_case_insensitive, - ); - errs.maybe_push(err); - m - }; - let gi_matcher = - if !self.0.opts.git_ignore { - Gitignore::empty() - } else { - let (m, err) = - create_gitignore( - &dir, - &[".gitignore"], - self.0.opts.ignore_case_insensitive, - ); - errs.maybe_push(err); - m - }; - let gi_exclude_matcher = - if !self.0.opts.git_exclude { - Gitignore::empty() - } else { - let (m, err) = - create_gitignore( + let custom_ig_matcher = if self.0.custom_ignore_filenames.is_empty() { + Gitignore::empty() + } else { + let (m, err) = create_gitignore( + &dir, + &dir, + &self.0.custom_ignore_filenames, + self.0.opts.ignore_case_insensitive, + ); + errs.maybe_push(err); + m + }; + let ig_matcher = if !self.0.opts.ignore { + Gitignore::empty() + } else { + let (m, err) = create_gitignore( + &dir, + &dir, + &[".ignore"], + self.0.opts.ignore_case_insensitive, + ); + errs.maybe_push(err); + m + }; + let gi_matcher = if !self.0.opts.git_ignore { + Gitignore::empty() + } else { + let (m, err) = create_gitignore( + &dir, + &dir, + &[".gitignore"], + self.0.opts.ignore_case_insensitive, + ); + errs.maybe_push(err); + m + }; + let gi_exclude_matcher = if !self.0.opts.git_exclude { + Gitignore::empty() + } else { + match resolve_git_commondir(dir, git_type) { + Ok(git_dir) => { + let (m, err) = create_gitignore( &dir, - &[".git/info/exclude"], + &git_dir, + &["info/exclude"], self.0.opts.ignore_case_insensitive, ); - errs.maybe_push(err); - m - }; - let has_git = - if self.0.opts.git_ignore { - dir.join(".git").exists() - } else { - false - }; + errs.maybe_push(err); + m + } + Err(err) => { + errs.maybe_push(err); + Gitignore::empty() + } + } + }; let ig = IgnoreInner { compiled: self.0.compiled.clone(), dir: dir.to_path_buf(), @@ -301,7 +310,7 @@ git_global_matcher: self.0.git_global_matcher.clone(), git_ignore_matcher: gi_matcher, git_exclude_matcher: gi_exclude_matcher, - has_git: has_git, + has_git, opts: self.0.opts, }; (ig, errs.into_error_option()) @@ -310,12 +319,16 @@ /// Returns true if at least one type of ignore rule should be matched. 
fn has_any_ignore_rules(&self) -> bool { let opts = self.0.opts; - let has_custom_ignore_files = !self.0.custom_ignore_filenames.is_empty(); + let has_custom_ignore_files = + !self.0.custom_ignore_filenames.is_empty(); let has_explicit_ignores = !self.0.explicit_ignores.is_empty(); - opts.ignore || opts.git_global || opts.git_ignore - || opts.git_exclude || has_custom_ignore_files - || has_explicit_ignores + opts.ignore + || opts.git_global + || opts.git_ignore + || opts.git_exclude + || has_custom_ignore_files + || has_explicit_ignores } /// Like `matched`, but works with a directory entry instead. @@ -350,9 +363,11 @@ // return that result immediately. Overrides have the highest // precedence. if !self.0.overrides.is_empty() { - let mat = - self.0.overrides.matched(path, is_dir) - .map(IgnoreMatch::overrides); + let mat = self + .0 + .overrides + .matched(path, is_dir) + .map(IgnoreMatch::overrides); if !mat.is_none() { return mat; } @@ -385,56 +400,72 @@ path: &Path, is_dir: bool, ) -> Match> { - let (mut m_custom_ignore, mut m_ignore, mut m_gi, mut m_gi_exclude, mut m_explicit) = - (Match::None, Match::None, Match::None, Match::None, Match::None); - let any_git = self.parents().any(|ig| ig.0.has_git); + let ( + mut m_custom_ignore, + mut m_ignore, + mut m_gi, + mut m_gi_exclude, + mut m_explicit, + ) = (Match::None, Match::None, Match::None, Match::None, Match::None); + let any_git = + !self.0.opts.require_git || self.parents().any(|ig| ig.0.has_git); let mut saw_git = false; for ig in self.parents().take_while(|ig| !ig.0.is_absolute_parent) { if m_custom_ignore.is_none() { m_custom_ignore = - ig.0.custom_ignore_matcher.matched(path, is_dir) - .map(IgnoreMatch::gitignore); + ig.0.custom_ignore_matcher + .matched(path, is_dir) + .map(IgnoreMatch::gitignore); } if m_ignore.is_none() { m_ignore = - ig.0.ignore_matcher.matched(path, is_dir) - .map(IgnoreMatch::gitignore); + ig.0.ignore_matcher + .matched(path, is_dir) + .map(IgnoreMatch::gitignore); } if any_git && !saw_git && m_gi.is_none() { m_gi = - ig.0.git_ignore_matcher.matched(path, is_dir) - .map(IgnoreMatch::gitignore); + ig.0.git_ignore_matcher + .matched(path, is_dir) + .map(IgnoreMatch::gitignore); } if any_git && !saw_git && m_gi_exclude.is_none() { m_gi_exclude = - ig.0.git_exclude_matcher.matched(path, is_dir) - .map(IgnoreMatch::gitignore); + ig.0.git_exclude_matcher + .matched(path, is_dir) + .map(IgnoreMatch::gitignore); } saw_git = saw_git || ig.0.has_git; } if self.0.opts.parents { if let Some(abs_parent_path) = self.absolute_base() { let path = abs_parent_path.join(path); - for ig in self.parents().skip_while(|ig|!ig.0.is_absolute_parent) { + for ig in + self.parents().skip_while(|ig| !ig.0.is_absolute_parent) + { if m_custom_ignore.is_none() { m_custom_ignore = - ig.0.custom_ignore_matcher.matched(&path, is_dir) - .map(IgnoreMatch::gitignore); + ig.0.custom_ignore_matcher + .matched(&path, is_dir) + .map(IgnoreMatch::gitignore); } if m_ignore.is_none() { m_ignore = - ig.0.ignore_matcher.matched(&path, is_dir) - .map(IgnoreMatch::gitignore); + ig.0.ignore_matcher + .matched(&path, is_dir) + .map(IgnoreMatch::gitignore); } if any_git && !saw_git && m_gi.is_none() { m_gi = - ig.0.git_ignore_matcher.matched(&path, is_dir) - .map(IgnoreMatch::gitignore); + ig.0.git_ignore_matcher + .matched(&path, is_dir) + .map(IgnoreMatch::gitignore); } if any_git && !saw_git && m_gi_exclude.is_none() { m_gi_exclude = - ig.0.git_exclude_matcher.matched(&path, is_dir) - .map(IgnoreMatch::gitignore); + ig.0.git_exclude_matcher + 
.matched(&path, is_dir) + .map(IgnoreMatch::gitignore); } saw_git = saw_git || ig.0.has_git; } @@ -446,16 +477,21 @@ } m_explicit = gi.matched(&path, is_dir).map(IgnoreMatch::gitignore); } - let m_global = - if any_git { - self.0.git_global_matcher - .matched(&path, is_dir) - .map(IgnoreMatch::gitignore) - } else { - Match::None - }; + let m_global = if any_git { + self.0 + .git_global_matcher + .matched(&path, is_dir) + .map(IgnoreMatch::gitignore) + } else { + Match::None + }; - m_custom_ignore.or(m_ignore).or(m_gi).or(m_gi_exclude).or(m_global).or(m_explicit) + m_custom_ignore + .or(m_ignore) + .or(m_gi) + .or(m_gi_exclude) + .or(m_global) + .or(m_explicit) } /// Returns an iterator over parent ignore matchers, including this one. @@ -526,6 +562,7 @@ git_ignore: true, git_exclude: true, ignore_case_insensitive: false, + require_git: true, }, } } @@ -535,20 +572,19 @@ /// The matcher returned won't match anything until ignore rules from /// directories are added to it. pub fn build(&self) -> Ignore { - let git_global_matcher = - if !self.opts.git_global { - Gitignore::empty() - } else { - let mut builder = GitignoreBuilder::new(""); - builder - .case_insensitive(self.opts.ignore_case_insensitive) - .unwrap(); - let (gi, err) = builder.build_global(); - if let Some(err) = err { - debug!("{}", err); - } - gi - }; + let git_global_matcher = if !self.opts.git_global { + Gitignore::empty() + } else { + let mut builder = GitignoreBuilder::new(""); + builder + .case_insensitive(self.opts.ignore_case_insensitive) + .unwrap(); + let (gi, err) = builder.build_global(); + if let Some(err) = err { + debug!("{}", err); + } + gi + }; Ignore(Arc::new(IgnoreInner { compiled: Arc::new(RwLock::new(HashMap::new())), @@ -559,7 +595,9 @@ is_absolute_parent: true, absolute_base: None, explicit_ignores: Arc::new(self.explicit_ignores.clone()), - custom_ignore_filenames: Arc::new(self.custom_ignore_filenames.clone()), + custom_ignore_filenames: Arc::new( + self.custom_ignore_filenames.clone(), + ), custom_ignore_matcher: Gitignore::empty(), ignore_matcher: Gitignore::empty(), git_global_matcher: Arc::new(git_global_matcher), @@ -604,7 +642,7 @@ /// later names. pub fn add_custom_ignore_filename>( &mut self, - file_name: S + file_name: S, ) -> &mut IgnoreBuilder { self.custom_ignore_filenames.push(file_name.as_ref().to_os_string()); self @@ -675,6 +713,16 @@ self } + /// Whether a git repository is required to apply git-related ignore + /// rules (global rules, .gitignore and local exclude rules). + /// + /// When disabled, git-related ignore rules are applied even when searching + /// outside a git repository. + pub fn require_git(&mut self, yes: bool) -> &mut IgnoreBuilder { + self.opts.require_git = yes; + self + } + /// Process ignore files case insensitively /// /// This is disabled by default. @@ -689,12 +737,15 @@ /// Creates a new gitignore matcher for the directory given. /// -/// Ignore globs are extracted from each of the file names in `dir` in the -/// order given (earlier names have lower precedence than later names). +/// The matcher is meant to match files below `dir`. +/// Ignore globs are extracted from each of the file names relative to +/// `dir_for_ignorefile` in the order given (earlier names have lower +/// precedence than later names). /// /// I/O errors are ignored. 
pub fn create_gitignore>( dir: &Path, + dir_for_ignorefile: &Path, names: &[T], case_insensitive: bool, ) -> (Gitignore, Option) { @@ -702,8 +753,22 @@ let mut errs = PartialErrorBuilder::default(); builder.case_insensitive(case_insensitive).unwrap(); for name in names { - let gipath = dir.join(name.as_ref()); - errs.maybe_push_ignore_io(builder.add(gipath)); + let gipath = dir_for_ignorefile.join(name.as_ref()); + // This check is not necessary, but is added for performance. Namely, + // a simple stat call checking for existence can often be just a bit + // quicker than actually trying to open a file. Since the number of + // directories without ignore files likely greatly exceeds the number + // with ignore files, this check generally makes sense. + // + // However, until demonstrated otherwise, we speculatively do not do + // this on Windows since Windows is notorious for having slow file + // system operations. Namely, it's not clear whether this analysis + // makes sense on Windows. + // + // For more details: https://github.com/BurntSushi/ripgrep/pull/1381 + if cfg!(windows) || gipath.exists() { + errs.maybe_push_ignore_io(builder.add(gipath)); + } } let gi = match builder.build() { Ok(gi) => gi, @@ -715,6 +780,60 @@ (gi, errs.into_error_option()) } +/// Find the GIT_COMMON_DIR for the given git worktree. +/// +/// This is the directory that may contain a private ignore file +/// "info/exclude". Unlike git, this function does *not* read environment +/// variables GIT_DIR and GIT_COMMON_DIR, because it is not clear how to use +/// them when multiple repositories are searched. +/// +/// Some I/O errors are ignored. +fn resolve_git_commondir( + dir: &Path, + git_type: Option, +) -> Result> { + let git_dir_path = || dir.join(".git"); + let git_dir = git_dir_path(); + if !git_type.map_or(false, |ft| ft.is_file()) { + return Ok(git_dir); + } + let file = match File::open(git_dir) { + Ok(file) => io::BufReader::new(file), + Err(err) => { + return Err(Some(Error::Io(err).with_path(git_dir_path()))); + } + }; + let dot_git_line = match file.lines().next() { + Some(Ok(line)) => line, + Some(Err(err)) => { + return Err(Some(Error::Io(err).with_path(git_dir_path()))); + } + None => return Err(None), + }; + if !dot_git_line.starts_with("gitdir: ") { + return Err(None); + } + let real_git_dir = PathBuf::from(&dot_git_line["gitdir: ".len()..]); + let git_commondir_file = || real_git_dir.join("commondir"); + let file = match File::open(git_commondir_file()) { + Ok(file) => io::BufReader::new(file), + Err(_) => return Err(None), + }; + let commondir_line = match file.lines().next() { + Some(Ok(line)) => line, + Some(Err(err)) => { + return Err(Some(Error::Io(err).with_path(git_commondir_file()))); + } + None => return Err(None), + }; + let commondir_abs = if commondir_line.starts_with(".") { + real_git_dir.join(commondir_line) // relative commondir + } else { + PathBuf::from(commondir_line) + }; + Ok(commondir_abs) +} + #[cfg(test)] mod tests { use std::fs::{self, File}; @@ -742,19 +861,19 @@ } } - fn tmpdir(prefix: &str) -> TempDir { + fn tmpdir() -> TempDir { TempDir::new().unwrap() } #[test] fn explicit_ignore() { - let td = tmpdir("ignore-test-"); + let td = tmpdir(); wfile(td.path().join("not-an-ignore"), "foo\n!bar"); let (gi, err) = Gitignore::new(td.path().join("not-an-ignore")); assert!(err.is_none()); - let (ig, err) = IgnoreBuilder::new() - .add_ignore(gi).build().add_child(td.path()); + let (ig, err) = + IgnoreBuilder::new().add_ignore(gi).build().add_child(td.path()); 
assert!(err.is_none()); assert!(ig.matched("foo", false).is_ignore()); assert!(ig.matched("bar", false).is_whitelist()); @@ -763,7 +882,7 @@ #[test] fn git_exclude() { - let td = tmpdir("ignore-test-"); + let td = tmpdir(); mkdirp(td.path().join(".git/info")); wfile(td.path().join(".git/info/exclude"), "foo\n!bar"); @@ -776,7 +895,7 @@ #[test] fn gitignore() { - let td = tmpdir("ignore-test-"); + let td = tmpdir(); mkdirp(td.path().join(".git")); wfile(td.path().join(".gitignore"), "foo\n!bar"); @@ -789,7 +908,7 @@ #[test] fn gitignore_no_git() { - let td = tmpdir("ignore-test-"); + let td = tmpdir(); wfile(td.path().join(".gitignore"), "foo\n!bar"); let (ig, err) = IgnoreBuilder::new().build().add_child(td.path()); @@ -800,8 +919,23 @@ } #[test] + fn gitignore_allowed_no_git() { + let td = tmpdir(); + wfile(td.path().join(".gitignore"), "foo\n!bar"); + + let (ig, err) = IgnoreBuilder::new() + .require_git(false) + .build() + .add_child(td.path()); + assert!(err.is_none()); + assert!(ig.matched("foo", false).is_ignore()); + assert!(ig.matched("bar", false).is_whitelist()); + assert!(ig.matched("baz", false).is_none()); + } + + #[test] fn ignore() { - let td = tmpdir("ignore-test-"); + let td = tmpdir(); wfile(td.path().join(".ignore"), "foo\n!bar"); let (ig, err) = IgnoreBuilder::new().build().add_child(td.path()); @@ -813,13 +947,14 @@ #[test] fn custom_ignore() { - let td = tmpdir("ignore-test-"); + let td = tmpdir(); let custom_ignore = ".customignore"; wfile(td.path().join(custom_ignore), "foo\n!bar"); let (ig, err) = IgnoreBuilder::new() .add_custom_ignore_filename(custom_ignore) - .build().add_child(td.path()); + .build() + .add_child(td.path()); assert!(err.is_none()); assert!(ig.matched("foo", false).is_ignore()); assert!(ig.matched("bar", false).is_whitelist()); @@ -829,14 +964,15 @@ // Tests that a custom ignore file will override an .ignore. #[test] fn custom_ignore_over_ignore() { - let td = tmpdir("ignore-test-"); + let td = tmpdir(); let custom_ignore = ".customignore"; wfile(td.path().join(".ignore"), "foo"); wfile(td.path().join(custom_ignore), "!foo"); let (ig, err) = IgnoreBuilder::new() .add_custom_ignore_filename(custom_ignore) - .build().add_child(td.path()); + .build() + .add_child(td.path()); assert!(err.is_none()); assert!(ig.matched("foo", false).is_whitelist()); } @@ -844,7 +980,7 @@ // Tests that earlier custom ignore files have lower precedence than later. #[test] fn custom_ignore_precedence() { - let td = tmpdir("ignore-test-"); + let td = tmpdir(); let custom_ignore1 = ".customignore1"; let custom_ignore2 = ".customignore2"; wfile(td.path().join(custom_ignore1), "foo"); @@ -853,7 +989,8 @@ let (ig, err) = IgnoreBuilder::new() .add_custom_ignore_filename(custom_ignore1) .add_custom_ignore_filename(custom_ignore2) - .build().add_child(td.path()); + .build() + .add_child(td.path()); assert!(err.is_none()); assert!(ig.matched("foo", false).is_whitelist()); } @@ -861,7 +998,7 @@ // Tests that an .ignore will override a .gitignore. #[test] fn ignore_over_gitignore() { - let td = tmpdir("ignore-test-"); + let td = tmpdir(); wfile(td.path().join(".gitignore"), "foo"); wfile(td.path().join(".ignore"), "!foo"); @@ -873,7 +1010,7 @@ // Tests that exclude has lower precedent than both .ignore and .gitignore. 
#[test] fn exclude_lowest() { - let td = tmpdir("ignore-test-"); + let td = tmpdir(); wfile(td.path().join(".gitignore"), "!foo"); wfile(td.path().join(".ignore"), "!bar"); mkdirp(td.path().join(".git/info")); @@ -888,7 +1025,7 @@ #[test] fn errored() { - let td = tmpdir("ignore-test-"); + let td = tmpdir(); wfile(td.path().join(".gitignore"), "{foo"); let (_, err) = IgnoreBuilder::new().build().add_child(td.path()); @@ -897,7 +1034,7 @@ #[test] fn errored_both() { - let td = tmpdir("ignore-test-"); + let td = tmpdir(); wfile(td.path().join(".gitignore"), "{foo"); wfile(td.path().join(".ignore"), "{bar"); @@ -907,7 +1044,7 @@ #[test] fn errored_partial() { - let td = tmpdir("ignore-test-"); + let td = tmpdir(); mkdirp(td.path().join(".git")); wfile(td.path().join(".gitignore"), "{foo\nbar"); @@ -918,7 +1055,7 @@ #[test] fn errored_partial_and_ignore() { - let td = tmpdir("ignore-test-"); + let td = tmpdir(); wfile(td.path().join(".gitignore"), "{foo\nbar"); wfile(td.path().join(".ignore"), "!bar"); @@ -929,7 +1066,7 @@ #[test] fn not_present_empty() { - let td = tmpdir("ignore-test-"); + let td = tmpdir(); let (_, err) = IgnoreBuilder::new().build().add_child(td.path()); assert!(err.is_none()); @@ -939,7 +1076,7 @@ fn stops_at_git_dir() { // This tests that .gitignore files beyond a .git barrier aren't // matched, but .ignore files are. - let td = tmpdir("ignore-test-"); + let td = tmpdir(); mkdirp(td.path().join(".git")); mkdirp(td.path().join("foo/.git")); wfile(td.path().join(".gitignore"), "foo"); @@ -960,7 +1097,7 @@ #[test] fn absolute_parent() { - let td = tmpdir("ignore-test-"); + let td = tmpdir(); mkdirp(td.path().join(".git")); mkdirp(td.path().join("foo")); wfile(td.path().join(".gitignore"), "bar"); @@ -983,7 +1120,7 @@ #[test] fn absolute_parent_anchored() { - let td = tmpdir("ignore-test-"); + let td = tmpdir(); mkdirp(td.path().join(".git")); mkdirp(td.path().join("src/llvm")); wfile(td.path().join(".gitignore"), "/llvm/\nfoo"); @@ -1000,4 +1137,49 @@ assert!(ig2.matched("foo", false).is_ignore()); assert!(ig2.matched("src/foo", false).is_ignore()); } + + #[test] + fn git_info_exclude_in_linked_worktree() { + let td = tmpdir(); + let git_dir = td.path().join(".git"); + mkdirp(git_dir.join("info")); + wfile(git_dir.join("info/exclude"), "ignore_me"); + mkdirp(git_dir.join("worktrees/linked-worktree")); + let commondir_path = + || git_dir.join("worktrees/linked-worktree/commondir"); + mkdirp(td.path().join("linked-worktree")); + let worktree_git_dir_abs = format!( + "gitdir: {}", + git_dir.join("worktrees/linked-worktree").to_str().unwrap(), + ); + wfile(td.path().join("linked-worktree/.git"), &worktree_git_dir_abs); + + // relative commondir + wfile(commondir_path(), "../.."); + let ib = IgnoreBuilder::new().build(); + let (ignore, err) = ib.add_child(td.path().join("linked-worktree")); + assert!(err.is_none()); + assert!(ignore.matched("ignore_me", false).is_ignore()); + + // absolute commondir + wfile(commondir_path(), git_dir.to_str().unwrap()); + let (ignore, err) = ib.add_child(td.path().join("linked-worktree")); + assert!(err.is_none()); + assert!(ignore.matched("ignore_me", false).is_ignore()); + + // missing commondir file + assert!(fs::remove_file(commondir_path()).is_ok()); + let (_, err) = ib.add_child(td.path().join("linked-worktree")); + // We squash the error in this case, because it occurs in repositories + // that are not linked worktrees but have submodules. 
+ assert!(err.is_none()); + + wfile(td.path().join("linked-worktree/.git"), "garbage"); + let (_, err) = ib.add_child(td.path().join("linked-worktree")); + assert!(err.is_none()); + + wfile(td.path().join("linked-worktree/.git"), "gitdir: garbage"); + let (_, err) = ib.add_child(td.path().join("linked-worktree")); + assert!(err.is_none()); + } } diff -Nru rust-ignore-0.4.10/src/gitignore.rs rust-ignore-0.4.16/src/gitignore.rs --- rust-ignore-0.4.10/src/gitignore.rs 2019-08-01 20:47:08.000000000 +0000 +++ rust-ignore-0.4.16/src/gitignore.rs 2020-03-15 13:36:53.000000000 +0000 @@ -249,7 +249,7 @@ return Match::None; } let path = path.as_ref(); - let _matches = self.matches.as_ref().unwrap().get_default(); + let _matches = self.matches.as_ref().unwrap().get_or_default(); let mut matches = _matches.borrow_mut(); let candidate = Candidate::new(path); self.set.matches_candidate_into(&candidate, &mut *matches); @@ -332,13 +332,10 @@ pub fn build(&self) -> Result { let nignore = self.globs.iter().filter(|g| !g.is_whitelist()).count(); let nwhite = self.globs.iter().filter(|g| g.is_whitelist()).count(); - let set = - self.builder.build().map_err(|err| { - Error::Glob { - glob: None, - err: err.to_string(), - } - })?; + let set = self + .builder + .build() + .map_err(|err| Error::Glob { glob: None, err: err.to_string() })?; Ok(Gitignore { set: set, root: self.root.clone(), @@ -499,18 +496,15 @@ if glob.actual.ends_with("/**") { glob.actual = format!("{}/*", glob.actual); } - let parsed = - GlobBuilder::new(&glob.actual) - .literal_separator(true) - .case_insensitive(self.case_insensitive) - .backslash_escape(true) - .build() - .map_err(|err| { - Error::Glob { - glob: Some(glob.original.clone()), - err: err.kind().to_string(), - } - })?; + let parsed = GlobBuilder::new(&glob.actual) + .literal_separator(true) + .case_insensitive(self.case_insensitive) + .backslash_escape(true) + .build() + .map_err(|err| Error::Glob { + glob: Some(glob.original.clone()), + err: err.kind().to_string(), + })?; self.builder.add(parsed); self.globs.push(glob); Ok(self) @@ -537,7 +531,7 @@ /// /// Note that the file path returned may not exist. fn gitconfig_excludes_path() -> Option { - // git supports $HOME/.gitconfig and $XDG_CONFIG_DIR/git/config. Notably, + // git supports $HOME/.gitconfig and $XDG_CONFIG_HOME/git/config. Notably, // both can be active at the same time, where $HOME/.gitconfig takes // precedent. So if $HOME/.gitconfig defines a `core.excludesFile`, then // we're done. @@ -568,7 +562,7 @@ } /// Returns the file contents of git's global config file, if one exists, in -/// the user's XDG_CONFIG_DIR directory. +/// the user's XDG_CONFIG_HOME directory. fn gitconfig_xdg_contents() -> Option> { let path = env::var_os("XDG_CONFIG_HOME") .and_then(|x| if x.is_empty() { None } else { Some(PathBuf::from(x)) }) @@ -599,9 +593,8 @@ // probably works in more circumstances. I guess we would ideally have // a full INI parser. Yuck. lazy_static! 
{ - static ref RE: Regex = Regex::new( - r"(?im)^\s*excludesfile\s*=\s*(.+)\s*$" - ).unwrap(); + static ref RE: Regex = + Regex::new(r"(?im)^\s*excludesfile\s*=\s*(.+)\s*$").unwrap(); }; let caps = match RE.captures(data) { None => return None, @@ -630,8 +623,8 @@ #[cfg(test)] mod tests { - use std::path::Path; use super::{Gitignore, GitignoreBuilder}; + use std::path::Path; fn gi_from_str>(root: P, s: &str) -> Gitignore { let mut builder = GitignoreBuilder::new(root); @@ -726,8 +719,11 @@ not_ignored!(ignot12, ROOT, "\n\n\n", "foo"); not_ignored!(ignot13, ROOT, "foo/**", "foo", true); not_ignored!( - ignot14, "./third_party/protobuf", "m4/ltoptions.m4", - "./third_party/protobuf/csharp/src/packages/repositories.config"); + ignot14, + "./third_party/protobuf", + "m4/ltoptions.m4", + "./third_party/protobuf/csharp/src/packages/repositories.config" + ); not_ignored!(ignot15, ROOT, "!/bar", "foo/bar"); not_ignored!(ignot16, ROOT, "*\n!**/", "foo", true); not_ignored!(ignot17, ROOT, "src/*.rs", "src/grep/src/main.rs"); @@ -771,9 +767,12 @@ #[test] fn case_insensitive() { let gi = GitignoreBuilder::new(ROOT) - .case_insensitive(true).unwrap() - .add_str(None, "*.html").unwrap() - .build().unwrap(); + .case_insensitive(true) + .unwrap() + .add_str(None, "*.html") + .unwrap() + .build() + .unwrap(); assert!(gi.matched("foo.html", false).is_ignore()); assert!(gi.matched("foo.HTML", false).is_ignore()); assert!(!gi.matched("foo.htm", false).is_ignore()); diff -Nru rust-ignore-0.4.10/src/lib.rs rust-ignore-0.4.16/src/lib.rs --- rust-ignore-0.4.10/src/lib.rs 2019-08-06 13:44:35.000000000 +0000 +++ rust-ignore-0.4.16/src/lib.rs 2020-05-09 02:37:54.000000000 +0000 @@ -46,7 +46,6 @@ #![deny(missing_docs)] -extern crate crossbeam_channel as channel; extern crate globset; #[macro_use] extern crate lazy_static; @@ -65,12 +64,16 @@ use std::io; use std::path::{Path, PathBuf}; -pub use walk::{DirEntry, Walk, WalkBuilder, WalkParallel, WalkState}; +pub use walk::{ + DirEntry, ParallelVisitor, ParallelVisitorBuilder, Walk, WalkBuilder, + WalkParallel, WalkState, +}; +mod default_types; mod dir; pub mod gitignore; -mod pathutil; pub mod overrides; +mod pathutil; pub mod types; mod walk; @@ -143,20 +146,14 @@ Error::WithDepth { depth, ref err } => { Error::WithDepth { depth: depth, err: err.clone() } } - Error::Loop { ref ancestor, ref child } => { - Error::Loop { - ancestor: ancestor.clone(), - child: child.clone() - } - } - Error::Io(ref err) => { - match err.raw_os_error() { - Some(e) => Error::Io(io::Error::from_raw_os_error(e)), - None => { - Error::Io(io::Error::new(err.kind(), err.to_string())) - } - } - } + Error::Loop { ref ancestor, ref child } => Error::Loop { + ancestor: ancestor.clone(), + child: child.clone(), + }, + Error::Io(ref err) => match err.raw_os_error() { + Some(e) => Error::Io(io::Error::from_raw_os_error(e)), + None => Error::Io(io::Error::new(err.kind(), err.to_string())), + }, Error::Glob { ref glob, ref err } => { Error::Glob { glob: glob.clone(), err: err.clone() } } @@ -219,19 +216,14 @@ /// Turn an error into a tagged error with the given depth. fn with_depth(self, depth: usize) -> Error { - Error::WithDepth { - depth: depth, - err: Box::new(self), - } + Error::WithDepth { depth: depth, err: Box::new(self) } } /// Turn an error into a tagged error with the given file path and line /// number. If path is empty, then it is omitted from the error. 
fn tagged>(self, path: P, lineno: u64) -> Error { - let errline = Error::WithLineNumber { - line: lineno, - err: Box::new(self), - }; + let errline = + Error::WithLineNumber { line: lineno, err: Box::new(self) }; if path.as_ref().as_os_str().is_empty() { return errline; } @@ -253,16 +245,14 @@ let path = err.path().map(|p| p.to_path_buf()); let mut ig_err = Error::Io(io::Error::from(err)); if let Some(path) = path { - ig_err = Error::WithPath { - path: path, - err: Box::new(ig_err), - }; + ig_err = Error::WithPath { path: path, err: Box::new(ig_err) }; } ig_err } } impl error::Error for Error { + #[allow(deprecated)] fn description(&self) -> &str { match *self { Error::Partial(_) => "partial error", @@ -293,11 +283,13 @@ write!(f, "{}: {}", path.display(), err) } Error::WithDepth { ref err, .. } => err.fmt(f), - Error::Loop { ref ancestor, ref child } => { - write!(f, "File system loop found: \ + Error::Loop { ref ancestor, ref child } => write!( + f, + "File system loop found: \ {} points to an ancestor {}", - child.display(), ancestor.display()) - } + child.display(), + ancestor.display() + ), Error::Io(ref err) => err.fmt(f), Error::Glob { glob: None, ref err } => write!(f, "{}", err), Error::Glob { glob: Some(ref glob), ref err } => { @@ -306,10 +298,11 @@ Error::UnrecognizedFileType(ref ty) => { write!(f, "unrecognized file type: {}", ty) } - Error::InvalidDefinition => { - write!(f, "invalid definition (format is type:glob, e.g., \ - html:*.html)") - } + Error::InvalidDefinition => write!( + f, + "invalid definition (format is type:glob, e.g., \ + html:*.html)" + ), } } } diff -Nru rust-ignore-0.4.10/src/overrides.rs rust-ignore-0.4.16/src/overrides.rs --- rust-ignore-0.4.10/src/overrides.rs 2019-08-01 20:47:08.000000000 +0000 +++ rust-ignore-0.4.16/src/overrides.rs 2020-03-15 13:36:53.000000000 +0000 @@ -115,9 +115,7 @@ /// /// Matching is done relative to the directory path provided. pub fn new>(path: P) -> OverrideBuilder { - OverrideBuilder { - builder: GitignoreBuilder::new(path), - } + OverrideBuilder { builder: GitignoreBuilder::new(path) } } /// Builds a new override matcher from the globs added so far. @@ -240,9 +238,12 @@ #[test] fn case_insensitive() { let ov = OverrideBuilder::new(ROOT) - .case_insensitive(true).unwrap() - .add("*.html").unwrap() - .build().unwrap(); + .case_insensitive(true) + .unwrap() + .add("*.html") + .unwrap() + .build() + .unwrap(); assert!(ov.matched("foo.html", false).is_whitelist()); assert!(ov.matched("foo.HTML", false).is_whitelist()); assert!(ov.matched("foo.htm", false).is_ignore()); @@ -251,9 +252,8 @@ #[test] fn default_case_sensitive() { - let ov = OverrideBuilder::new(ROOT) - .add("*.html").unwrap() - .build().unwrap(); + let ov = + OverrideBuilder::new(ROOT).add("*.html").unwrap().build().unwrap(); assert!(ov.matched("foo.html", false).is_whitelist()); assert!(ov.matched("foo.HTML", false).is_ignore()); assert!(ov.matched("foo.htm", false).is_ignore()); diff -Nru rust-ignore-0.4.10/src/pathutil.rs rust-ignore-0.4.16/src/pathutil.rs --- rust-ignore-0.4.10/src/pathutil.rs 2019-08-01 20:47:08.000000000 +0000 +++ rust-ignore-0.4.16/src/pathutil.rs 2020-03-15 13:36:53.000000000 +0000 @@ -91,8 +91,8 @@ /// the empty string. 
#[cfg(unix)] pub fn is_file_name>(path: P) -> bool { - use std::os::unix::ffi::OsStrExt; use memchr::memchr; + use std::os::unix::ffi::OsStrExt; let path = path.as_ref().as_os_str().as_bytes(); memchr(b'/', path).is_none() @@ -113,8 +113,8 @@ pub fn file_name<'a, P: AsRef + ?Sized>( path: &'a P, ) -> Option<&'a OsStr> { - use std::os::unix::ffi::OsStrExt; use memchr::memrchr; + use std::os::unix::ffi::OsStrExt; let path = path.as_ref().as_os_str().as_bytes(); if path.is_empty() { diff -Nru rust-ignore-0.4.10/src/types.rs rust-ignore-0.4.16/src/types.rs --- rust-ignore-0.4.10/src/types.rs 2019-08-01 21:11:08.000000000 +0000 +++ rust-ignore-0.4.16/src/types.rs 2020-03-15 13:36:53.000000000 +0000 @@ -93,236 +93,10 @@ use regex::Regex; use thread_local::ThreadLocal; +use default_types::DEFAULT_TYPES; use pathutil::file_name; use {Error, Match}; -const DEFAULT_TYPES: &'static [(&'static str, &'static [&'static str])] = &[ - ("agda", &["*.agda", "*.lagda"]), - ("ats", &["*.ats", "*.dats", "*.sats", "*.hats"]), - ("aidl", &["*.aidl"]), - ("amake", &["*.mk", "*.bp"]), - ("asciidoc", &["*.adoc", "*.asc", "*.asciidoc"]), - ("asm", &["*.asm", "*.s", "*.S"]), - ("asp", &["*.aspx", "*.aspx.cs", "*.aspx.cs", "*.ascx", "*.ascx.cs", "*.ascx.vb"]), - ("avro", &["*.avdl", "*.avpr", "*.avsc"]), - ("awk", &["*.awk"]), - ("bazel", &["*.bzl", "WORKSPACE", "BUILD", "BUILD.bazel"]), - ("bitbake", &["*.bb", "*.bbappend", "*.bbclass", "*.conf", "*.inc"]), - ("brotli", &["*.br"]), - ("buildstream", &["*.bst"]), - ("bzip2", &["*.bz2", "*.tbz2"]), - ("c", &["*.[chH]", "*.[chH].in", "*.cats"]), - ("cabal", &["*.cabal"]), - ("cbor", &["*.cbor"]), - ("ceylon", &["*.ceylon"]), - ("clojure", &["*.clj", "*.cljc", "*.cljs", "*.cljx"]), - ("cmake", &["*.cmake", "CMakeLists.txt"]), - ("coffeescript", &["*.coffee"]), - ("creole", &["*.creole"]), - ("config", &["*.cfg", "*.conf", "*.config", "*.ini"]), - ("cpp", &[ - "*.[ChH]", "*.cc", "*.[ch]pp", "*.[ch]xx", "*.hh", "*.inl", - "*.[ChH].in", "*.cc.in", "*.[ch]pp.in", "*.[ch]xx.in", "*.hh.in", - ]), - ("crystal", &["Projectfile", "*.cr"]), - ("cs", &["*.cs"]), - ("csharp", &["*.cs"]), - ("cshtml", &["*.cshtml"]), - ("css", &["*.css", "*.scss"]), - ("csv", &["*.csv"]), - ("cython", &["*.pyx", "*.pxi", "*.pxd"]), - ("dart", &["*.dart"]), - ("d", &["*.d"]), - ("dhall", &["*.dhall"]), - ("docker", &["*Dockerfile*"]), - ("edn", &["*.edn"]), - ("elisp", &["*.el"]), - ("elixir", &["*.ex", "*.eex", "*.exs"]), - ("elm", &["*.elm"]), - ("erlang", &["*.erl", "*.hrl"]), - ("fidl", &["*.fidl"]), - ("fish", &["*.fish"]), - ("fortran", &[ - "*.f", "*.F", "*.f77", "*.F77", "*.pfo", - "*.f90", "*.F90", "*.f95", "*.F95", - ]), - ("fsharp", &["*.fs", "*.fsx", "*.fsi"]), - ("gap", &["*.g", "*.gap", "*.gi", "*.gd", "*.tst"]), - ("gn", &["*.gn", "*.gni"]), - ("go", &["*.go"]), - ("gzip", &["*.gz", "*.tgz"]), - ("groovy", &["*.groovy", "*.gradle"]), - ("h", &["*.h", "*.hpp"]), - ("hbs", &["*.hbs"]), - ("haskell", &["*.hs", "*.lhs", "*.cpphs", "*.c2hs", "*.hsc"]), - ("hs", &["*.hs", "*.lhs"]), - ("html", &["*.htm", "*.html", "*.ejs"]), - ("idris", &["*.idr", "*.lidr"]), - ("java", &["*.java", "*.jsp", "*.jspx", "*.properties"]), - ("jinja", &["*.j2", "*.jinja", "*.jinja2"]), - ("js", &[ - "*.js", "*.jsx", "*.vue", - ]), - ("json", &["*.json", "composer.lock"]), - ("jsonl", &["*.jsonl"]), - ("julia", &["*.jl"]), - ("jupyter", &["*.ipynb", "*.jpynb"]), - ("jl", &["*.jl"]), - ("kotlin", &["*.kt", "*.kts"]), - ("less", &["*.less"]), - ("license", &[ - // General - "COPYING", "COPYING[.-]*", - "COPYRIGHT", 
"COPYRIGHT[.-]*", - "EULA", "EULA[.-]*", - "licen[cs]e", "licen[cs]e.*", - "LICEN[CS]E", "LICEN[CS]E[.-]*", "*[.-]LICEN[CS]E*", - "NOTICE", "NOTICE[.-]*", - "PATENTS", "PATENTS[.-]*", - "UNLICEN[CS]E", "UNLICEN[CS]E[.-]*", - // GPL (gpl.txt, etc.) - "agpl[.-]*", - "gpl[.-]*", - "lgpl[.-]*", - // Other license-specific (APACHE-2.0.txt, etc.) - "AGPL-*[0-9]*", - "APACHE-*[0-9]*", - "BSD-*[0-9]*", - "CC-BY-*", - "GFDL-*[0-9]*", - "GNU-*[0-9]*", - "GPL-*[0-9]*", - "LGPL-*[0-9]*", - "MIT-*[0-9]*", - "MPL-*[0-9]*", - "OFL-*[0-9]*", - ]), - ("lisp", &["*.el", "*.jl", "*.lisp", "*.lsp", "*.sc", "*.scm"]), - ("lock", &["*.lock", "package-lock.json"]), - ("log", &["*.log"]), - ("lua", &["*.lua"]), - ("lzma", &["*.lzma"]), - ("lz4", &["*.lz4"]), - ("m4", &["*.ac", "*.m4"]), - ("make", &[ - "[Gg][Nn][Uu]makefile", "[Mm]akefile", - "[Gg][Nn][Uu]makefile.am", "[Mm]akefile.am", - "[Gg][Nn][Uu]makefile.in", "[Mm]akefile.in", - "*.mk", "*.mak" - ]), - ("mako", &["*.mako", "*.mao"]), - ("markdown", &["*.markdown", "*.md", "*.mdown", "*.mkdn"]), - ("md", &["*.markdown", "*.md", "*.mdown", "*.mkdn"]), - ("man", &["*.[0-9lnpx]", "*.[0-9][cEFMmpSx]"]), - ("matlab", &["*.m"]), - ("mk", &["mkfile"]), - ("ml", &["*.ml"]), - ("msbuild", &[ - "*.csproj", "*.fsproj", "*.vcxproj", "*.proj", "*.props", "*.targets" - ]), - ("nim", &["*.nim", "*.nimf", "*.nimble", "*.nims"]), - ("nix", &["*.nix"]), - ("objc", &["*.h", "*.m"]), - ("objcpp", &["*.h", "*.mm"]), - ("ocaml", &["*.ml", "*.mli", "*.mll", "*.mly"]), - ("org", &["*.org"]), - ("pascal", &["*.pas", "*.dpr", "*.lpr", "*.pp", "*.inc"]), - ("perl", &["*.perl", "*.pl", "*.PL", "*.plh", "*.plx", "*.pm", "*.t"]), - ("pdf", &["*.pdf"]), - ("php", &["*.php", "*.php3", "*.php4", "*.php5", "*.phtml"]), - ("pod", &["*.pod"]), - ("postscript", &[".eps", ".ps"]), - ("protobuf", &["*.proto"]), - ("ps", &["*.cdxml", "*.ps1", "*.ps1xml", "*.psd1", "*.psm1"]), - ("puppet", &["*.erb", "*.pp", "*.rb"]), - ("purs", &["*.purs"]), - ("py", &["*.py"]), - ("qmake", &["*.pro", "*.pri", "*.prf"]), - ("qml", &["*.qml"]), - ("readme", &["README*", "*README"]), - ("r", &["*.R", "*.r", "*.Rmd", "*.Rnw"]), - ("rdoc", &["*.rdoc"]), - ("robot", &["*.robot"]), - ("rst", &["*.rst"]), - ("ruby", &["Gemfile", "*.gemspec", ".irbrc", "Rakefile", "*.rb"]), - ("rust", &["*.rs"]), - ("sass", &["*.sass", "*.scss"]), - ("scala", &["*.scala", "*.sbt"]), - ("sh", &[ - // Portable/misc. 
init files - ".login", ".logout", ".profile", "profile", - // bash-specific init files - ".bash_login", "bash_login", - ".bash_logout", "bash_logout", - ".bash_profile", "bash_profile", - ".bashrc", "bashrc", "*.bashrc", - // csh-specific init files - ".cshrc", "*.cshrc", - // ksh-specific init files - ".kshrc", "*.kshrc", - // tcsh-specific init files - ".tcshrc", - // zsh-specific init files - ".zshenv", "zshenv", - ".zlogin", "zlogin", - ".zlogout", "zlogout", - ".zprofile", "zprofile", - ".zshrc", "zshrc", - // Extensions - "*.bash", "*.csh", "*.ksh", "*.sh", "*.tcsh", "*.zsh", - ]), - ("smarty", &["*.tpl"]), - ("sml", &["*.sml", "*.sig"]), - ("soy", &["*.soy"]), - ("spark", &["*.spark"]), - ("sql", &["*.sql", "*.psql"]), - ("stylus", &["*.styl"]), - ("sv", &["*.v", "*.vg", "*.sv", "*.svh", "*.h"]), - ("svg", &["*.svg"]), - ("swift", &["*.swift"]), - ("swig", &["*.def", "*.i"]), - ("systemd", &[ - "*.automount", "*.conf", "*.device", "*.link", "*.mount", "*.path", - "*.scope", "*.service", "*.slice", "*.socket", "*.swap", "*.target", - "*.timer", - ]), - ("taskpaper", &["*.taskpaper"]), - ("tcl", &["*.tcl"]), - ("tex", &["*.tex", "*.ltx", "*.cls", "*.sty", "*.bib", "*.dtx", "*.ins"]), - ("textile", &["*.textile"]), - ("thrift", &["*.thrift"]), - ("tf", &["*.tf"]), - ("ts", &["*.ts", "*.tsx"]), - ("txt", &["*.txt"]), - ("toml", &["*.toml", "Cargo.lock"]), - ("twig", &["*.twig"]), - ("vala", &["*.vala"]), - ("vb", &["*.vb"]), - ("verilog", &["*.v", "*.vh", "*.sv", "*.svh"]), - ("vhdl", &["*.vhd", "*.vhdl"]), - ("vim", &["*.vim"]), - ("vimscript", &["*.vim"]), - ("wiki", &["*.mediawiki", "*.wiki"]), - ("webidl", &["*.idl", "*.webidl", "*.widl"]), - ("xml", &[ - "*.xml", "*.xml.dist", "*.dtd", "*.xsl", "*.xslt", "*.xsd", "*.xjb", - "*.rng", "*.sch", - ]), - ("xz", &["*.xz", "*.txz"]), - ("yacc", &["*.y"]), - ("yaml", &["*.yaml", "*.yml"]), - ("zig", &["*.zig"]), - ("zsh", &[ - ".zshenv", "zshenv", - ".zlogin", "zlogin", - ".zlogout", "zlogout", - ".zprofile", "zprofile", - ".zshrc", "zshrc", - "*.zsh", - ]), - ("zstd", &["*.zst", "*.zstd"]), -]; - /// Glob represents a single glob in a set of file type definitions. /// /// There may be more than one glob for a particular file type. @@ -352,7 +126,7 @@ which: usize, /// Whether the selection was negated or not. negated: bool, - } + }, } impl<'a> Glob<'a> { @@ -366,9 +140,7 @@ pub fn file_type_def(&self) -> Option<&FileTypeDef> { match self { Glob(GlobInner::UnmatchedIgnore) => None, - Glob(GlobInner::Matched { def, .. }) => { - Some(def) - }, + Glob(GlobInner::Matched { def, .. }) => Some(def), } } } @@ -515,7 +287,7 @@ return Match::None; } }; - let mut matches = self.matches.get_default().borrow_mut(); + let mut matches = self.matches.get_or_default().borrow_mut(); self.set.matches_into(name, &mut *matches); // The highest precedent match is the last one. if let Some(&i) = matches.last() { @@ -554,10 +326,7 @@ /// of default type definitions can be added with `add_defaults`, and /// additional type definitions can be added with `select` and `negate`. 
pub fn new() -> TypesBuilder { - TypesBuilder { - types: HashMap::new(), - selections: vec![], - } + TypesBuilder { types: HashMap::new(), selections: vec![] } } /// Build the current set of file type definitions *and* selections into @@ -582,19 +351,18 @@ GlobBuilder::new(glob) .literal_separator(true) .build() - .map_err(|err| { - Error::Glob { - glob: Some(glob.to_string()), - err: err.kind().to_string(), - } - })?); + .map_err(|err| Error::Glob { + glob: Some(glob.to_string()), + err: err.kind().to_string(), + })?, + ); glob_to_selection.push((isel, iglob)); } selections.push(selection.clone().map(move |_| def)); } - let set = build_set.build().map_err(|err| { - Error::Glob { glob: None, err: err.to_string() } - })?; + let set = build_set + .build() + .map_err(|err| Error::Glob { glob: None, err: err.to_string() })?; Ok(Types { defs: defs, selections: selections, @@ -666,9 +434,14 @@ return Err(Error::InvalidDefinition); } let (key, glob) = (name.to_string(), glob.to_string()); - self.types.entry(key).or_insert_with(|| { - FileTypeDef { name: name.to_string(), globs: vec![] } - }).globs.push(glob); + self.types + .entry(key) + .or_insert_with(|| FileTypeDef { + name: name.to_string(), + globs: vec![], + }) + .globs + .push(glob); Ok(()) } @@ -695,7 +468,10 @@ 3 => { let name = parts[0]; let types_string = parts[2]; - if name.is_empty() || parts[1] != "include" || types_string.is_empty() { + if name.is_empty() + || parts[1] != "include" + || types_string.is_empty() + { return Err(Error::InvalidDefinition); } let types = types_string.split(','); @@ -705,14 +481,15 @@ return Err(Error::InvalidDefinition); } for type_name in types { - let globs = self.types.get(type_name).unwrap().globs.clone(); + let globs = + self.types.get(type_name).unwrap().globs.clone(); for glob in globs { self.add(name, &glob)?; } } Ok(()) } - _ => Err(Error::InvalidDefinition) + _ => Err(Error::InvalidDefinition), } } @@ -769,7 +546,7 @@ "rust:*.rs", "js:*.js", "foo:*.{rs,foo}", - "combo:include:html,rust" + "combo:include:html,rust", ] } @@ -803,7 +580,7 @@ "combo:include:html,python", // Bad format "combo:foobar:html,rust", - "" + "", ]; for def in bad_defs { assert!(btypes.add_def(def).is_err()); diff -Nru rust-ignore-0.4.10/src/walk.rs rust-ignore-0.4.16/src/walk.rs --- rust-ignore-0.4.10/src/walk.rs 2019-08-06 13:41:08.000000000 +0000 +++ rust-ignore-0.4.16/src/walk.rs 2020-05-09 03:36:05.000000000 +0000 @@ -4,13 +4,12 @@ use std::fs::{self, FileType, Metadata}; use std::io; use std::path::{Path, PathBuf}; -use std::sync::Arc; use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering}; +use std::sync::{Arc, Mutex}; use std::thread; use std::time::Duration; use std::vec; -use channel; use same_file::Handle; use walkdir::{self, WalkDir}; @@ -104,24 +103,15 @@ } fn new_stdin() -> DirEntry { - DirEntry { - dent: DirEntryInner::Stdin, - err: None, - } + DirEntry { dent: DirEntryInner::Stdin, err: None } } fn new_walkdir(dent: walkdir::DirEntry, err: Option) -> DirEntry { - DirEntry { - dent: DirEntryInner::Walkdir(dent), - err: err, - } + DirEntry { dent: DirEntryInner::Walkdir(dent), err: err } } fn new_raw(dent: DirEntryRaw, err: Option) -> DirEntry { - DirEntry { - dent: DirEntryInner::Raw(dent), - err: err, - } + DirEntry { dent: DirEntryInner::Raw(dent), err: err } } } @@ -182,14 +172,14 @@ match *self { Stdin => { let err = Error::Io(io::Error::new( - io::ErrorKind::Other, " has no metadata")); + io::ErrorKind::Other, + " has no metadata", + )); Err(err.with_path("")) } - Walkdir(ref x) => { - 
x.metadata().map_err(|err| { - Error::Io(io::Error::from(err)).with_path(x.path()) - }) - } + Walkdir(ref x) => x.metadata().map_err(|err| { + Error::Io(io::Error::from(err)).with_path(x.path()) + }), Raw(ref x) => x.metadata(), } } @@ -223,8 +213,8 @@ #[cfg(unix)] fn ino(&self) -> Option { - use walkdir::DirEntryExt; use self::DirEntryInner::*; + use walkdir::DirEntryExt; match *self { Stdin => None, Walkdir(ref x) => Some(x.ino()), @@ -297,7 +287,8 @@ fs::metadata(&self.path) } else { Ok(self.metadata.clone()) - }.map_err(|err| Error::Io(io::Error::from(err)).with_path(&self.path)) + } + .map_err(|err| Error::Io(io::Error::from(err)).with_path(&self.path)) } #[cfg(not(windows))] @@ -306,7 +297,8 @@ fs::metadata(&self.path) } else { fs::symlink_metadata(&self.path) - }.map_err(|err| Error::Io(io::Error::from(err)).with_path(&self.path)) + } + .map_err(|err| Error::Io(io::Error::from(err)).with_path(&self.path)) } fn file_type(&self) -> FileType { @@ -332,10 +324,7 @@ ) -> Result { let ty = ent.file_type().map_err(|err| { let err = Error::Io(io::Error::from(err)).with_path(ent.path()); - Error::WithDepth { - depth: depth, - err: Box::new(err), - } + Error::WithDepth { depth: depth, err: Box::new(err) } })?; DirEntryRaw::from_entry_os(depth, ent, ty) } @@ -348,10 +337,7 @@ ) -> Result { let md = ent.metadata().map_err(|err| { let err = Error::Io(io::Error::from(err)).with_path(ent.path()); - Error::WithDepth { - depth: depth, - err: Box::new(err), - } + Error::WithDepth { depth: depth, err: Box::new(err) } })?; Ok(DirEntryRaw { path: ent.path(), @@ -379,7 +365,8 @@ }) } - // Placeholder implementation to allow compiling on non-standard platforms (e.g. wasm32). + // Placeholder implementation to allow compiling on non-standard platforms + // (e.g. wasm32). #[cfg(not(any(windows, unix)))] fn from_entry_os( depth: usize, @@ -387,7 +374,9 @@ ty: fs::FileType, ) -> Result { Err(Error::Io(io::Error::new( - io::ErrorKind::Other, "unsupported platform"))) + io::ErrorKind::Other, + "unsupported platform", + ))) } #[cfg(windows)] @@ -396,9 +385,8 @@ pb: PathBuf, link: bool, ) -> Result { - let md = fs::metadata(&pb).map_err(|err| { - Error::Io(err).with_path(&pb) - })?; + let md = + fs::metadata(&pb).map_err(|err| Error::Io(err).with_path(&pb))?; Ok(DirEntryRaw { path: pb, ty: md.file_type(), @@ -416,9 +404,8 @@ ) -> Result { use std::os::unix::fs::MetadataExt; - let md = fs::metadata(&pb).map_err(|err| { - Error::Io(err).with_path(&pb) - })?; + let md = + fs::metadata(&pb).map_err(|err| Error::Io(err).with_path(&pb))?; Ok(DirEntryRaw { path: pb, ty: md.file_type(), @@ -428,7 +415,8 @@ }) } - // Placeholder implementation to allow compiling on non-standard platforms (e.g. wasm32). + // Placeholder implementation to allow compiling on non-standard platforms + // (e.g. wasm32). 
#[cfg(not(any(windows, unix)))] fn from_path( depth: usize, @@ -436,7 +424,9 @@ link: bool, ) -> Result { Err(Error::Io(io::Error::new( - io::ErrorKind::Other, "unsupported platform"))) + io::ErrorKind::Other, + "unsupported platform", + ))) } } @@ -499,14 +489,20 @@ sorter: Option, threads: usize, skip: Option>, + filter: Option, } #[derive(Clone)] enum Sorter { - ByName(Arc cmp::Ordering + Send + Sync + 'static>), + ByName( + Arc cmp::Ordering + Send + Sync + 'static>, + ), ByPath(Arc cmp::Ordering + Send + Sync + 'static>), } +#[derive(Clone)] +struct Filter(Arc bool + Send + Sync + 'static>); + impl fmt::Debug for WalkBuilder { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("WalkBuilder") @@ -539,6 +535,7 @@ sorter: None, threads: 0, skip: None, + filter: None, } } @@ -547,33 +544,38 @@ let follow_links = self.follow_links; let max_depth = self.max_depth; let sorter = self.sorter.clone(); - let its = self.paths.iter().map(move |p| { - if p == Path::new("-") { - (p.to_path_buf(), None) - } else { - let mut wd = WalkDir::new(p); - wd = wd.follow_links(follow_links || p.is_file()); - wd = wd.same_file_system(self.same_file_system); - if let Some(max_depth) = max_depth { - wd = wd.max_depth(max_depth); - } - if let Some(ref sorter) = sorter { - match sorter.clone() { - Sorter::ByName(cmp) => { - wd = wd.sort_by(move |a, b| { - cmp(a.file_name(), b.file_name()) - }); - } - Sorter::ByPath(cmp) => { - wd = wd.sort_by(move |a, b| { - cmp(a.path(), b.path()) - }); + let its = self + .paths + .iter() + .map(move |p| { + if p == Path::new("-") { + (p.to_path_buf(), None) + } else { + let mut wd = WalkDir::new(p); + wd = wd.follow_links(follow_links || p.is_file()); + wd = wd.same_file_system(self.same_file_system); + if let Some(max_depth) = max_depth { + wd = wd.max_depth(max_depth); + } + if let Some(ref sorter) = sorter { + match sorter.clone() { + Sorter::ByName(cmp) => { + wd = wd.sort_by(move |a, b| { + cmp(a.file_name(), b.file_name()) + }); + } + Sorter::ByPath(cmp) => { + wd = wd.sort_by(move |a, b| { + cmp(a.path(), b.path()) + }); + } } } + (p.to_path_buf(), Some(WalkEventIter::from(wd))) } - (p.to_path_buf(), Some(WalkEventIter::from(wd))) - } - }).collect::>().into_iter(); + }) + .collect::>() + .into_iter(); let ig_root = self.ig_builder.build(); Walk { its: its, @@ -582,6 +584,7 @@ ig: ig_root.clone(), max_filesize: self.max_filesize, skip: self.skip.clone(), + filter: self.filter.clone(), } } @@ -600,6 +603,7 @@ same_file_system: self.same_file_system, threads: self.threads, skip: self.skip.clone(), + filter: self.filter.clone(), } } @@ -657,8 +661,12 @@ let mut errs = PartialErrorBuilder::default(); errs.maybe_push(builder.add(path)); match builder.build() { - Ok(gi) => { self.ig_builder.add_ignore(gi); } - Err(err) => { errs.push(err); } + Ok(gi) => { + self.ig_builder.add_ignore(gi); + } + Err(err) => { + errs.push(err); + } } errs.into_error_option() } @@ -671,7 +679,7 @@ /// later names. pub fn add_custom_ignore_filename>( &mut self, - file_name: S + file_name: S, ) -> &mut WalkBuilder { self.ig_builder.add_custom_ignore_filename(file_name); self @@ -786,6 +794,16 @@ self } + /// Whether a git repository is required to apply git-related ignore + /// rules (global rules, .gitignore and local exclude rules). + /// + /// When disabled, git-related ignore rules are applied even when searching + /// outside a git repository. 
+ pub fn require_git(&mut self, yes: bool) -> &mut WalkBuilder { + self.ig_builder.require_git(yes); + self + } + /// Process ignore files case insensitively /// /// This is disabled by default. @@ -808,11 +826,9 @@ /// by `sort_by_file_name`. /// /// Note that this is not used in the parallel iterator. - pub fn sort_by_file_path( - &mut self, - cmp: F, - ) -> &mut WalkBuilder - where F: Fn(&Path, &Path) -> cmp::Ordering + Send + Sync + 'static + pub fn sort_by_file_path(&mut self, cmp: F) -> &mut WalkBuilder + where + F: Fn(&Path, &Path) -> cmp::Ordering + Send + Sync + 'static, { self.sorter = Some(Sorter::ByPath(Arc::new(cmp))); self @@ -830,7 +846,8 @@ /// /// Note that this is not used in the parallel iterator. pub fn sort_by_file_name(&mut self, cmp: F) -> &mut WalkBuilder - where F: Fn(&OsStr, &OsStr) -> cmp::Ordering + Send + Sync + 'static + where + F: Fn(&OsStr, &OsStr) -> cmp::Ordering + Send + Sync + 'static, { self.sorter = Some(Sorter::ByName(Arc::new(cmp))); self @@ -868,6 +885,23 @@ } self } + + /// Yields only entries which satisfy the given predicate and skips + /// descending into directories that do not satisfy the given predicate. + /// + /// The predicate is applied to all entries. If the predicate is + /// true, iteration carries on as normal. If the predicate is false, the + /// entry is ignored and if it is a directory, it is not descended into. + /// + /// Note that the errors for reading entries that may not satisfy the + /// predicate will still be yielded. + pub fn filter_entry
<P>
(&mut self, filter: P) -> &mut WalkBuilder + where + P: Fn(&DirEntry) -> bool + Send + Sync + 'static, + { + self.filter = Some(Filter(Arc::new(filter))); + self + } } /// Walk is a recursive directory iterator over file paths in one or more @@ -883,6 +917,7 @@ ig: Ignore, max_filesize: Option, skip: Option>, + filter: Option, } impl Walk { @@ -915,6 +950,11 @@ &ent.metadata().ok(), )); } + if let Some(Filter(filter)) = &self.filter { + if !filter(ent) { + return Ok(true); + } + } Ok(false) } } @@ -1036,7 +1076,7 @@ None => None, Some(Err(err)) => Some(Err(err)), Some(Ok(dent)) => { - if dent.file_type().is_dir() { + if walkdir_is_dir(&dent) { self.depth += 1; Some(Ok(WalkEvent::Dir(dent))) } else { @@ -1066,11 +1106,70 @@ } impl WalkState { + fn is_continue(&self) -> bool { + *self == WalkState::Continue + } + fn is_quit(&self) -> bool { *self == WalkState::Quit } } +/// A builder for constructing a visitor when using +/// [`WalkParallel::visit`](struct.WalkParallel.html#method.visit). The builder +/// will be called for each thread started by `WalkParallel`. The visitor +/// returned from each builder is then called for every directory entry. +pub trait ParallelVisitorBuilder<'s> { + /// Create per-thread `ParallelVisitor`s for `WalkParallel`. + fn build(&mut self) -> Box; +} + +impl<'a, 's, P: ParallelVisitorBuilder<'s>> ParallelVisitorBuilder<'s> + for &'a mut P +{ + fn build(&mut self) -> Box { + (**self).build() + } +} + +/// Receives files and directories for the current thread. +/// +/// Setup for the traversal can be implemented as part of +/// [`ParallelVisitorBuilder::build`](trait.ParallelVisitorBuilder.html#tymethod.build). +/// Teardown when traversal finishes can be implemented by implementing the +/// `Drop` trait on your traversal type. +pub trait ParallelVisitor: Send { + /// Receives files and directories for the current thread. This is called + /// once for every directory entry visited by traversal. + fn visit(&mut self, entry: Result) -> WalkState; +} + +struct FnBuilder { + builder: F, +} + +impl<'s, F: FnMut() -> FnVisitor<'s>> ParallelVisitorBuilder<'s> + for FnBuilder +{ + fn build(&mut self) -> Box { + let visitor = (self.builder)(); + Box::new(FnVisitorImp { visitor }) + } +} + +type FnVisitor<'s> = + Box) -> WalkState + Send + 's>; + +struct FnVisitorImp<'s> { + visitor: FnVisitor<'s>, +} + +impl<'s> ParallelVisitor for FnVisitorImp<'s> { + fn visit(&mut self, entry: Result) -> WalkState { + (self.visitor)(entry) + } +} + /// WalkParallel is a parallel recursive directory iterator over files paths /// in one or more directories. /// @@ -1088,102 +1187,114 @@ same_file_system: bool, threads: usize, skip: Option>, + filter: Option, } impl WalkParallel { /// Execute the parallel recursive directory iterator. `mkf` is called /// for each thread used for iteration. The function produced by `mkf` /// is then in turn called for each visited file path. - pub fn run( - self, - mut mkf: F, - ) where F: FnMut() -> Box) -> WalkState + Send + 'static> { - let mut f = mkf(); + pub fn run<'s, F>(self, mkf: F) + where + F: FnMut() -> FnVisitor<'s>, + { + self.visit(&mut FnBuilder { builder: mkf }) + } + + /// Execute the parallel recursive directory iterator using a custom + /// visitor. + /// + /// The builder given is used to construct a visitor for every thread + /// used by this traversal. The visitor returned from each builder is then + /// called for every directory entry seen by that thread. 
+ /// + /// Typically, creating a custom visitor is useful if you need to perform + /// some kind of cleanup once traversal is finished. This can be achieved + /// by implementing `Drop` for your builder (or for your visitor, if you + /// want to execute cleanup for every thread that is launched). + /// + /// For example, each visitor might build up a data structure of results + /// corresponding to the directory entries seen for each thread. Since each + /// visitor runs on only one thread, this build-up can be done without + /// synchronization. Then, once traversal is complete, all of the results + /// can be merged together into a single data structure. + pub fn visit(mut self, builder: &mut dyn ParallelVisitorBuilder) { let threads = self.threads(); - // TODO: Figure out how to use a bounded channel here. With an - // unbounded channel, the workers can run away and fill up memory - // with all of the file paths. But a bounded channel doesn't work since - // our producers are also are consumers, so they end up getting stuck. - // - // We probably need to rethink parallel traversal completely to fix - // this. The best case scenario would be finding a way to use rayon - // to do this. - let (tx, rx) = channel::unbounded(); - let mut any_work = false; - // Send the initial set of root paths to the pool of workers. - // Note that we only send directories. For files, we send to them the - // callback directly. - for path in self.paths { - let (dent, root_device) = - if path == Path::new("-") { + let stack = Arc::new(Mutex::new(vec![])); + { + let mut stack = stack.lock().unwrap(); + let mut visitor = builder.build(); + let mut paths = Vec::new().into_iter(); + std::mem::swap(&mut paths, &mut self.paths); + // Send the initial set of root paths to the pool of workers. Note + // that we only send directories. For files, we send to them the + // callback directly. + for path in paths { + let (dent, root_device) = if path == Path::new("-") { (DirEntry::new_stdin(), None) } else { - let root_device = - if !self.same_file_system { - None - } else { - match device_num(&path) { - Ok(root_device) => Some(root_device), - Err(err) => { - let err = Error::Io(err).with_path(path); - if f(Err(err)).is_quit() { - return; - } - continue; + let root_device = if !self.same_file_system { + None + } else { + match device_num(&path) { + Ok(root_device) => Some(root_device), + Err(err) => { + let err = Error::Io(err).with_path(path); + if visitor.visit(Err(err)).is_quit() { + return; } + continue; } - }; + } + }; match DirEntryRaw::from_path(0, path, false) { Ok(dent) => { (DirEntry::new_raw(dent, None), root_device) } Err(err) => { - if f(Err(err)).is_quit() { + if visitor.visit(Err(err)).is_quit() { return; } continue; } } }; - tx.send(Message::Work(Work { - dent: dent, - ignore: self.ig_root.clone(), - root_device: root_device, - })).unwrap(); - any_work = true; - } - // ... but there's no need to start workers if we don't need them. - if !any_work { - return; + stack.push(Message::Work(Work { + dent: dent, + ignore: self.ig_root.clone(), + root_device: root_device, + })); + } + // ... but there's no need to start workers if we don't need them. + if stack.is_empty() { + return; + } } // Create the workers and then wait for them to finish. 
- let num_waiting = Arc::new(AtomicUsize::new(0)); - let num_quitting = Arc::new(AtomicUsize::new(0)); let quit_now = Arc::new(AtomicBool::new(false)); - let mut handles = vec![]; - for _ in 0..threads { - let worker = Worker { - f: mkf(), - tx: tx.clone(), - rx: rx.clone(), - quit_now: quit_now.clone(), - is_waiting: false, - is_quitting: false, - num_waiting: num_waiting.clone(), - num_quitting: num_quitting.clone(), - threads: threads, - max_depth: self.max_depth, - max_filesize: self.max_filesize, - follow_links: self.follow_links, - skip: self.skip.clone(), - }; - handles.push(thread::spawn(|| worker.run())); - } - drop(tx); - drop(rx); - for handle in handles { - handle.join().unwrap(); - } + let num_pending = + Arc::new(AtomicUsize::new(stack.lock().unwrap().len())); + crossbeam_utils::thread::scope(|s| { + let mut handles = vec![]; + for _ in 0..threads { + let worker = Worker { + visitor: builder.build(), + stack: stack.clone(), + quit_now: quit_now.clone(), + num_pending: num_pending.clone(), + max_depth: self.max_depth, + max_filesize: self.max_filesize, + follow_links: self.follow_links, + skip: self.skip.clone(), + filter: self.filter.clone(), + }; + handles.push(s.spawn(|_| worker.run())); + } + for handle in handles { + handle.join().unwrap(); + } + }) + .unwrap(); // Pass along panics from threads } fn threads(&self) -> usize { @@ -1201,7 +1312,7 @@ /// Work items for entries that should be skipped or ignored should not /// be produced. Work(Work), - /// This instruction indicates that the worker should start quitting. + /// This instruction indicates that the worker should quit. Quit, } @@ -1273,28 +1384,22 @@ /// ignore matchers, producing new work and invoking the caller's callback. /// /// Note that a worker is *both* a producer and a consumer. -struct Worker { +struct Worker<'s> { /// The caller's callback. - f: Box) -> WalkState + Send + 'static>, - /// The push side of our mpmc queue. - tx: channel::Sender, - /// The receive side of our mpmc queue. - rx: channel::Receiver, - /// Whether all workers should quit at the next opportunity. Note that - /// this is distinct from quitting because of exhausting the contents of - /// a directory. Instead, this is used when the caller's callback indicates - /// that the iterator should quit immediately. + visitor: Box, + /// A stack of work to do. + /// + /// We use a stack instead of a channel because a stack lets us visit + /// directories in depth first order. This can substantially reduce peak + /// memory usage by keeping both the number of files path and gitignore + /// matchers in memory lower. + stack: Arc>>, + /// Whether all workers should terminate at the next opportunity. Note + /// that we need this because we don't want other `Work` to be done after + /// we quit. We wouldn't need this if have a priority channel. quit_now: Arc, - /// Whether this worker is waiting for more work. - is_waiting: bool, - /// Whether this worker has started to quit. - is_quitting: bool, - /// The number of workers waiting for more work. - num_waiting: Arc, - /// The number of workers waiting to quit. - num_quitting: Arc, - /// The total number of workers. - threads: usize, + /// The number of outstanding work items. + num_pending: Arc, /// The maximum depth of directories to descend. A value of `0` means no /// descension at all. max_depth: Option, @@ -1307,92 +1412,100 @@ /// A file handle to skip, currently is either `None` or stdout, if it's /// a file and it has been requested to skip files identical to stdout. 
skip: Option>, + /// A predicate applied to dir entries. If true, the entry and all + /// children will be skipped. + filter: Option, } -impl Worker { +impl<'s> Worker<'s> { /// Runs this worker until there is no more work left to do. /// /// The worker will call the caller's callback for all entries that aren't /// skipped by the ignore matcher. fn run(mut self) { - while let Some(mut work) = self.get_work() { - // If the work is not a directory, then we can just execute the - // caller's callback immediately and move on. - if work.is_symlink() || !work.is_dir() { - if (self.f)(Ok(work.dent)).is_quit() { - self.quit_now(); - return; - } - continue; + while let Some(work) = self.get_work() { + if let WalkState::Quit = self.run_one(work) { + self.quit_now(); } - if let Some(err) = work.add_parents() { - if (self.f)(Err(err)).is_quit() { - self.quit_now(); - return; - } + self.work_done(); + } + } + + fn run_one(&mut self, mut work: Work) -> WalkState { + // If the work is not a directory, then we can just execute the + // caller's callback immediately and move on. + if work.is_symlink() || !work.is_dir() { + return self.visitor.visit(Ok(work.dent)); + } + if let Some(err) = work.add_parents() { + let state = self.visitor.visit(Err(err)); + if state.is_quit() { + return state; } - let readdir = match work.read_dir() { - Ok(readdir) => readdir, + } + + let descend = if let Some(root_device) = work.root_device { + match is_same_file_system(root_device, work.dent.path()) { + Ok(true) => true, + Ok(false) => false, Err(err) => { - if (self.f)(Err(err)).is_quit() { - self.quit_now(); - return; - } - continue; - } - }; - let descend = - if let Some(root_device) = work.root_device { - match is_same_file_system(root_device, work.dent.path()) { - Ok(true) => true, - Ok(false) => false, - Err(err) => { - if (self.f)(Err(err)).is_quit() { - self.quit_now(); - return; - } - false - } + let state = self.visitor.visit(Err(err)); + if state.is_quit() { + return state; } - } else { - true - }; - - let depth = work.dent.depth(); - match (self.f)(Ok(work.dent)) { - WalkState::Continue => {} - WalkState::Skip => continue, - WalkState::Quit => { - self.quit_now(); - return; + false } } - if !descend { - continue; - } - if self.max_depth.map_or(false, |max| depth >= max) { - continue; + } else { + true + }; + + // Try to read the directory first before we transfer ownership + // to the provided closure. Do not unwrap it immediately, though, + // as we may receive an `Err` value e.g. in the case when we do not + // have sufficient read permissions to list the directory. + // In that case we still want to provide the closure with a valid + // entry before passing the error value. + let readdir = work.read_dir(); + let depth = work.dent.depth(); + let state = self.visitor.visit(Ok(work.dent)); + if !state.is_continue() { + return state; + } + if !descend { + return WalkState::Skip; + } + + let readdir = match readdir { + Ok(readdir) => readdir, + Err(err) => { + return self.visitor.visit(Err(err)); } - for result in readdir { - let state = self.run_one( - &work.ignore, - depth + 1, - work.root_device, - result, - ); - if state.is_quit() { - self.quit_now(); - return; - } + }; + + if self.max_depth.map_or(false, |max| depth >= max) { + return WalkState::Skip; + } + for result in readdir { + let state = self.generate_work( + &work.ignore, + depth + 1, + work.root_device, + result, + ); + if state.is_quit() { + return state; } } + WalkState::Continue } - /// Runs the worker on a single entry from a directory iterator. 
+ /// Decides whether to submit the given directory entry as a file to + /// search. /// /// If the entry is a path that should be ignored, then this is a no-op. /// Otherwise, the entry is pushed on to the queue. (The actual execution - /// of the callback happens in `run`.) + /// of the callback happens in `run_one`.) /// /// If an error occurs while reading the entry, then it is sent to the /// caller's callback. @@ -1400,7 +1513,7 @@ /// `ig` is the `Ignore` matcher for the parent directory. `depth` should /// be the depth of this entry. `result` should be the item yielded by /// a directory iterator. - fn run_one( + fn generate_work( &mut self, ig: &Ignore, depth: usize, @@ -1410,13 +1523,15 @@ let fs_dent = match result { Ok(fs_dent) => fs_dent, Err(err) => { - return (self.f)(Err(Error::from(err).with_depth(depth))); + return self + .visitor + .visit(Err(Error::from(err).with_depth(depth))); } }; let mut dent = match DirEntryRaw::from_entry(depth, &fs_dent) { Ok(dent) => DirEntry::new_raw(dent, None), Err(err) => { - return (self.f)(Err(err)); + return self.visitor.visit(Err(err)); } }; let is_symlink = dent.file_type().map_or(false, |ft| ft.is_symlink()); @@ -1425,19 +1540,19 @@ dent = match DirEntryRaw::from_path(depth, path, true) { Ok(dent) => DirEntry::new_raw(dent, None), Err(err) => { - return (self.f)(Err(err)); + return self.visitor.visit(Err(err)); } }; if dent.is_dir() { if let Err(err) = check_symlink_loop(ig, dent.path(), depth) { - return (self.f)(Err(err)); + return self.visitor.visit(Err(err)); } } } if let Some(ref stdout) = self.skip { let is_stdout = match path_equals(&dent, stdout) { Ok(is_stdout) => is_stdout, - Err(err) => return (self.f)(Err(err)), + Err(err) => return self.visitor.visit(Err(err)), }; if is_stdout { return WalkState::Continue; @@ -1454,13 +1569,15 @@ } else { false }; - - if !should_skip_path && !should_skip_filesize { - self.tx.send(Message::Work(Work { - dent: dent, - ignore: ig.clone(), - root_device: root_device, - })).unwrap(); + let should_skip_filtered = + if let Some(Filter(predicate)) = &self.filter { + !predicate(&dent) + } else { + false + }; + if !should_skip_path && !should_skip_filesize && !should_skip_filtered + { + self.send(Work { dent, ignore: ig.clone(), root_device }); } WalkState::Continue } @@ -1470,62 +1587,49 @@ /// If all work has been exhausted, then this returns None. The worker /// should then subsequently quit. fn get_work(&mut self) -> Option { + let mut value = self.recv(); loop { + // Simulate a priority channel: If quit_now flag is set, we can + // receive only quit messages. if self.is_quit_now() { - return None; + value = Some(Message::Quit) } - match self.rx.try_recv() { - Ok(Message::Work(work)) => { - self.waiting(false); - self.quitting(false); + match value { + Some(Message::Work(work)) => { return Some(work); } - Ok(Message::Quit) => { - // We can't just quit because a Message::Quit could be - // spurious. For example, it's possible to observe that - // all workers are waiting even if there's more work to - // be done. - // - // Therefore, we do a bit of a dance to wait until all - // workers have signaled that they're ready to quit before - // actually quitting. - // - // If the Quit message turns out to be spurious, then the - // loop below will break and we'll go back to looking for - // more work. 
- self.waiting(true); - self.quitting(true); - while !self.is_quit_now() { - let nwait = self.num_waiting(); - let nquit = self.num_quitting(); - // If the number of waiting workers dropped, then - // abort our attempt to quit. - if nwait < self.threads { - break; - } - // If all workers are in this quit loop, then we - // can stop. - if nquit == self.threads { - return None; - } - // Otherwise, spin. - } + Some(Message::Quit) => { + // Repeat quit message to wake up sleeping threads, if + // any. The domino effect will ensure that every thread + // will quit. + self.send_quit(); + return None; } - Err(_) => { - self.waiting(true); - self.quitting(false); - if self.num_waiting() == self.threads { - for _ in 0..self.threads { - self.tx.send(Message::Quit).unwrap(); + None => { + // Once num_pending reaches 0, it is impossible for it to + // ever increase again. Namely, it only reaches 0 once + // all jobs have run such that no jobs have produced more + // work. We have this guarantee because num_pending is + // always incremented before each job is submitted and only + // decremented once each job is completely finished. + // Therefore, if this reaches zero, then there can be no + // other job running. + if self.num_pending() == 0 { + // Every other thread is blocked at the next recv(). + // Send the initial quit message and quit. + self.send_quit(); + return None; + } + // Wait for next `Work` or `Quit` message. + loop { + if let Some(v) = self.recv() { + value = Some(v); + break; } - } else { - // You're right to consider this suspicious, but it's - // a useful heuristic to permit producers to catch up - // to consumers without burning the CPU. It is also - // useful as a means to prevent burning the CPU if only - // one worker is left doing actual work. It's not - // perfect and it doesn't leave the CPU completely - // idle, but it's not clear what else we can do. :-/ + // Our stack isn't blocking. Instead of burning the + // CPU waiting, we let the thread sleep for a bit. In + // general, this tends to only occur once the search is + // approaching termination. thread::sleep(Duration::from_millis(1)); } } @@ -1543,44 +1647,33 @@ self.quit_now.load(Ordering::SeqCst) } - /// Returns the total number of workers waiting for work. - fn num_waiting(&self) -> usize { - self.num_waiting.load(Ordering::SeqCst) + /// Returns the number of pending jobs. + fn num_pending(&self) -> usize { + self.num_pending.load(Ordering::SeqCst) } - /// Returns the total number of workers ready to quit. - fn num_quitting(&self) -> usize { - self.num_quitting.load(Ordering::SeqCst) + /// Send work. + fn send(&self, work: Work) { + self.num_pending.fetch_add(1, Ordering::SeqCst); + let mut stack = self.stack.lock().unwrap(); + stack.push(Message::Work(work)); } - /// Sets this worker's "quitting" state to the value of `yes`. - fn quitting(&mut self, yes: bool) { - if yes { - if !self.is_quitting { - self.is_quitting = true; - self.num_quitting.fetch_add(1, Ordering::SeqCst); - } - } else { - if self.is_quitting { - self.is_quitting = false; - self.num_quitting.fetch_sub(1, Ordering::SeqCst); - } - } + /// Send a quit message. + fn send_quit(&self) { + let mut stack = self.stack.lock().unwrap(); + stack.push(Message::Quit); } - /// Sets this worker's "waiting" state to the value of `yes`. 
- fn waiting(&mut self, yes: bool) { - if yes { - if !self.is_waiting { - self.is_waiting = true; - self.num_waiting.fetch_add(1, Ordering::SeqCst); - } - } else { - if self.is_waiting { - self.is_waiting = false; - self.num_waiting.fetch_sub(1, Ordering::SeqCst); - } - } + /// Receive work. + fn recv(&self) -> Option { + let mut stack = self.stack.lock().unwrap(); + stack.pop() + } + + /// Signal that work has been received. + fn work_done(&self) { + self.num_pending.fetch_sub(1, Ordering::SeqCst); } } @@ -1600,7 +1693,8 @@ return Err(Error::Loop { ancestor: ig.path().to_path_buf(), child: child_path.to_path_buf(), - }.with_depth(child_depth)); + } + .with_depth(child_depth)); } } Ok(()) @@ -1611,11 +1705,11 @@ fn skip_filesize( max_filesize: u64, path: &Path, - ent: &Option + ent: &Option, ) -> bool { let filesize = match *ent { Some(ref md) => Some(md.len()), - None => None + None => None, }; if let Some(fs) = filesize { @@ -1630,10 +1724,7 @@ } } -fn should_skip_entry( - ig: &Ignore, - dent: &DirEntry, -) -> bool { +fn should_skip_entry(ig: &Ignore, dent: &DirEntry) -> bool { let m = ig.matched_dir_entry(dent); if m.is_ignore() { debug!("ignoring {}: {:?}", dent.path().display(), m); @@ -1692,31 +1783,49 @@ .map_err(|err| Error::Io(err).with_path(dent.path())) } +/// Returns true if the given walkdir entry corresponds to a directory. +/// +/// This is normally just `dent.file_type().is_dir()`, but when we aren't +/// following symlinks, the root directory entry may be a symlink to a +/// directory that we *do* follow---by virtue of it being specified by the user +/// explicitly. In that case, we need to follow the symlink and query whether +/// it's a directory or not. But we only do this for root entries to avoid an +/// additional stat check in most cases. +fn walkdir_is_dir(dent: &walkdir::DirEntry) -> bool { + if dent.file_type().is_dir() { + return true; + } + if !dent.file_type().is_symlink() || dent.depth() > 0 { + return false; + } + dent.path().metadata().ok().map_or(false, |md| md.file_type().is_dir()) +} + /// Returns true if and only if the given path is on the same device as the /// given root device. 
fn is_same_file_system(root_device: u64, path: &Path) -> Result { - let dent_device = device_num(path) - .map_err(|err| Error::Io(err).with_path(path))?; + let dent_device = + device_num(path).map_err(|err| Error::Io(err).with_path(path))?; Ok(root_device == dent_device) } #[cfg(unix)] -fn device_num>(path: P)-> io::Result { +fn device_num>(path: P) -> io::Result { use std::os::unix::fs::MetadataExt; path.as_ref().metadata().map(|md| md.dev()) } - #[cfg(windows)] +#[cfg(windows)] fn device_num>(path: P) -> io::Result { - use winapi_util::{Handle, file}; + use winapi_util::{file, Handle}; let h = Handle::from_path_any(path)?; file::information(h).map(|info| info.volume_serial_number()) } #[cfg(not(any(unix, windows)))] -fn device_num>(_: P)-> io::Result { +fn device_num>(_: P) -> io::Result { Err(io::Error::new( io::ErrorKind::Other, "walkdir: same_file_system option not supported on this platform", @@ -1725,13 +1834,14 @@ #[cfg(test)] mod tests { + use std::ffi::OsStr; use std::fs::{self, File}; use std::io::Write; use std::path::Path; use std::sync::{Arc, Mutex}; - use tests::TempDir; use super::{DirEntry, WalkBuilder, WalkState}; + use tests::TempDir; fn wfile>(path: P, contents: &str) { let mut file = File::create(path).unwrap(); @@ -1816,15 +1926,11 @@ paths } - fn tmpdir(prefix: &str) -> TempDir { + fn tmpdir() -> TempDir { TempDir::new().unwrap() } - fn assert_paths( - prefix: &Path, - builder: &WalkBuilder, - expected: &[&str], - ) { + fn assert_paths(prefix: &Path, builder: &WalkBuilder, expected: &[&str]) { let got = walk_collect(prefix, builder); assert_eq!(got, mkpaths(expected), "single threaded"); let got = walk_collect_parallel(prefix, builder); @@ -1833,20 +1939,22 @@ #[test] fn no_ignores() { - let td = tmpdir("walk-test-"); + let td = tmpdir(); mkdirp(td.path().join("a/b/c")); mkdirp(td.path().join("x/y")); wfile(td.path().join("a/b/foo"), ""); wfile(td.path().join("x/y/foo"), ""); - assert_paths(td.path(), &WalkBuilder::new(td.path()), &[ - "x", "x/y", "x/y/foo", "a", "a/b", "a/b/foo", "a/b/c", - ]); + assert_paths( + td.path(), + &WalkBuilder::new(td.path()), + &["x", "x/y", "x/y/foo", "a", "a/b", "a/b/foo", "a/b/c"], + ); } #[test] fn custom_ignore() { - let td = tmpdir("walk-test-"); + let td = tmpdir(); let custom_ignore = ".customignore"; mkdirp(td.path().join("a")); wfile(td.path().join(custom_ignore), "foo"); @@ -1862,7 +1970,7 @@ #[test] fn custom_ignore_exclusive_use() { - let td = tmpdir("walk-test-"); + let td = tmpdir(); let custom_ignore = ".customignore"; mkdirp(td.path().join("a")); wfile(td.path().join(custom_ignore), "foo"); @@ -1882,7 +1990,7 @@ #[test] fn gitignore() { - let td = tmpdir("walk-test-"); + let td = tmpdir(); mkdirp(td.path().join(".git")); mkdirp(td.path().join("a")); wfile(td.path().join(".gitignore"), "foo"); @@ -1891,14 +1999,16 @@ wfile(td.path().join("bar"), ""); wfile(td.path().join("a/bar"), ""); - assert_paths(td.path(), &WalkBuilder::new(td.path()), &[ - "bar", "a", "a/bar", - ]); + assert_paths( + td.path(), + &WalkBuilder::new(td.path()), + &["bar", "a", "a/bar"], + ); } #[test] fn explicit_ignore() { - let td = tmpdir("walk-test-"); + let td = tmpdir(); let igpath = td.path().join(".not-an-ignore"); mkdirp(td.path().join("a")); wfile(&igpath, "foo"); @@ -1914,7 +2024,7 @@ #[test] fn explicit_ignore_exclusive_use() { - let td = tmpdir("walk-test-"); + let td = tmpdir(); let igpath = td.path().join(".not-an-ignore"); mkdirp(td.path().join("a")); wfile(&igpath, "foo"); @@ -1926,13 +2036,16 @@ let mut builder = 
WalkBuilder::new(td.path()); builder.standard_filters(false); assert!(builder.add_ignore(&igpath).is_none()); - assert_paths(td.path(), &builder, - &[".not-an-ignore", "bar", "a", "a/bar"]); + assert_paths( + td.path(), + &builder, + &[".not-an-ignore", "bar", "a", "a/bar"], + ); } #[test] fn gitignore_parent() { - let td = tmpdir("walk-test-"); + let td = tmpdir(); mkdirp(td.path().join(".git")); mkdirp(td.path().join("a")); wfile(td.path().join(".gitignore"), "foo"); @@ -1945,7 +2058,7 @@ #[test] fn max_depth() { - let td = tmpdir("walk-test-"); + let td = tmpdir(); mkdirp(td.path().join("a/b/c")); wfile(td.path().join("foo"), ""); wfile(td.path().join("a/foo"), ""); @@ -1953,19 +2066,23 @@ wfile(td.path().join("a/b/c/foo"), ""); let mut builder = WalkBuilder::new(td.path()); - assert_paths(td.path(), &builder, &[ - "a", "a/b", "a/b/c", "foo", "a/foo", "a/b/foo", "a/b/c/foo", - ]); + assert_paths( + td.path(), + &builder, + &["a", "a/b", "a/b/c", "foo", "a/foo", "a/b/foo", "a/b/c/foo"], + ); assert_paths(td.path(), builder.max_depth(Some(0)), &[]); assert_paths(td.path(), builder.max_depth(Some(1)), &["a", "foo"]); - assert_paths(td.path(), builder.max_depth(Some(2)), &[ - "a", "a/b", "foo", "a/foo", - ]); + assert_paths( + td.path(), + builder.max_depth(Some(2)), + &["a", "a/b", "foo", "a/foo"], + ); } #[test] fn max_filesize() { - let td = tmpdir("walk-test-"); + let td = tmpdir(); mkdirp(td.path().join("a/b")); wfile_size(td.path().join("foo"), 0); wfile_size(td.path().join("bar"), 400); @@ -1975,41 +2092,49 @@ wfile_size(td.path().join("a/baz"), 200); let mut builder = WalkBuilder::new(td.path()); - assert_paths(td.path(), &builder, &[ - "a", "a/b", "foo", "bar", "baz", "a/foo", "a/bar", "a/baz", - ]); - assert_paths(td.path(), builder.max_filesize(Some(0)), &[ - "a", "a/b", "foo" - ]); - assert_paths(td.path(), builder.max_filesize(Some(500)), &[ - "a", "a/b", "foo", "bar", "a/bar", "a/baz" - ]); - assert_paths(td.path(), builder.max_filesize(Some(50000)), &[ - "a", "a/b", "foo", "bar", "baz", "a/foo", "a/bar", "a/baz", - ]); + assert_paths( + td.path(), + &builder, + &["a", "a/b", "foo", "bar", "baz", "a/foo", "a/bar", "a/baz"], + ); + assert_paths( + td.path(), + builder.max_filesize(Some(0)), + &["a", "a/b", "foo"], + ); + assert_paths( + td.path(), + builder.max_filesize(Some(500)), + &["a", "a/b", "foo", "bar", "a/bar", "a/baz"], + ); + assert_paths( + td.path(), + builder.max_filesize(Some(50000)), + &["a", "a/b", "foo", "bar", "baz", "a/foo", "a/bar", "a/baz"], + ); } #[cfg(unix)] // because symlinks on windows are weird #[test] fn symlinks() { - let td = tmpdir("walk-test-"); + let td = tmpdir(); mkdirp(td.path().join("a/b")); symlink(td.path().join("a/b"), td.path().join("z")); wfile(td.path().join("a/b/foo"), ""); let mut builder = WalkBuilder::new(td.path()); - assert_paths(td.path(), &builder, &[ - "a", "a/b", "a/b/foo", "z", - ]); - assert_paths(td.path(), &builder.follow_links(true), &[ - "a", "a/b", "a/b/foo", "z", "z/foo", - ]); + assert_paths(td.path(), &builder, &["a", "a/b", "a/b/foo", "z"]); + assert_paths( + td.path(), + &builder.follow_links(true), + &["a", "a/b", "a/b/foo", "z", "z/foo"], + ); } #[cfg(unix)] // because symlinks on windows are weird #[test] fn first_path_not_symlink() { - let td = tmpdir("walk-test-"); + let td = tmpdir(); mkdirp(td.path().join("foo")); let dents = WalkBuilder::new(td.path().join("foo")) @@ -2020,9 +2145,9 @@ assert_eq!(1, dents.len()); assert!(!dents[0].path_is_symlink()); - let dents = walk_collect_entries_parallel( - 
&WalkBuilder::new(td.path().join("foo")), - ); + let dents = walk_collect_entries_parallel(&WalkBuilder::new( + td.path().join("foo"), + )); assert_eq!(1, dents.len()); assert!(!dents[0].path_is_symlink()); } @@ -2030,17 +2155,13 @@ #[cfg(unix)] // because symlinks on windows are weird #[test] fn symlink_loop() { - let td = tmpdir("walk-test-"); + let td = tmpdir(); mkdirp(td.path().join("a/b")); symlink(td.path().join("a"), td.path().join("a/b/c")); let mut builder = WalkBuilder::new(td.path()); - assert_paths(td.path(), &builder, &[ - "a", "a/b", "a/b/c", - ]); - assert_paths(td.path(), &builder.follow_links(true), &[ - "a", "a/b", - ]); + assert_paths(td.path(), &builder, &["a", "a/b", "a/b/c"]); + assert_paths(td.path(), &builder.follow_links(true), &["a", "a/b"]); } // It's a little tricky to test the 'same_file_system' option since @@ -2060,7 +2181,7 @@ // If our test directory actually isn't a different volume from /sys, // then this test is meaningless and we shouldn't run it. - let td = tmpdir("walk-test-"); + let td = tmpdir(); if device_num(td.path()).unwrap() == device_num("/sys").unwrap() { return; } @@ -2074,8 +2195,47 @@ // completely. let mut builder = WalkBuilder::new(td.path()); builder.follow_links(true).same_file_system(true); - assert_paths(td.path(), &builder, &[ - "same_file", "same_file/alink", - ]); + assert_paths(td.path(), &builder, &["same_file", "same_file/alink"]); + } + + #[cfg(target_os = "linux")] + #[test] + fn no_read_permissions() { + let dir_path = Path::new("/root"); + + // There's no /etc/sudoers.d, skip the test. + if !dir_path.is_dir() { + return; + } + // We're the root, so the test won't check what we want it to. + if fs::read_dir(&dir_path).is_ok() { + return; + } + + // Check that we can't descend but get an entry for the parent dir. 
+ let builder = WalkBuilder::new(&dir_path); + assert_paths(dir_path.parent().unwrap(), &builder, &["root"]); + } + + #[test] + fn filter() { + let td = tmpdir(); + mkdirp(td.path().join("a/b/c")); + mkdirp(td.path().join("x/y")); + wfile(td.path().join("a/b/foo"), ""); + wfile(td.path().join("x/y/foo"), ""); + + assert_paths( + td.path(), + &WalkBuilder::new(td.path()), + &["x", "x/y", "x/y/foo", "a", "a/b", "a/b/foo", "a/b/c"], + ); + + assert_paths( + td.path(), + &WalkBuilder::new(td.path()) + .filter_entry(|entry| entry.file_name() != OsStr::new("a")), + &["x", "x/y", "x/y/foo"], + ); } } diff -Nru rust-ignore-0.4.10/tests/gitignore_matched_path_or_any_parents_tests.rs rust-ignore-0.4.16/tests/gitignore_matched_path_or_any_parents_tests.rs --- rust-ignore-0.4.10/tests/gitignore_matched_path_or_any_parents_tests.rs 2019-08-01 20:47:08.000000000 +0000 +++ rust-ignore-0.4.16/tests/gitignore_matched_path_or_any_parents_tests.rs 2020-03-15 13:36:53.000000000 +0000 @@ -55,7 +55,6 @@ assert!(m("ROOT/file_root_33").is_none()); } - #[test] fn test_files_in_deep() { let gitignore = get_gitignore(); @@ -88,7 +87,6 @@ assert!(m("ROOT/parent_dir/file_deep_33").is_none()); } - #[test] fn test_dirs_in_root() { let gitignore = get_gitignore(); @@ -193,7 +191,6 @@ assert!(m("ROOT/dir_root_33/child_dir/file", false).is_ignore()); } - #[test] fn test_dirs_in_deep() { let gitignore = get_gitignore(); @@ -205,17 +202,13 @@ assert!(m("ROOT/parent_dir/dir_deep_00", true).is_ignore()); assert!(m("ROOT/parent_dir/dir_deep_00/file", false).is_ignore()); assert!(m("ROOT/parent_dir/dir_deep_00/child_dir", true).is_ignore()); - assert!( - m("ROOT/parent_dir/dir_deep_00/child_dir/file", false).is_ignore() - ); + assert!(m("ROOT/parent_dir/dir_deep_00/child_dir/file", false).is_ignore()); // 01 assert!(m("ROOT/parent_dir/dir_deep_01", true).is_ignore()); assert!(m("ROOT/parent_dir/dir_deep_01/file", false).is_ignore()); assert!(m("ROOT/parent_dir/dir_deep_01/child_dir", true).is_ignore()); - assert!( - m("ROOT/parent_dir/dir_deep_01/child_dir/file", false).is_ignore() - ); + assert!(m("ROOT/parent_dir/dir_deep_01/child_dir/file", false).is_ignore()); // 02 assert!(m("ROOT/parent_dir/dir_deep_02", true).is_none()); @@ -257,67 +250,51 @@ assert!(m("ROOT/parent_dir/dir_deep_20", true).is_ignore()); assert!(m("ROOT/parent_dir/dir_deep_20/file", false).is_ignore()); assert!(m("ROOT/parent_dir/dir_deep_20/child_dir", true).is_ignore()); - assert!( - m("ROOT/parent_dir/dir_deep_20/child_dir/file", false).is_ignore() - ); + assert!(m("ROOT/parent_dir/dir_deep_20/child_dir/file", false).is_ignore()); // 21 assert!(m("ROOT/parent_dir/dir_deep_21", true).is_ignore()); assert!(m("ROOT/parent_dir/dir_deep_21/file", false).is_ignore()); assert!(m("ROOT/parent_dir/dir_deep_21/child_dir", true).is_ignore()); - assert!( - m("ROOT/parent_dir/dir_deep_21/child_dir/file", false).is_ignore() - ); + assert!(m("ROOT/parent_dir/dir_deep_21/child_dir/file", false).is_ignore()); // 22 // dir itself doesn't match assert!(m("ROOT/parent_dir/dir_deep_22", true).is_none()); assert!(m("ROOT/parent_dir/dir_deep_22/file", false).is_ignore()); assert!(m("ROOT/parent_dir/dir_deep_22/child_dir", true).is_ignore()); - assert!( - m("ROOT/parent_dir/dir_deep_22/child_dir/file", false).is_ignore() - ); + assert!(m("ROOT/parent_dir/dir_deep_22/child_dir/file", false).is_ignore()); // 23 // dir itself doesn't match assert!(m("ROOT/parent_dir/dir_deep_23", true).is_none()); assert!(m("ROOT/parent_dir/dir_deep_23/file", false).is_ignore()); 
assert!(m("ROOT/parent_dir/dir_deep_23/child_dir", true).is_ignore()); - assert!( - m("ROOT/parent_dir/dir_deep_23/child_dir/file", false).is_ignore() - ); + assert!(m("ROOT/parent_dir/dir_deep_23/child_dir/file", false).is_ignore()); // 30 assert!(m("ROOT/parent_dir/dir_deep_30", true).is_ignore()); assert!(m("ROOT/parent_dir/dir_deep_30/file", false).is_ignore()); assert!(m("ROOT/parent_dir/dir_deep_30/child_dir", true).is_ignore()); - assert!( - m("ROOT/parent_dir/dir_deep_30/child_dir/file", false).is_ignore() - ); + assert!(m("ROOT/parent_dir/dir_deep_30/child_dir/file", false).is_ignore()); // 31 assert!(m("ROOT/parent_dir/dir_deep_31", true).is_ignore()); assert!(m("ROOT/parent_dir/dir_deep_31/file", false).is_ignore()); assert!(m("ROOT/parent_dir/dir_deep_31/child_dir", true).is_ignore()); - assert!( - m("ROOT/parent_dir/dir_deep_31/child_dir/file", false).is_ignore() - ); + assert!(m("ROOT/parent_dir/dir_deep_31/child_dir/file", false).is_ignore()); // 32 // dir itself doesn't match assert!(m("ROOT/parent_dir/dir_deep_32", true).is_none()); assert!(m("ROOT/parent_dir/dir_deep_32/file", false).is_ignore()); assert!(m("ROOT/parent_dir/dir_deep_32/child_dir", true).is_ignore()); - assert!( - m("ROOT/parent_dir/dir_deep_32/child_dir/file", false).is_ignore() - ); + assert!(m("ROOT/parent_dir/dir_deep_32/child_dir/file", false).is_ignore()); // 33 // dir itself doesn't match assert!(m("ROOT/parent_dir/dir_deep_33", true).is_none()); assert!(m("ROOT/parent_dir/dir_deep_33/file", false).is_ignore()); assert!(m("ROOT/parent_dir/dir_deep_33/child_dir", true).is_ignore()); - assert!( - m("ROOT/parent_dir/dir_deep_33/child_dir/file", false).is_ignore() - ); + assert!(m("ROOT/parent_dir/dir_deep_33/child_dir/file", false).is_ignore()); }
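The hunks above introduce two new ways to drive a traversal in 0.4.16: `WalkBuilder::require_git` and `WalkBuilder::filter_entry` on the builder, and `WalkParallel::visit` together with the `ParallelVisitor` / `ParallelVisitorBuilder` traits. The sketches below are illustrative only and are not part of the patch; the root directory "./", the entry name "target", and the type names `Collector` / `CollectorBuilder` are placeholders, and the trait-object signatures are assumed to match the upstream 0.4.16 API re-exported from the crate root.

A minimal serial walk, assuming the above: it prunes any entry named "target" and applies git-related ignore rules even when the walk root is not inside a git repository.

    use std::ffi::OsStr;
    use ignore::WalkBuilder;

    fn main() {
        // require_git(false): apply gitignore rules outside git repositories
        // (new in this release). filter_entry: skip matching entries and,
        // for directories, avoid descending into them at all.
        let walker = WalkBuilder::new("./")
            .require_git(false)
            .filter_entry(|entry| entry.file_name() != OsStr::new("target"))
            .build();
        for result in walker {
            match result {
                Ok(entry) => println!("{}", entry.path().display()),
                Err(err) => eprintln!("error: {}", err),
            }
        }
    }

A possible custom visitor for the parallel walker, again as a sketch under the same assumptions: `visit` hands each worker thread its own visitor, so state can be kept per thread; this version simply pushes every successfully yielded path into a shared `Arc<Mutex<Vec<PathBuf>>>`, while a lock-averse variant could buffer per thread and merge in `Drop`, as the new doc comments suggest.

    use std::path::PathBuf;
    use std::sync::{Arc, Mutex};

    use ignore::{
        DirEntry, Error, ParallelVisitor, ParallelVisitorBuilder, WalkBuilder,
        WalkState,
    };

    // Hypothetical visitor: records every successfully yielded path.
    struct Collector {
        paths: Arc<Mutex<Vec<PathBuf>>>,
    }

    impl ParallelVisitor for Collector {
        fn visit(&mut self, entry: Result<DirEntry, Error>) -> WalkState {
            if let Ok(entry) = entry {
                self.paths.lock().unwrap().push(entry.path().to_path_buf());
            }
            WalkState::Continue
        }
    }

    // Hypothetical builder: creates one Collector per worker thread.
    struct CollectorBuilder {
        paths: Arc<Mutex<Vec<PathBuf>>>,
    }

    impl<'s> ParallelVisitorBuilder<'s> for CollectorBuilder {
        fn build(&mut self) -> Box<dyn ParallelVisitor + 's> {
            Box::new(Collector { paths: Arc::clone(&self.paths) })
        }
    }

    fn main() {
        let paths = Arc::new(Mutex::new(Vec::new()));
        let mut builder = CollectorBuilder { paths: Arc::clone(&paths) };
        WalkBuilder::new("./").build_parallel().visit(&mut builder);
        println!("visited {} entries", paths.lock().unwrap().len());
    }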