fd-find-8.3.1/.cargo_vcs_info.json0000644000000001360000000000100123540ustar { "git": { "sha1": "72895675efa5842a4fe9049dfbad5736d1bf23a1" }, "path_in_vcs": "" }fd-find-8.3.1/.github/ISSUE_TEMPLATE/bug_report.yaml000064400000000000000000000022020072674642500177470ustar 00000000000000name: Bug Report description: Report a bug. title: "[BUG] " labels: bug body: - type: markdown attributes: value: | Please check out the [troubleshooting section](https://github.com/sharkdp/fd#troubleshooting) first. - type: checkboxes attributes: options: - label: I have read the troubleshooting section and still think this is a bug. required: true - type: textarea id: bug attributes: label: "Describe the bug you encountered:" validations: required: true - type: textarea id: expected attributes: label: "Describe what you expected to happen:" - type: input id: version attributes: label: "What version of `fd` are you using?" placeholder: "paste the output of `fd --version` here" validations: required: true - type: textarea id: os attributes: label: Which operating system / distribution are you on? placeholder: | Unix: paste the output of `uname -srm` and `lsb_release -a` here. Windows: please tell us your Windows version render: shell validations: required: true fd-find-8.3.1/.github/ISSUE_TEMPLATE/config.yml000064400000000000000000000000330072674642500167030ustar 00000000000000blank_issues_enabled: true fd-find-8.3.1/.github/ISSUE_TEMPLATE/feature_request.md000064400000000000000000000001700072674642500204420ustar 00000000000000--- name: Feature Request about: Suggest an idea for this project. title: '' labels: feature-request assignees: '' --- fd-find-8.3.1/.github/ISSUE_TEMPLATE/question.md000064400000000000000000000002700072674642500171070ustar 00000000000000--- name: Question about: Ask a question about 'fd'. 
title: '' labels: question assignees: '' --- **What version of `fd` are you using?** [paste the output of `fd --version` here] fd-find-8.3.1/.github/dependabot.yml000064400000000000000000000001560072674642500153660ustar 00000000000000version: 2 updates: - package-ecosystem: "cargo" directory: "/" schedule: interval: "monthly" fd-find-8.3.1/.github/workflows/CICD.yml000064400000000000000000000340470072674642500160260ustar 00000000000000name: CICD env: MIN_SUPPORTED_RUST_VERSION: "1.53.0" CICD_INTERMEDIATES_DIR: "_cicd-intermediates" on: workflow_dispatch: pull_request: push: branches: - master tags: - '*' jobs: code_quality: name: Code quality runs-on: ubuntu-20.04 steps: - name: Checkout source code uses: actions/checkout@v2 - name: Install rust toolchain uses: actions-rs/toolchain@v1 with: toolchain: stable default: true profile: minimal # minimal component installation (ie, no documentation) components: clippy, rustfmt - name: Ensure `cargo fmt` has been run uses: actions-rs/cargo@v1 with: command: fmt args: -- --check - name: Ensure MSRV is set in `clippy.toml` run: grep "^msrv = \"${{ env.MIN_SUPPORTED_RUST_VERSION }}\"\$" clippy.toml - name: Run clippy uses: actions-rs/cargo@v1 with: command: clippy args: --locked --all-targets --all-features min_version: name: Minimum supported rust version runs-on: ubuntu-20.04 steps: - name: Checkout source code uses: actions/checkout@v2 - name: Install rust toolchain (v${{ env.MIN_SUPPORTED_RUST_VERSION }}) uses: actions-rs/toolchain@v1 with: toolchain: ${{ env.MIN_SUPPORTED_RUST_VERSION }} default: true profile: minimal # minimal component installation (ie, no documentation) - name: Run tests uses: actions-rs/cargo@v1 with: command: test args: --locked build: name: ${{ matrix.job.os }} (${{ matrix.job.target }}) runs-on: ${{ matrix.job.os }} strategy: fail-fast: false matrix: job: - { os: ubuntu-20.04, target: arm-unknown-linux-gnueabihf , use-cross: true } - { os: ubuntu-20.04, target: arm-unknown-linux-musleabihf, use-cross: true } - { os: ubuntu-20.04, target: aarch64-unknown-linux-gnu , use-cross: true } - { os: ubuntu-20.04, target: i686-unknown-linux-gnu , use-cross: true } - { os: ubuntu-20.04, target: i686-unknown-linux-musl , use-cross: true } - { os: ubuntu-20.04, target: x86_64-unknown-linux-gnu } - { os: ubuntu-20.04, target: x86_64-unknown-linux-musl , use-cross: true } - { os: macos-10.15 , target: x86_64-apple-darwin } # - { os: windows-2019, target: i686-pc-windows-gnu } ## disabled; error: linker `i686-w64-mingw32-gcc` not found - { os: windows-2019, target: i686-pc-windows-msvc } - { os: windows-2019, target: x86_64-pc-windows-gnu } - { os: windows-2019, target: x86_64-pc-windows-msvc } steps: - name: Checkout source code uses: actions/checkout@v2 - name: Install prerequisites shell: bash run: | case ${{ matrix.job.target }} in arm-unknown-linux-*) sudo apt-get -y update ; sudo apt-get -y install gcc-arm-linux-gnueabihf ;; aarch64-unknown-linux-gnu) sudo apt-get -y update ; sudo apt-get -y install gcc-aarch64-linux-gnu ;; esac - name: Extract crate information shell: bash run: | echo "PROJECT_NAME=fd" >> $GITHUB_ENV echo "PROJECT_VERSION=$(sed -n 's/^version = "\(.*\)"/\1/p' Cargo.toml | head -n1)" >> $GITHUB_ENV echo "PROJECT_MAINTAINER=$(sed -n 's/^authors = \["\(.*\)"\]/\1/p' Cargo.toml)" >> $GITHUB_ENV echo "PROJECT_HOMEPAGE=$(sed -n 's/^homepage = "\(.*\)"/\1/p' Cargo.toml)" >> $GITHUB_ENV - name: Install Rust toolchain uses: actions-rs/toolchain@v1 with: toolchain: stable target: ${{ matrix.job.target }} override: true 
profile: minimal # minimal component installation (ie, no documentation) - name: Show version information (Rust, cargo, GCC) shell: bash run: | gcc --version || true rustup -V rustup toolchain list rustup default cargo -V rustc -V - name: Build uses: actions-rs/cargo@v1 with: use-cross: ${{ matrix.job.use-cross }} command: build args: --locked --release --target=${{ matrix.job.target }} - name: Strip debug information from executable id: strip shell: bash run: | # Figure out suffix of binary EXE_suffix="" case ${{ matrix.job.target }} in *-pc-windows-*) EXE_suffix=".exe" ;; esac; # Figure out what strip tool to use if any STRIP="strip" case ${{ matrix.job.target }} in arm-unknown-linux-*) STRIP="arm-linux-gnueabihf-strip" ;; aarch64-unknown-linux-gnu) STRIP="aarch64-linux-gnu-strip" ;; *-pc-windows-msvc) STRIP="" ;; esac; # Setup paths BIN_DIR="${{ env.CICD_INTERMEDIATES_DIR }}/stripped-release-bin/" mkdir -p "${BIN_DIR}" BIN_NAME="${{ env.PROJECT_NAME }}${EXE_suffix}" BIN_PATH="${BIN_DIR}/${BIN_NAME}" # Copy the release build binary to the result location cp "target/${{ matrix.job.target }}/release/${BIN_NAME}" "${BIN_DIR}" # Also strip if possible if [ -n "${STRIP}" ]; then "${STRIP}" "${BIN_PATH}" fi # Let subsequent steps know where to find the (stripped) bin echo ::set-output name=BIN_PATH::${BIN_PATH} echo ::set-output name=BIN_NAME::${BIN_NAME} - name: Set testing options id: test-options shell: bash run: | # test only library unit tests and binary for arm-type targets unset CARGO_TEST_OPTIONS unset CARGO_TEST_OPTIONS ; case ${{ matrix.job.target }} in arm-* | aarch64-*) CARGO_TEST_OPTIONS="--bin ${PROJECT_NAME}" ;; esac; echo ::set-output name=CARGO_TEST_OPTIONS::${CARGO_TEST_OPTIONS} - name: Run tests uses: actions-rs/cargo@v1 with: use-cross: ${{ matrix.job.use-cross }} command: test args: --locked --target=${{ matrix.job.target }} ${{ steps.test-options.outputs.CARGO_TEST_OPTIONS}} - name: Create tarball id: package shell: bash run: | PKG_suffix=".tar.gz" ; case ${{ matrix.job.target }} in *-pc-windows-*) PKG_suffix=".zip" ;; esac; PKG_BASENAME=${PROJECT_NAME}-v${PROJECT_VERSION}-${{ matrix.job.target }} PKG_NAME=${PKG_BASENAME}${PKG_suffix} echo ::set-output name=PKG_NAME::${PKG_NAME} PKG_STAGING="${{ env.CICD_INTERMEDIATES_DIR }}/package" ARCHIVE_DIR="${PKG_STAGING}/${PKG_BASENAME}/" mkdir -p "${ARCHIVE_DIR}" mkdir -p "${ARCHIVE_DIR}/autocomplete" # Binary cp "${{ steps.strip.outputs.BIN_PATH }}" "$ARCHIVE_DIR" # Man page cp 'doc/${{ env.PROJECT_NAME }}.1' "$ARCHIVE_DIR" # README, LICENSE and CHANGELOG files cp "README.md" "LICENSE-MIT" "LICENSE-APACHE" "CHANGELOG.md" "$ARCHIVE_DIR" # Autocompletion files cp 'target/${{ matrix.job.target }}/release/build/${{ env.PROJECT_NAME }}'*/out/'${{ env.PROJECT_NAME }}.bash' "$ARCHIVE_DIR/autocomplete/" cp 'target/${{ matrix.job.target }}/release/build/${{ env.PROJECT_NAME }}'*/out/'${{ env.PROJECT_NAME }}.fish' "$ARCHIVE_DIR/autocomplete/" cp 'target/${{ matrix.job.target }}/release/build/${{ env.PROJECT_NAME }}'*/out/'_${{ env.PROJECT_NAME }}.ps1' "$ARCHIVE_DIR/autocomplete/" cp 'contrib/completion/_fd' "$ARCHIVE_DIR/autocomplete/" # base compressed package pushd "${PKG_STAGING}/" >/dev/null case ${{ matrix.job.target }} in *-pc-windows-*) 7z -y a "${PKG_NAME}" "${PKG_BASENAME}"/* | tail -2 ;; *) tar czf "${PKG_NAME}" "${PKG_BASENAME}"/* ;; esac; popd >/dev/null # Let subsequent steps know where to find the compressed package echo ::set-output name=PKG_PATH::"${PKG_STAGING}/${PKG_NAME}" - name: Create Debian package id: debian-package 
shell: bash if: startsWith(matrix.job.os, 'ubuntu') run: | COPYRIGHT_YEARS="2018 - "$(date "+%Y") DPKG_STAGING="${{ env.CICD_INTERMEDIATES_DIR }}/debian-package" DPKG_DIR="${DPKG_STAGING}/dpkg" mkdir -p "${DPKG_DIR}" DPKG_BASENAME=${PROJECT_NAME} DPKG_CONFLICTS=${PROJECT_NAME}-musl case ${{ matrix.job.target }} in *-musl) DPKG_BASENAME=${PROJECT_NAME}-musl ; DPKG_CONFLICTS=${PROJECT_NAME} ;; esac; DPKG_VERSION=${PROJECT_VERSION} unset DPKG_ARCH case ${{ matrix.job.target }} in aarch64-*-linux-*) DPKG_ARCH=arm64 ;; arm-*-linux-*hf) DPKG_ARCH=armhf ;; i686-*-linux-*) DPKG_ARCH=i686 ;; x86_64-*-linux-*) DPKG_ARCH=amd64 ;; *) DPKG_ARCH=notset ;; esac; DPKG_NAME="${DPKG_BASENAME}_${DPKG_VERSION}_${DPKG_ARCH}.deb" echo ::set-output name=DPKG_NAME::${DPKG_NAME} # Binary install -Dm755 "${{ steps.strip.outputs.BIN_PATH }}" "${DPKG_DIR}/usr/bin/${{ steps.strip.outputs.BIN_NAME }}" # Man page install -Dm644 'doc/${{ env.PROJECT_NAME }}.1' "${DPKG_DIR}/usr/share/man/man1/${{ env.PROJECT_NAME }}.1" gzip -n --best "${DPKG_DIR}/usr/share/man/man1/${{ env.PROJECT_NAME }}.1" # Autocompletion files install -Dm644 'target/${{ matrix.job.target }}/release/build/${{ env.PROJECT_NAME }}'*/out/'${{ env.PROJECT_NAME }}.bash' "${DPKG_DIR}/usr/share/bash-completion/completions/${{ env.PROJECT_NAME }}" install -Dm644 'target/${{ matrix.job.target }}/release/build/${{ env.PROJECT_NAME }}'*/out/'${{ env.PROJECT_NAME }}.fish' "${DPKG_DIR}/usr/share/fish/vendor_completions.d/${{ env.PROJECT_NAME }}.fish" install -Dm644 'contrib/completion/_fd' "${DPKG_DIR}/usr/share/zsh/vendor-completions/_${{ env.PROJECT_NAME }}" # README and LICENSE install -Dm644 "README.md" "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/README.md" install -Dm644 "LICENSE-MIT" "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/LICENSE-MIT" install -Dm644 "LICENSE-APACHE" "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/LICENSE-APACHE" install -Dm644 "CHANGELOG.md" "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/changelog" gzip -n --best "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/changelog" cat > "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/copyright" < "${DPKG_DIR}/DEBIAN/control" <` option to limit the number of search results, see #472, #476 and #555 This can be useful to speed up searches in cases where you know that there are only N results. Using this option is also (slightly) faster than piping to `head -n ` where `fd` can only exit when it finds the search results ` + 1`. - Add the alias `-1` for `--max-results=1`, see #561. (@SimplyDanny). - Add new `--type socket` and `--type pipe` filters, see #511. - Add new `--min-depth ` and `--exact-depth ` options in addition to the existing option to limit the maximum depth. See #404. - Support additional ANSI font styles in `LS_COLORS`: faint, slow blink, rapid blink, dimmed, hidden and strikethrough. ## Bugfixes - Preserve non-UTF8 filenames: invalid UTF-8 filenames are now properly passed to child-processes when using `--exec`, `--exec-batch` or `--list-details`. In `fd`'s output, we replace non-UTF-8 sequences with the "�" character. However, if the output of `fd` goes to another process, we print the actual bytes of the filename. For more details, see #558 and #295. - `LS_COLORS` entries with unsupported font styles are not completely ignored, see #552 ## Changes - Colored output will now be enabled by default on older Windows versions. This allows the use of colored output if the terminal supports it (e.g. MinTTY, Git Bash). 
On the other hand, this will be a regression for users on older Windows versions with terminals that do not support ANSI escape sequences. Affected users can use an alias `fd="fd --color=never"` to continue using `fd` without colors. There is no change of behavior for Windows 10. See #469. - When using `--glob` in combination with `--full-path`, a `*` character does not match a path separation character (`/` or `\\`) anymore. You can use `**` for that. This allows things like `fd -p -g '/some/base/path/*/*/*.txt'` which would previously match to arbitrary depths (instead of exactly two folders below `/some/base/path`. See #404. - "Legacy" support to use `fd -exec` (with a single dash) has been removed. Use `fd -x` or `fd --exec` instead. - Overall improved error handling and error messages. ## Other - Korean translation of the README, see: [한국어](https://github.com/spearkkk/fd-kor) (@spearkkk) # v7.5.0 ## Features - Added `--one-file-system` (aliases: `--mount`, `--xdev`) to not cross file system boundaries on Unix and Windows, see #507 (@FallenWarrior2k). - Added `--base-directory` to change the working directory in which `fd` is run, see #509 and #475 (@hajdamak). - `fd` will not use colored output if the `NO_COLOR` environment variable is set, see #550 and #551 (@metadave). - `fd --exec` will return exit code 1 if one of the executed commands fails, see #526 and #531 (@fusillicode and @Giuffre) ## Bug Fixes - Fixed 'command not found' error when using zsh completion, see #487 (@barskern). - `fd -L` should include broken symlinks, see #357 and #497 (@tommilligan, @neersighted and @sharkdp) - Display directories even if we don't have permission to enter, see #437 (@sharkdp) ## Changes - A flag can now be passed multiple times without producing an error, see #488 and #496 (@rootbid). - Search results are sorted when using the `-X` option to match the behaviour of piping to `xargs`, see #441 and #524 (@Marcoleni @crash-g). # v7.4.0 ## Performance improvements - Reduce number of `stat` syscalls, improving the performance for searches where file metadata is required (`--type`, `--size`, `--changed-within`, …), see #434 (@tavianator) - Use jemalloc by default, improving the performance for almost all searches, see #481. Note that Windows and `*musl*` builds do not profit from this. ## Features - Added a new `-g`/`--glob` option to switch to glob-based searches (instead of regular expression based searches). This is accompanied by a new `--regex` option that can be used to switch back, if users want to `alias fd="fd --glob"`. See #284 - Added a new `--path-separator ` option which can be useful for Windows users who want/need `fd` to use `/` instead of `\`, see #428 and #153 (@mookid) - Added support for hidden files on Windows, see #379 - When `fd` is run with the `--exec-batch`/`-X` option, it now exposes the exit status of the command that was run, see #333. 
- Exit immediately when Ctrl-C has been pressed twice, see #423 ## Bugfixes - Make `--changed-within`/`--changed-before` work for directories, see #470 ## Other - Pre-built `fd` binaries should now be available for `armhf` targets, see #457 (@detly) - `fd` is now available on Alpine Linux, see #451 (@5paceToast) - `fd` is now in the official FreeBSD repositories, see #412 (@t6) - Added OpenBSD install instructions, see #421 (@evitalis) - Added metadata to the Debian package, see #416 (@cathalgarvey) - `fd` can be installed via npm, see #438 (@pablopunk) # v7.3.0 ## Features - New `--exec-batch `/`-X ` option for batch execution of commands, see #360 (@kimsnj). This allows you to do things like: ``` bash fd … -X vim # open all search results in vim (or any other editor) fd … -X ls -l # view detailed stats about the search results with 'ls' fd -e svg -X inkscape # open all SVG files in Inkscape ``` - Support for 24-bit color codes (when specified via `LS_COLORS`) as well as different font styles (bold, italic, underline). ## Changes - A few performance improvements, in particular when printing lots of colorized results to the console, see #370 - The `LS_COLORS` handling has been "outsourced" to a separate crate (https://github.com/sharkdp/lscolors) that is now being used by other tools as well: [fselect](https://github.com/jhspetersson/fselect), [lsd](https://github.com/Peltoche/lsd/pull/84). For details, see #363. ## Other - `fd` will be available in Ubuntu Disco Dingo (19.04), see #373 (@sylvestre) - This release should come with a static ARM binary (`arm-unknown-linux-musleabihf`), see #320 (@duncanfinney) - Various documentation improvements, see #389 ## Thanks Special thanks to @alexmaco for his awesome work on refactoring and code improvements! (see #401, #398, and #383) # v7.2.0 ## Features * Added support for filtering by file modification time by adding two new options `--changed-before ` and `--changed-within <..>`. For more details, see the `--help` text, the man page, the relevant issue #165 and the PR #339 (@kimsnj) * Added `--show-errors` option to enable the display of filesystem error messages such as "permission denied", see #311 (@psinghal20 and @majecty) * Added `--maxdepth` as a (hidden) alias for `--max-depth`, see #323 (@mqudsi) * Added `--search-path` option which can be supplied to replace the positional `path` argument at any position. ## Changes * Loosen strict handling of missing `--ignore-file`, see #280 (@psinghal20) * Re-enabled `.ignore` files, see #156. ## Bugfixes * `fd` could previously get stuck when run from the root directory in the presence of zombie processes. This curious bug has been fixed in Rust 1.29 and higher. For more details, see #288, [rust-lang/rust#50619](https://github.com/rust-lang/rust/issues/50619) and [the fix](https://github.com/rust-lang/rust/pull/50630) ## Other * `fd` has officially landed in Debian! See #345 for details. Thanks goes to @sylvestre, @paride and possibly others I don't know about. * Added Chinese translation of README (@chinanf-boy) ## Thanks A special thanks goes to @joshleeb for his amazing improvements throughout the code base (new tests, refactoring work and various other things)! # v7.1.0 ## Features * Added `--size` filter option, see #276 (@stevepentland, @JonathanxD and @alexmaco) * Added `--type empty` (or `-t e`) to search for empty files and/or directories, see #273 ## Changes * With the new version, `.gitignore` files will only be respected in Git repositories, not outside.
* A few performance improvements for `--type` searches, see 641976cf7ad311ba741571ca8b7f02b2654b6955 and 50a2bab5cd52d26d4a3bc786885a2c270ed3b227 ## Other * Starting with this release, we will offer pre-built ARM binaries, see #244 * Added instructions on how to use `fd` with `emacs`, see #282 (@redguardtoo) * `fd` is now in the official openSUSE repositories, see #275 (@avindra) * `fd` is now available via MacPorts, see #291 (@raimue) # v7.0.0 ## Features * Added `--type executable` (or `-t x`) to search for executable files only, see #246 (@PramodBisht) * Added support for `.fdignore` files, see #156 and #241. * Added `--ignore-file` option to add custom ignore files, see #156. * Suggest `--fixed-strings` on invalid regular expressions, see #234 (@PramodBisht) * Detect when user supplied path instead of pattern, see #235. ## Changes * `.ignore` and `.rgignore` files are not parsed anymore. Use `.fdignore` files or add custom files via `--ignore-file` instead. * Updated to `regex-syntax` 0.5 (@cuviper) ## Bugfixes * Properly normalize absolute paths, see #268 * Invalid utf8 filenames displayed when `-e` is used, see #250 * If `--type` is used, fifos/sockets/etc. are always shown, see #260 ## Other * Packaging: * The Arch Linux package is now simply called `fd`. * There is now a `fd` ebuild for Gentoo Linux. * There is a `scoop` package for `fd` (Windows). * There is a `Chocolatey` package for `fd` (Windows). * There is a Fedora `copr` package for `fd`. # v6.3.0 ## Features * Files with multiple extensions can now be found via `--extension`/`-e`, see #214 (@althonos) ``` bash > fd -e tar.gz ``` * Added new `-F`/`--fixed-strings`/`--literal` option that treats the pattern as a literal string instead of a regular expression, see #157 ``` bash > fd -F 'file(1).txt' ``` * Allow `-exec` to work as `--exec`, see #226 (@stevepentland) ## Bugfixes * Fixed `Ctrl-C` handling when using `--exec`, see #224 (@Doxterpepper) * Fixed wrong file owner for files in deb package, see #213 ## Other * Replaced old gif by a fancy new SVG screencast (@marionebl) * Updated [benchmark results](https://github.com/sharkdp/fd#benchmark) (fd has become faster in the meantime!). There is a new repository that hosts several benchmarking scripts for fd: https://github.com/sharkdp/fd-benchmarks # v6.2.0 ## Features * Support for filtering by multiple file extensions and multiple file types, see #199 and #177 (@tkadur). For example, it's possible to search for C++ source or header files: ``` bash > fd -e cpp -e c -e cxx -e h pattern ``` ## Changes * The size of the output buffer (for sorting search results) is now limited to 1000 entries. This improves the search speed significantly if there are a lot of results, see #191 (@sharkdp). ## Bugfixes * Fix a bug where long-running searches could not be killed via Ctrl-C, see #210 (@Doxterpepper) * fd's exit codes are now in accordance with Unix standards, see #201 (@Doxterpepper) ## Other * Bash, zsh and fish completion should now work with the Ubuntu `.deb` packages, see #195 and #209 (@tmccombs and @sharkdp) * There is a new section on how to set up `fzf` to use `fd` in the [README](https://github.com/sharkdp/fd#using-fd-with-fzf), see #168. # v6.1.0 ## Features * Support for multiple search paths, see #166 (@Doxterpepper) * Added `--no-ignore-vcs` option to disable `.gitignore` and other VCS ignore files, without disabling `.ignore` files - see #156 (@ptzz). 
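An illustrative combination of these two additions (a hypothetical invocation; the pattern and the directory names `src/` and `vendor/` are made-up placeholders, not taken from the original release notes):

``` bash
# Search several paths in one run, and skip only the VCS ignore files
# (.gitignore etc.) while still honouring .ignore files.
fd --no-ignore-vcs pattern src/ vendor/
```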
## Bugfixes * Handle terminal signals, see #128 (@Doxterpepper) * Fixed hang on `--exec` when user input was required, see #178 and #193 (@reima) ## Other * Debian packages are now created via Travis CI and should be available for this and all future releases (@tmccombs). * fd is now available on Void Linux (@maxice8) * The minimum required Rust version is now 1.20 ## Thanks @Doxterpepper deserves a special mention for his great work that is included in this release and for the support in ticket discussions and concerning Travis CI fixes. Thank you very much! Thanks also go out to @tmccombs for the work on Debian packages and for reviewing a lot of pull requests! # v6.0.0 ## Changes - The `--exec`/`-x` option does not spawn an intermediate shell anymore. This improves the performance of parallel command execution and fixes a whole class of (present and potentially future) problems with shell escaping. The drawback is that shell commands cannot directly be called with `--exec`. See #155 for the full discussion. These changes have been implemented by @reima (Thanks!). ## Bugfixes - `--exec` does not escape cmd.exe metacharacters on Windows (see #155, as above). ## Other * *fd* is now available in the FreeBSD ports (@andoriyu) * The minimal `rustc` version is now checked when building with `cargo`, see #164 (@matematikaadit) * The output directory for the shell completion files is created if it does not exist (@andoriyu) # v5.0.0 ## Features * Added new `--exec`, `-x` option for parallel command execution (@mmstick, see #84 and #116). See the corresponding [README section](https://github.com/sharkdp/fd#parallel-command-execution) for an introduction. * Auto-disable color output on unsupported Windows shells like `cmd.exe` (@iology, see #129) * Added the `--exclude`, `-X` option to suppress certain files/directories in the search results (see #89). 
* Added ripgrep aliases `-u` and `-uu` for `--no-ignore` and `--no-ignore --hidden`, respectively (@unsignedint, see #92) * Added `-i`, `--ignore-case` (@iology, see #95) * Made smart case really smart (@reima, see #103) * Added RedoxOS support (@goyox86, see #131) ## Changes * The dot `.` can now match newlines in file names (@iology, see #111) * The short `--type` argument for symlinks has been changed from `s` to `l` (@jcpetkovich, see #83) ## Bugfixes * Various improvements in root-path and symlink handling (@iology, see #82, #107, and #113) * Fixed absolute path handling on Windows (@reima, #93) * Fixed: current directory not included when using relative path (see #81) * Fixed `--type` behavior for unknown file types (@iology, see #150) * Some fixes around `--exec` (@iology, see #142) ## Other * Major updates and bugfixes to our continuous integration and deployment tooling on Travis (@matematikaadit, see #149, #145, #133) * Code style improvements & automatic style checking via `rustfmt` on Travis (@Detegr, see #99) * Added a man page (@pickfire, see #77) * *fd* has been relicensed under the dual license MIT/Apache-2.0 (@Detegr, see #105) * Major refactorings and code improvements (Big thanks to @gsquire, @reima, @iology) * First version of [`CONTRIBUTING`](https://github.com/sharkdp/fd/blob/master/CONTRIBUTING.md) guidelines * There is now a Nix package (@mehandes) * *fd* is now in the official Arch Linux repos (@cassava) * Improved tooling around shell completion files (@ImbaKnugel, see #124) * Updated tutorial in the [`README`](https://github.com/sharkdp/fd/blob/master/README.md) * The minimum required version of Rust has been bumped to 1.19. ## Thanks A *lot* of things have happened since the last release and I'd like to thank all contributors for their great support. I'd also like to thank those that have contributed by reporting bugs and by posting feature requests. I'd also like to take this chance to say a special Thank You to a few people that have stood out in one way or another: To @iology, for contributing a multitude of bugfixes, improvements and new features. To @reima and @Detegr for their continuing great support. To @mmstick, for implementing the most advanced new feature of *fd*. And to @matematikaadit for the CI/tooling upgrades. # v4.0.0 ## Features * Added filtering by file extension, for example `fd -e txt`, see #56 (@reima) * Add option to force colored output: `--color always`, see #49 (@Detegr) * Generate Shell completions for Bash, ZSH, Fish and Powershell, see #64 (@ImbaKnugel) * Better & extended `--help` text (@abaez and @Detegr) * Proper Windows support, see #70 ## Changes * The integration tests have been re-written in Rust :sparkles:, making them platform-independent and easily callable via `cargo test` - see #65 (many thanks to @reima!) * New tutorial in the README (@deg4uss3r) * Reduced number of `stat` syscalls for each result from 3 to 1, see #36. * Enabled Appveyor CI # v3.1.0 ## Features - Added file type filtering, e.g. 
`fd --type directory` or `fd -t f` (@exitium) # v3.0.0 ## Features - Directories are now traversed in parallel, leading to significant performance improvements (see [benchmarks](https://github.com/sharkdp/fd#benchmark)) - Added `--print0` option (@michaelmior) - Added AUR packages (@wezm) ## Changes - Changed short flag for `--follow` from `-f` to `-L` (consistency with `ripgrep`) # v2.0.0 * Changed `--sensitive` to `--case-sensitive` * Changed `--absolute` to `--absolute-path` * Throw an error if root directory is not existent, see #39 * Use absolute paths if the root dir is an absolute path, see #40 * Handle invalid UTF-8, see #34 #38 * Support `-V`, `--version` by switching from `getopts` to `clap`. Misc: * It's now possible to install `fd` via homebrew on macOS: `brew install fd`. # v1.1.0 - Windows compatibility (@sebasv), see #29 #35 - Safely exit on broken output pipes (e.g.: usage with `head`, `tail`, ..), see #24 - Backport for rust 1.16, see #23 # v1.0.0 * Respect `.(git)ignore` files * Use `LS_COLORS` environment variable directly, instead of `~/.dir_colors` file. * Added unit and integration tests * Added optional second argument (search path) # v0.3.0 - Parse dircolors files, closes #20 - Colorize each path component, closes #19 - Add short command line option for --hidden, see #18 # v0.2.0 - Option to follow symlinks, disable colors, closes #16, closes #17 - `--filename` instead of `--full-path` - Option to search hidden directories, closes #12 - Configurable search depth, closes #13 - Detect interactive terminal, closes #11 # v0.1.0 Initial release fd-find-8.3.1/CONTRIBUTING.md000064400000000000000000000031170072674642500134270ustar 00000000000000## Contributing to *fd* **Thank you very much for considering to contribute to this project!** We welcome any form of contribution: * New issues (feature requests, bug reports, questions, ideas, ...) * Pull requests (documentation improvements, code improvements, new features, ...) **Note**: Before you take the time to open a pull request, please open a ticket first. This will give us the chance to discuss any potential changes first. ## Add an entry to the changelog If your contribution changes the behavior of `fd` (as opposed to a typo-fix in the documentation), please update the [`CHANGELOG.md`](CHANGELOG.md) file and describe your changes. This makes the release process much easier and therefore helps to get your changes into a new `fd` release faster. The top of the `CHANGELOG` contains a *"unreleased"* section with a few subsections (Features, Bugfixes, …). Please add your entry to the subsection that best describes your change. Entries follow this format: ``` - Short description of what has been changed, see #123 (@user) ``` Here, `#123` is the number of the original issue and/or your pull request. Please replace `@user` by your GitHub username. ## Important links * [Open issues](https://github.com/sharkdp/fd/issues) * [Open pull requests](https://github.com/sharkdp/fd/pulls) * [Development section in the README](https://github.com/sharkdp/fd#development) * [fd on crates.io](https://crates.io/crates/fd-find) * [LICENSE-APACHE](https://github.com/sharkdp/fd/blob/master/LICENSE-APACHE) and [LICENSE-MIT](https://github.com/sharkdp/fd/blob/master/LICENSE-MIT) fd-find-8.3.1/Cargo.lock0000644000000377060000000000100103440ustar # This file is automatically @generated by Cargo. # It is not intended for manual editing.
version = 3 [[package]] name = "aho-corasick" version = "0.7.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f" dependencies = [ "memchr", ] [[package]] name = "ansi_term" version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" dependencies = [ "winapi", ] [[package]] name = "anyhow" version = "1.0.52" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "84450d0b4a8bd1ba4144ce8ce718fbc5d071358b1e5384bace6536b3d1f2d5b3" [[package]] name = "atty" version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" dependencies = [ "hermit-abi", "libc", "winapi", ] [[package]] name = "autocfg" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" [[package]] name = "bitflags" version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bstr" version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba3569f383e8f1598449f1a423e72e99569137b47740b1da11ef19af3d5c3223" dependencies = [ "memchr", ] [[package]] name = "cc" version = "1.0.72" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22a9137b95ea06864e018375b72adfb7db6e6f68cfc8df5a04d00288050485ee" [[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" version = "0.4.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73" dependencies = [ "libc", "num-integer", "num-traits", "time", "winapi", ] [[package]] name = "clap" version = "2.34.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" dependencies = [ "ansi_term", "atty", "bitflags", "strsim", "term_size", "textwrap", "unicode-width", "vec_map", ] [[package]] name = "crossbeam-utils" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d82cfc11ce7f2c3faef78d8a684447b40d503d9681acebed6cb728d45940c4db" dependencies = [ "cfg-if", "lazy_static", ] [[package]] name = "ctrlc" version = "3.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a19c6cedffdc8c03a3346d723eb20bd85a13362bb96dc2ac000842c6381ec7bf" dependencies = [ "nix", "winapi", ] [[package]] name = "diff" version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0e25ea47919b1560c4e3b7fe0aaab9becf5b84a10325ddf7db0f0ba5e1026499" [[package]] name = "dirs-next" version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" dependencies = [ "cfg-if", "dirs-sys-next", ] [[package]] name = "dirs-sys-next" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" dependencies = [ "libc", 
"redox_users", "winapi", ] [[package]] name = "fd-find" version = "8.3.1" dependencies = [ "ansi_term", "anyhow", "atty", "chrono", "clap", "ctrlc", "diff", "dirs-next", "filetime", "globset", "humantime", "ignore", "jemallocator", "libc", "lscolors", "nix", "normpath", "num_cpus", "once_cell", "regex", "regex-syntax", "tempdir", "test-case", "users", "version_check", ] [[package]] name = "filetime" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "975ccf83d8d9d0d84682850a38c8169027be83368805971cc4f238c2b245bc98" dependencies = [ "cfg-if", "libc", "redox_syscall", "winapi", ] [[package]] name = "fnv" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "fs_extra" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2022715d62ab30faffd124d40b76f4134a550a87792276512b18d63272333394" [[package]] name = "fuchsia-cprng" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" [[package]] name = "getrandom" version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753" dependencies = [ "cfg-if", "libc", "wasi", ] [[package]] name = "globset" version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "10463d9ff00a2a068db14231982f5132edebad0d7660cd956a1c30292dbcbfbd" dependencies = [ "aho-corasick", "bstr", "fnv", "log", "regex", ] [[package]] name = "hermit-abi" version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" dependencies = [ "libc", ] [[package]] name = "humantime" version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] name = "ignore" version = "0.4.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "713f1b139373f96a2e0ce3ac931cd01ee973c3c5dd7c40c0c2efe96ad2b6751d" dependencies = [ "crossbeam-utils", "globset", "lazy_static", "log", "memchr", "regex", "same-file", "thread_local", "walkdir", "winapi-util", ] [[package]] name = "jemalloc-sys" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0d3b9f3f5c9b31aa0f5ed3260385ac205db665baa41d49bb8338008ae94ede45" dependencies = [ "cc", "fs_extra", "libc", ] [[package]] name = "jemallocator" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43ae63fcfc45e99ab3d1b29a46782ad679e98436c3169d15a167a1108a724b69" dependencies = [ "jemalloc-sys", "libc", ] [[package]] name = "lazy_static" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" version = "0.2.112" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b03d17f364a3a042d5e5d46b053bbbf82c92c9430c592dd4c064dc6ee997125" [[package]] name = "log" version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710" dependencies = [ "cfg-if", ] [[package]] name = "lscolors" 
version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9dd58d8727f3035fa6d5272f16b519741fd4875936b99d8a7cde21291b7d9174" dependencies = [ "ansi_term", ] [[package]] name = "memchr" version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" [[package]] name = "memoffset" version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "59accc507f1338036a0477ef61afdae33cde60840f4dfe481319ce3ad116ddf9" dependencies = [ "autocfg", ] [[package]] name = "nix" version = "0.23.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9f866317acbd3a240710c63f065ffb1e4fd466259045ccb504130b7f668f35c6" dependencies = [ "bitflags", "cc", "cfg-if", "libc", "memoffset", ] [[package]] name = "normpath" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "640c20e9df4a2d4a5adad5b47e17d76dac3e824346b181931c3ec9f7a85687b1" dependencies = [ "winapi", ] [[package]] name = "num-integer" version = "0.1.44" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db" dependencies = [ "autocfg", "num-traits", ] [[package]] name = "num-traits" version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" dependencies = [ "autocfg", ] [[package]] name = "num_cpus" version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1" dependencies = [ "hermit-abi", "libc", ] [[package]] name = "once_cell" version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da32515d9f6e6e489d7bc9d84c71b060db7247dc035bbe44eac88cf87486d8d5" [[package]] name = "proc-macro2" version = "1.0.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba508cc11742c0dc5c1659771673afbab7a0efab23aa17e854cbab0837ed0b43" dependencies = [ "unicode-xid", ] [[package]] name = "quote" version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38bc8cc6a5f2e3655e0899c1b848643b2562f853f114bfec7be120678e3ace05" dependencies = [ "proc-macro2", ] [[package]] name = "rand" version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" dependencies = [ "fuchsia-cprng", "libc", "rand_core 0.3.1", "rdrand", "winapi", ] [[package]] name = "rand_core" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" dependencies = [ "rand_core 0.4.2", ] [[package]] name = "rand_core" version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" [[package]] name = "rdrand" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" dependencies = [ "rand_core 0.3.1", ] [[package]] name = "redox_syscall" version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8383f39639269cde97d255a32bdb68c047337295414940c68bdd30c2e13203ff" dependencies = [ 
"bitflags", ] [[package]] name = "redox_users" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "528532f3d801c87aec9def2add9ca802fe569e44a544afe633765267840abe64" dependencies = [ "getrandom", "redox_syscall", ] [[package]] name = "regex" version = "1.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461" dependencies = [ "aho-corasick", "memchr", "regex-syntax", ] [[package]] name = "regex-syntax" version = "0.6.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b" [[package]] name = "remove_dir_all" version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" dependencies = [ "winapi", ] [[package]] name = "same-file" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" dependencies = [ "winapi-util", ] [[package]] name = "strsim" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" [[package]] name = "syn" version = "1.0.82" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8daf5dd0bb60cbd4137b1b587d2fc0ae729bc07cf01cd70b36a1ed5ade3b9d59" dependencies = [ "proc-macro2", "quote", "unicode-xid", ] [[package]] name = "tempdir" version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "15f2b5fb00ccdf689e0149d1b1b3c03fead81c2b37735d812fa8bddbbf41b6d8" dependencies = [ "rand", "remove_dir_all", ] [[package]] name = "term_size" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e4129646ca0ed8f45d09b929036bafad5377103edd06e50bf574b353d2b08d9" dependencies = [ "libc", "winapi", ] [[package]] name = "test-case" version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c7cad0a06f9a61e94355aa3b3dc92d85ab9c83406722b1ca5e918d4297c12c23" dependencies = [ "cfg-if", "proc-macro2", "quote", "syn", "version_check", ] [[package]] name = "textwrap" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" dependencies = [ "term_size", "unicode-width", ] [[package]] name = "thread_local" version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8018d24e04c95ac8790716a5987d0fec4f8b27249ffa0f7d33f1369bdfb88cbd" dependencies = [ "once_cell", ] [[package]] name = "time" version = "0.1.43" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ca8a50ef2360fbd1eeb0ecd46795a87a19024eb4b53c5dc916ca1fd95fe62438" dependencies = [ "libc", "winapi", ] [[package]] name = "unicode-width" version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973" [[package]] name = "unicode-xid" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" [[package]] name = "users" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"24cc0f6d6f267b73e5a2cadf007ba8f9bc39c6a6f9666f8cf25ea809a153b032" dependencies = [ "libc", "log", ] [[package]] name = "vec_map" version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" [[package]] name = "version_check" version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" [[package]] name = "walkdir" version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56" dependencies = [ "same-file", "winapi", "winapi-util", ] [[package]] name = "wasi" version = "0.10.2+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" [[package]] name = "winapi" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" dependencies = [ "winapi-i686-pc-windows-gnu", "winapi-x86_64-pc-windows-gnu", ] [[package]] name = "winapi-i686-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" dependencies = [ "winapi", ] [[package]] name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" fd-find-8.3.1/Cargo.toml0000644000000050570000000000100103610ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. [package] edition = "2018" name = "fd-find" version = "8.3.1" authors = ["David Peter "] build = "build.rs" exclude = ["/benchmarks/*"] description = "fd is a simple, fast and user-friendly alternative to find." 
homepage = "https://github.com/sharkdp/fd" readme = "README.md" keywords = ["search", "find", "file", "filesystem", "tool"] categories = ["command-line-utilities"] license = "MIT/Apache-2.0" repository = "https://github.com/sharkdp/fd" [profile.release] lto = true codegen-units = 1 [[bin]] name = "fd" path = "src/main.rs" [dependencies.ansi_term] version = "0.12" [dependencies.anyhow] version = "1.0" [dependencies.atty] version = "0.2" [dependencies.chrono] version = "0.4" [dependencies.clap] version = "2.34.0" features = ["suggestions", "color", "wrap_help"] [dependencies.ctrlc] version = "3.2" [dependencies.dirs-next] version = "2.0" [dependencies.globset] version = "0.4" [dependencies.humantime] version = "2.1" [dependencies.ignore] version = "0.4.3" [dependencies.lscolors] version = "0.8" [dependencies.normpath] version = "0.3" [dependencies.num_cpus] version = "1.13" [dependencies.once_cell] version = "1.9.0" [dependencies.regex] version = "1.5.4" [dependencies.regex-syntax] version = "0.6" [dev-dependencies.diff] version = "0.1" [dev-dependencies.filetime] version = "0.2" [dev-dependencies.tempdir] version = "0.3" [dev-dependencies.test-case] version = "1.2" [build-dependencies.clap] version = "2.34.0" [build-dependencies.version_check] version = "0.9" [features] default = ["use-jemalloc"] use-jemalloc = ["jemallocator"] [target."cfg(all(not(windows), not(target_os = \"android\"), not(target_os = \"macos\"), not(target_os = \"freebsd\"), not(target_env = \"musl\"), not(target_arch = \"riscv64\")))".dependencies.jemallocator] version = "0.3.0" optional = true [target."cfg(all(unix, not(target_os = \"redox\")))".dependencies.libc] version = "0.2" [target."cfg(unix)".dependencies.nix] version = "0.23.1" [target."cfg(unix)".dependencies.users] version = "0.11.0" [badges.appveyor] repository = "sharkdp/fd" [badges.travis-ci] repository = "sharkdp/fd" fd-find-8.3.1/Cargo.toml.orig000064400000000000000000000034460072674642500140720ustar 00000000000000[package] authors = ["David Peter "] build = "build.rs" categories = ["command-line-utilities"] description = "fd is a simple, fast and user-friendly alternative to find." exclude = ["/benchmarks/*"] homepage = "https://github.com/sharkdp/fd" keywords = [ "search", "find", "file", "filesystem", "tool", ] license = "MIT/Apache-2.0" name = "fd-find" readme = "README.md" repository = "https://github.com/sharkdp/fd" version = "8.3.1" edition= "2018" [badges.appveyor] repository = "sharkdp/fd" [badges.travis-ci] repository = "sharkdp/fd" [[bin]] name = "fd" path = "src/main.rs" [build-dependencies] clap = "2.34.0" version_check = "0.9" [dependencies] ansi_term = "0.12" atty = "0.2" ignore = "0.4.3" num_cpus = "1.13" regex = "1.5.4" regex-syntax = "0.6" ctrlc = "3.2" humantime = "2.1" lscolors = "0.8" globset = "0.4" anyhow = "1.0" dirs-next = "2.0" normpath = "0.3" chrono = "0.4" once_cell = "1.9.0" [dependencies.clap] version = "2.34.0" features = ["suggestions", "color", "wrap_help"] [target.'cfg(unix)'.dependencies] users = "0.11.0" nix = "0.23.1" [target.'cfg(all(unix, not(target_os = "redox")))'.dependencies] libc = "0.2" # FIXME: Re-enable jemalloc on macOS # jemalloc is currently disabled on macOS due to a bug in jemalloc in combination with macOS # Catalina. See https://github.com/sharkdp/fd/issues/498 for details. 
[target.'cfg(all(not(windows), not(target_os = "android"), not(target_os = "macos"), not(target_os = "freebsd"), not(target_env = "musl"), not(target_arch = "riscv64")))'.dependencies] jemallocator = {version = "0.3.0", optional = true} [dev-dependencies] diff = "0.1" tempdir = "0.3" filetime = "0.2" test-case = "1.2" [profile.release] lto = true codegen-units = 1 [features] use-jemalloc = ["jemallocator"] default = ["use-jemalloc"] fd-find-8.3.1/LICENSE-APACHE000064400000000000000000000251260072674642500131260ustar 00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." 
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. 
Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright 2017-2020 fd developers Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. fd-find-8.3.1/LICENSE-MIT000064400000000000000000000020670072674642500126350ustar 00000000000000MIT License Copyright (c) 2017-2021 The fd developers Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. fd-find-8.3.1/README.md000064400000000000000000000571310072674642500124620ustar 00000000000000# fd [![CICD](https://github.com/sharkdp/fd/actions/workflows/CICD.yml/badge.svg)](https://github.com/sharkdp/fd/actions/workflows/CICD.yml) [![Version info](https://img.shields.io/crates/v/fd-find.svg)](https://crates.io/crates/fd-find) [[中文](https://github.com/chinanf-boy/fd-zh)] [[한국어](https://github.com/spearkkk/fd-kor)] `fd` is a program to find entries in your filesystem. It is a simple, fast and user-friendly alternative to [`find`](https://www.gnu.org/software/findutils/). While it does not aim to support all of `find`'s powerful functionality, it provides sensible (opinionated) defaults for a majority of use cases. Quick links: * [How to use](#how-to-use) * [Installation](#installation) * [Troubleshooting](#troubleshooting) ## Features * Intuitive syntax: `fd PATTERN` instead of `find -iname '*PATTERN*'`. * Regular expression (default) and glob-based patterns. * [Very fast](#benchmark) due to parallelized directory traversal. * Uses colors to highlight different file types (same as *ls*). * Supports [parallel command execution](#command-execution) * Smart case: the search is case-insensitive by default. It switches to case-sensitive if the pattern contains an uppercase character[\*](http://vimdoc.sourceforge.net/htmldoc/options.html#'smartcase'). * Ignores hidden directories and files, by default. * Ignores patterns from your `.gitignore`, by default. * The command name is *50%* shorter[\*](https://github.com/ggreer/the_silver_searcher) than `find` :-). ## Demo ![Demo](doc/screencast.svg) ## How to use First, to get an overview of all available command line options, you can either run [`fd -h`](#command-line-options) for a concise help message or `fd --help` for a more detailed version. ### Simple search *fd* is designed to find entries in your filesystem. The most basic search you can perform is to run *fd* with a single argument: the search pattern. 
For example, assume that you want to find an old script of yours (the name included `netflix`): ``` bash > fd netfl Software/python/imdb-ratings/netflix-details.py ``` If called with just a single argument like this, *fd* searches the current directory recursively for any entries that *contain* the pattern `netfl`. ### Regular expression search The search pattern is treated as a regular expression. Here, we search for entries that start with `x` and end with `rc`: ``` bash > cd /etc > fd '^x.*rc$' X11/xinit/xinitrc X11/xinit/xserverrc ``` The regular expression syntax used by `fd` is [documented here](https://docs.rs/regex/1.0.0/regex/#syntax). ### Specifying the root directory If we want to search a specific directory, it can be given as a second argument to *fd*: ``` bash > fd passwd /etc /etc/default/passwd /etc/pam.d/passwd /etc/passwd ``` ### List all files, recursively *fd* can be called with no arguments. This is very useful to get a quick overview of all entries in the current directory, recursively (similar to `ls -R`): ``` bash > cd fd/tests > fd testenv testenv/mod.rs tests.rs ``` If you want to use this functionality to list all files in a given directory, you have to use a catch-all pattern such as `.` or `^`: ``` bash > fd . fd/tests/ testenv testenv/mod.rs tests.rs ``` ### Searching for a particular file extension Often, we are interested in all files of a particular type. This can be done with the `-e` (or `--extension`) option. Here, we search for all Markdown files in the fd repository: ``` bash > cd fd > fd -e md CONTRIBUTING.md README.md ``` The `-e` option can be used in combination with a search pattern: ``` bash > fd -e rs mod src/fshelper/mod.rs src/lscolors/mod.rs tests/testenv/mod.rs ``` ### Searching for a particular file name To find files with exactly the provided search pattern, use the `-g` (or `--glob`) option: ``` bash > fd -g libc.so /usr /usr/lib32/libc.so /usr/lib/libc.so ``` ### Hidden and ignored files By default, *fd* does not search hidden directories and does not show hidden files in the search results. To disable this behavior, we can use the `-H` (or `--hidden`) option: ``` bash > fd pre-commit > fd -H pre-commit .git/hooks/pre-commit.sample ``` If we work in a directory that is a Git repository (or includes Git repositories), *fd* does not search folders (and does not show files) that match one of the `.gitignore` patterns. To disable this behavior, we can use the `-I` (or `--no-ignore`) option: ``` bash > fd num_cpu > fd -I num_cpu target/debug/deps/libnum_cpus-f5ce7ef99006aa05.rlib ``` To really search *all* files and directories, simply combine the hidden and ignore features to show everything (`-HI`). ### Matching the full path By default, *fd* only matches the filename of each file. However, using the `--full-path` or `-p` option, you can match against the full path. ```bash > fd -p -g '**/.git/config' > fd -p '.*/lesson-\d+/[a-z]+.(jpg|png)' ``` ### Command execution Instead of just showing the search results, you often want to *do something* with them. `fd` provides two ways to execute external commands for each of your search results: * The `-x`/`--exec` option runs an external command *for each of the search results* (in parallel). * The `-X`/`--exec-batch` option launches the external command once, with *all search results as arguments*. 
#### Examples Recursively find all zip archives and unpack them: ``` bash fd -e zip -x unzip ``` If there are two such files, `file1.zip` and `backup/file2.zip`, this would execute `unzip file1.zip` and `unzip backup/file2.zip`. The two `unzip` processes run in parallel (if the files are found fast enough). Find all `*.h` and `*.cpp` files and auto-format them inplace with `clang-format -i`: ``` bash fd -e h -e cpp -x clang-format -i ``` Note how the `-i` option to `clang-format` can be passed as a separate argument. This is why we put the `-x` option last. Find all `test_*.py` files and open them in your favorite editor: ``` bash fd -g 'test_*.py' -X vim ``` Note that we use capital `-X` here to open a single `vim` instance. If there are two such files, `test_basic.py` and `lib/test_advanced.py`, this will run `vim test_basic.py lib/test_advanced.py`. To see details like file permissions, owners, file sizes etc., you can tell `fd` to show them by running `ls` for each result: ``` bash fd … -X ls -lhd --color=always ``` This pattern is so useful that `fd` provides a shortcut. You can use the `-l`/`--list-details` option to execute `ls` in this way: `fd … -l`. The `-X` option is also useful when combining `fd` with [ripgrep](https://github.com/BurntSushi/ripgrep/) (`rg`) in order to search within a certain class of files, like all C++ source files: ```bash fd -e cpp -e cxx -e h -e hpp -X rg 'std::cout' ``` Convert all `*.jpg` files to `*.png` files: ``` bash fd -e jpg -x convert {} {.}.png ``` Here, `{}` is a placeholder for the search result. `{.}` is the same, without the file extension. See below for more details on the placeholder syntax. #### Placeholder syntax The `-x` and `-X` options take a *command template* as a series of arguments (instead of a single string). If you want to add additional options to `fd` after the command template, you can terminate it with a `\;`. The syntax for generating commands is similar to that of [GNU Parallel](https://www.gnu.org/software/parallel/): - `{}`: A placeholder token that will be replaced with the path of the search result (`documents/images/party.jpg`). - `{.}`: Like `{}`, but without the file extension (`documents/images/party`). - `{/}`: A placeholder that will be replaced by the basename of the search result (`party.jpg`). - `{//}`: The parent of the discovered path (`documents/images`). - `{/.}`: The basename, with the extension removed (`party`). If you do not include a placeholder, *fd* automatically adds a `{}` at the end. #### Parallel vs. serial execution For `-x`/`--exec`, you can control the number of parallel jobs by using the `-j`/`--threads` option. Use `--threads=1` for serial execution. ### Excluding specific files or directories Sometimes we want to ignore search results from a specific subdirectory. For example, we might want to search all hidden files and directories (`-H`) but exclude all matches from `.git` directories. We can use the `-E` (or `--exclude`) option for this. It takes an arbitrary glob pattern as an argument: ``` bash > fd -H -E .git … ``` We can also use this to skip mounted directories: ``` bash > fd -E /mnt/external-drive … ``` .. or to skip certain file types: ``` bash > fd -E '*.bak' … ``` To make exclude-patterns like these permanent, you can create a `.fdignore` file. They work like `.gitignore` files, but are specific to `fd`. For example: ``` bash > cat ~/.fdignore /mnt/external-drive *.bak ``` Note: `fd` also supports `.ignore` files that are used by other programs such as `rg` or `ag`. 
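As a quick sanity check that such an ignore file is actually being picked up, you can compare a normal search against one that bypasses ignore files (a minimal illustration, assuming the `*.bak` rule from the `.fdignore` example above):

``` bash
> fd -e bak       # no output: '*.bak' entries are ignored
> fd -I -e bak    # '-I' disables ignore files, so the entries show up again
```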
If you want `fd` to ignore these patterns globally, you can put them in `fd`'s global ignore file. This is usually located in `~/.config/fd/ignore` in macOS or Linux, and `%APPDATA%\fd\ignore` in Windows. ### Deleting files You can use `fd` to remove all files and directories that are matched by your search pattern. If you only want to remove files, you can use the `--exec-batch`/`-X` option to call `rm`. For example, to recursively remove all `.DS_Store` files, run: ``` bash > fd -H '^\.DS_Store$' -tf -X rm ``` If you are unsure, always call `fd` without `-X rm` first. Alternatively, use `rm`s "interactive" option: ``` bash > fd -H '^\.DS_Store$' -tf -X rm -i ``` If you also want to remove a certain class of directories, you can use the same technique. You will have to use `rm`s `--recursive`/`-r` flag to remove directories. Note: there are scenarios where using `fd … -X rm -r` can cause race conditions: if you have a path like `…/foo/bar/foo/…` and want to remove all directories named `foo`, you can end up in a situation where the outer `foo` directory is removed first, leading to (harmless) *"'foo/bar/foo': No such file or directory"* errors in the `rm` call. ### Command-line options This is the output of `fd -h`. To see the full set of command-line options, use `fd --help` which also includes a much more detailed help text. ``` USAGE: fd [FLAGS/OPTIONS] [] [...] FLAGS: -H, --hidden Search hidden files and directories -I, --no-ignore Do not respect .(git|fd)ignore files -s, --case-sensitive Case-sensitive search (default: smart case) -i, --ignore-case Case-insensitive search (default: smart case) -g, --glob Glob-based search (default: regular expression) -a, --absolute-path Show absolute instead of relative paths -l, --list-details Use a long listing format with file metadata -L, --follow Follow symbolic links -p, --full-path Search full abs. path (default: filename only) -h, --help Prints help information -V, --version Prints version information OPTIONS: -d, --max-depth Set maximum search depth (default: none) -t, --type ... Filter by type: file (f), directory (d), symlink (l), executable (x), empty (e), socket (s), pipe (p) -e, --extension ... Filter by file extension -x, --exec Execute a command for each search result -X, --exec-batch Execute a command with all search results at once -E, --exclude ... Exclude entries that match the given glob pattern -c, --color When to use colors: never, *auto*, always -S, --size ... Limit results based on the size of files --changed-within Filter by file modification time (newer than) --changed-before Filter by file modification time (older than) -o, --owner Filter by owning user and/or group ARGS: the search pattern (a regular expression, unless '--glob' is used; optional) ... the root directory for the filesystem search (optional) ``` ## Benchmark Let's search my home folder for files that end in `[0-9].jpg`. It contains ~190.000 subdirectories and about a million files. For averaging and statistical analysis, I'm using [hyperfine](https://github.com/sharkdp/hyperfine). The following benchmarks are performed with a "warm"/pre-filled disk-cache (results for a "cold" disk-cache show the same trends). 
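If you want to run this kind of comparison yourself, a `hyperfine` invocation along the following lines should do the trick (illustrative only; flags such as `--warmup` are not necessarily the exact setup used for the numbers below, and the full scripts are in the benchmark repository linked at the end of this section):

``` bash
hyperfine --warmup 3 \
    "find ~ -iname '*[0-9].jpg'" \
    "fd -HI '[0-9]\.jpg$' ~"
```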
Let's start with `find`: ``` Benchmark #1: find ~ -iregex '.*[0-9]\.jpg$' Time (mean ± σ): 7.236 s ± 0.090 s Range (min … max): 7.133 s … 7.385 s ``` `find` is much faster if it does not need to perform a regular-expression search: ``` Benchmark #2: find ~ -iname '*[0-9].jpg' Time (mean ± σ): 3.914 s ± 0.027 s Range (min … max): 3.876 s … 3.964 s ``` Now let's try the same for `fd`. Note that `fd` *always* performs a regular expression search. The options `--hidden` and `--no-ignore` are needed for a fair comparison, otherwise `fd` does not have to traverse hidden folders and ignored paths (see below): ``` Benchmark #3: fd -HI '.*[0-9]\.jpg$' ~ Time (mean ± σ): 811.6 ms ± 26.9 ms Range (min … max): 786.0 ms … 870.7 ms ``` For this particular example, `fd` is approximately nine times faster than `find -iregex` and about five times faster than `find -iname`. By the way, both tools found the exact same 20880 files :smile:. Finally, let's run `fd` without `--hidden` and `--no-ignore` (this can lead to different search results, of course). If *fd* does not have to traverse the hidden and git-ignored folders, it is almost an order of magnitude faster: ``` Benchmark #4: fd '[0-9]\.jpg$' ~ Time (mean ± σ): 123.7 ms ± 6.0 ms Range (min … max): 118.8 ms … 140.0 ms ``` **Note**: This is *one particular* benchmark on *one particular* machine. While I have performed quite a lot of different tests (and found consistent results), things might be different for you! I encourage everyone to try it out on their own. See [this repository](https://github.com/sharkdp/fd-benchmarks) for all necessary scripts. Concerning *fd*'s speed, the main credit goes to the `regex` and `ignore` crates that are also used in [ripgrep](https://github.com/BurntSushi/ripgrep) (check it out!). ## Troubleshooting ### Colorized output `fd` can colorize files by extension, just like `ls`. In order for this to work, the environment variable [`LS_COLORS`](https://linux.die.net/man/5/dir_colors) has to be set. Typically, the value of this variable is set by the `dircolors` command which provides a convenient configuration format to define colors for different file formats. On most distributions, `LS_COLORS` should be set already. If you are on Windows or if you are looking for alternative, more complete (or more colorful) variants, see [here](https://github.com/sharkdp/vivid), [here](https://github.com/seebi/dircolors-solarized) or [here](https://github.com/trapd00r/LS_COLORS). `fd` also honors the [`NO_COLOR`](https://no-color.org/) environment variable. ### `fd` does not find my file! Remember that `fd` ignores hidden directories and files by default. It also ignores patterns from `.gitignore` files. If you want to make sure to find absolutely every possible file, always use the options `-H` and `-I` to disable these two features: ``` bash > fd -HI … ``` ### `fd` doesn't seem to interpret my regex pattern correctly A lot of special regex characters (like `[]`, `^`, `$`, ..) are also special characters in your shell. If in doubt, always make sure to put single quotes around the regex pattern: ``` bash > fd '^[A-Z][0-9]+$' ``` If your pattern starts with a dash, you have to add `--` to signal the end of command line options. Otherwise, the pattern will be interpreted as a command-line option. 
Alternatively, use a character class with a single hyphen character: ``` bash > fd -- '-pattern' > fd '[-]pattern' ``` ### "Command not found" for `alias`es or shell functions Shell `alias`es and shell functions cannot be used for command execution via `fd -x` or `fd -X`. In `zsh`, you can make the alias global via `alias -g myalias="…"`. In `bash`, you can use `export -f my_function` to make it available to child processes. You would still need to call `fd -x bash -c 'my_function "$1"' bash`. For other use cases or shells, use a (temporary) shell script. ## Integration with other programs ### Using fd with `fzf` You can use *fd* to generate input for the command-line fuzzy finder [fzf](https://github.com/junegunn/fzf): ``` bash export FZF_DEFAULT_COMMAND='fd --type file' export FZF_CTRL_T_COMMAND="$FZF_DEFAULT_COMMAND" ``` Then, you can type `vim <Ctrl-T>` on your terminal to open fzf and search through the fd-results. Alternatively, you might like to follow symbolic links and include hidden files (but exclude `.git` folders): ``` bash export FZF_DEFAULT_COMMAND='fd --type file --follow --hidden --exclude .git' ``` You can even use fd's colored output inside fzf by setting: ``` bash export FZF_DEFAULT_COMMAND="fd --type file --color=always" export FZF_DEFAULT_OPTS="--ansi" ``` For more details, see the [Tips section](https://github.com/junegunn/fzf#tips) of the fzf README. ### Using fd with `rofi` [*rofi*](https://github.com/davatorium/rofi) is a graphical launch menu application that is able to create menus by reading from *stdin*. Piping `fd` output into `rofi`'s `-dmenu` mode creates fuzzy-searchable lists of files and directories. #### Example Create a case-insensitive searchable multi-select list of *PDF* files under your `$HOME` directory and open the selection with your configured PDF viewer. To list all file types, drop the `-e pdf` argument. ``` bash fd --type f -e pdf . $HOME | rofi -keep-right -dmenu -i -p FILES -multi-select | xargs -I {} xdg-open {} ``` To modify the list that is presented by rofi, add arguments to the `fd` command. To modify the search behaviour of rofi, add arguments to the `rofi` command. ### Using fd with `emacs` The emacs package [find-file-in-project](https://github.com/technomancy/find-file-in-project) can use *fd* to find files. After installing `find-file-in-project`, add the line `(setq ffip-use-rust-fd t)` to your `~/.emacs` or `~/.emacs.d/init.el` file. In emacs, run `M-x find-file-in-project-by-selected` to find matching files. Alternatively, run `M-x find-file-in-project` to list all available files in the project. ### Printing the output as a tree To format the output of `fd` similar to the `tree` command, install [`as-tree`] and pipe the output of `fd` to `as-tree`: ```bash fd | as-tree ``` This can be more useful than running `tree` by itself because `tree` does not ignore any files by default, nor does it support as rich a set of options as `fd` does to control what to print: ```bash ❯ fd --extension rs | as-tree . ├── build.rs └── src ├── app.rs └── error.rs ``` For more information about `as-tree`, see [the `as-tree` README][`as-tree`]. [`as-tree`]: https://github.com/jez/as-tree ### Using fd with `xargs` or `parallel` Note that `fd` has a builtin feature for [command execution](#command-execution) with its `-x`/`--exec` and `-X`/`--exec-batch` options.
If you prefer, you can still use it in combination with `xargs`: ``` bash > fd -0 -e rs | xargs -0 wc -l ``` Here, the `-0` option tells *fd* to separate search results by the NULL character (instead of newlines). In the same way, the `-0` option of `xargs` tells it to read the input in this way. ## Installation [![Packaging status](https://repology.org/badge/vertical-allrepos/fd-find.svg)](https://repology.org/project/fd-find/versions) ### On Ubuntu *... and other Debian-based Linux distributions.* If you run Ubuntu 19.04 (Disco Dingo) or newer, you can install the [officially maintained package](https://packages.ubuntu.com/fd-find): ``` sudo apt install fd-find ``` Note that the binary is called `fdfind` as the binary name `fd` is already used by another package. It is recommended that after installation, you add a link to `fd` by executing command `ln -s $(which fdfind) ~/.local/bin/fd`, in order to use `fd` in the same way as in this documentation. Make sure that `$HOME/.local/bin` is in your `$PATH`. If you use an older version of Ubuntu, you can download the latest `.deb` package from the [release page](https://github.com/sharkdp/fd/releases) and install it via: ``` bash sudo dpkg -i fd_8.3.1_amd64.deb # adapt version number and architecture ``` ### On Debian If you run Debian Buster or newer, you can install the [officially maintained Debian package](https://tracker.debian.org/pkg/rust-fd-find): ``` sudo apt-get install fd-find ``` Note that the binary is called `fdfind` as the binary name `fd` is already used by another package. It is recommended that after installation, you add a link to `fd` by executing command `ln -s $(which fdfind) ~/.local/bin/fd`, in order to use `fd` in the same way as in this documentation. Make sure that `$HOME/.local/bin` is in your `$PATH`. ### On Fedora Starting with Fedora 28, you can install `fd` from the official package sources: ``` bash dnf install fd-find ``` For older versions, you can use this [Fedora copr](https://copr.fedorainfracloud.org/coprs/keefle/fd/) to install `fd`: ``` bash dnf copr enable keefle/fd dnf install fd ``` ### On Alpine Linux You can install [the fd package](https://pkgs.alpinelinux.org/packages?name=fd) from the official sources, provided you have the appropriate repository enabled: ``` apk add fd ``` ### On Arch Linux You can install [the fd package](https://www.archlinux.org/packages/community/x86_64/fd/) from the official repos: ``` pacman -S fd ``` ### On Gentoo Linux You can use [the fd ebuild](https://packages.gentoo.org/packages/sys-apps/fd) from the official repo: ``` emerge -av fd ``` ### On openSUSE Linux You can install [the fd package](https://software.opensuse.org/package/fd) from the official repo: ``` zypper in fd ``` ### On Void Linux You can install `fd` via xbps-install: ``` xbps-install -S fd ``` ### On macOS You can install `fd` with [Homebrew](https://formulae.brew.sh/formula/fd): ``` brew install fd ``` … or with MacPorts: ``` sudo port install fd ``` ### On Windows You can download pre-built binaries from the [release page](https://github.com/sharkdp/fd/releases). 
Alternatively, you can install `fd` via [Scoop](http://scoop.sh): ``` scoop install fd ``` Or via [Chocolatey](https://chocolatey.org): ``` choco install fd ``` ### On NixOS / via Nix You can use the [Nix package manager](https://nixos.org/nix/) to install `fd`: ``` nix-env -i fd ``` ### On FreeBSD You can install [the fd-find package](https://www.freshports.org/sysutils/fd) from the official repo: ``` pkg install fd-find ``` ### From npm On linux and macOS, you can install the [fd-find](https://npm.im/fd-find) package: ``` npm install -g fd-find ``` ### From source With Rust's package manager [cargo](https://github.com/rust-lang/cargo), you can install *fd* via: ``` cargo install fd-find ``` Note that rust version *1.53.0* or later is required. `make` is also needed for the build. ### From binaries The [release page](https://github.com/sharkdp/fd/releases) includes precompiled binaries for Linux, macOS and Windows. Statically-linked binaries are also available: look for archives with `musl` in the file name. ## Development ```bash git clone https://github.com/sharkdp/fd # Build cd fd cargo build # Run unit tests and integration tests cargo test # Install cargo install --path . ``` ## Maintainers - [sharkdp](https://github.com/sharkdp) - [tmccombs](https://github.com/tmccombs) - [tavianator](https://github.com/tavianator) ## License Copyright (c) 2017-2021 The fd developers `fd` is distributed under the terms of both the MIT License and the Apache License 2.0. See the [LICENSE-APACHE](LICENSE-APACHE) and [LICENSE-MIT](LICENSE-MIT) files for license details. fd-find-8.3.1/build.rs000064400000000000000000000014400072674642500126400ustar 00000000000000use std::fs; use clap::Shell; include!("src/app.rs"); fn main() { let min_version = "1.53"; match version_check::is_min_version(min_version) { Some(true) => {} // rustc version too small or can't figure it out _ => { eprintln!("'fd' requires rustc >= {}", min_version); std::process::exit(1); } } let var = std::env::var_os("SHELL_COMPLETIONS_DIR").or_else(|| std::env::var_os("OUT_DIR")); let outdir = match var { None => return, Some(outdir) => outdir, }; fs::create_dir_all(&outdir).unwrap(); let mut app = build_app(); app.gen_completions("fd", Shell::Bash, &outdir); app.gen_completions("fd", Shell::Fish, &outdir); app.gen_completions("fd", Shell::PowerShell, &outdir); } fd-find-8.3.1/clippy.toml000064400000000000000000000000200072674642500133610ustar 00000000000000msrv = "1.53.0" fd-find-8.3.1/contrib/completion/_fd000064400000000000000000000260160072674642500154650ustar 00000000000000#compdef fd ## # zsh completion function for fd # # Based on ripgrep completion function. # Originally based on code from the zsh-users project — see copyright notice # below. autoload -U is-at-least _fd() { local curcontext="$curcontext" no='!' ret=1 local -a context line state state_descr _arguments_options fd_types fd_args local -A opt_args if is-at-least 5.2; then _arguments_options=( -s -S ) else _arguments_options=( -s ) fi fd_types=( {f,file}'\:"regular files"' {d,directory}'\:"directories"' {l,symlink}'\:"symbolic links"' {e,empty}'\:"empty files or directories"' {x,executable}'\:"executable (files)"' {s,socket}'\:"sockets"' {p,pipe}'\:"named pipes (FIFOs)"' ) # Do not complete rare options unless either the current prefix # matches one of those options or the user has the `complete-all` # style set. Note that this prefix check has to be updated manually to account # for all of the potential negation options listed below! 
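# (A user can opt in to completing these rare options by setting that style,
# e.g. with something like `zstyle ':complete:*' complete-all true` in their
# zshrc; the exact context pattern may be narrowed down further.)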
if # (--[bpsu]* => match all options marked with '$no') [[ $PREFIX$SUFFIX == --[bopsu]* ]] || zstyle -t ":complete:$curcontext:*" complete-all then no= fi # We make heavy use of argument groups here to prevent the option specs from # growing unwieldy. These aren't supported in zsh <5.4, though, so we'll strip # them out below if necessary. This makes the exclusions inaccurate on those # older versions, but oh well — it's not that big a deal fd_args=( + '(hidden)' # hidden files {-H,--hidden}'[search hidden files/directories]' + '(no-ignore-full)' # all ignore files '(no-ignore-partial)'{-I,--no-ignore}"[don't respect .(git|fd)ignore and global ignore files]" $no'(no-ignore-partial)*'{-u,--unrestricted}'[alias for --no-ignore, when repeated also alias for --hidden]' + no-ignore-partial # some ignore files "(no-ignore-full --no-ignore-vcs)--no-ignore-vcs[don't respect .gitignore files]" "!(no-ignore-full --no-global-ignore-file)--no-global-ignore-file[don't respect the global ignore file]" $no'(no-ignore-full --no-ignore-parent)--no-ignore-parent[]' + '(case)' # case-sensitivity {-s,--case-sensitive}'[perform a case-sensitive search]' {-i,--ignore-case}'[perform a case-insensitive search]' + '(regex-pattern)' # regex-based search pattern '(no-regex-pattern)--regex[perform a regex-based search (default)]' + '(no-regex-pattern)' # non-regex-based search pattern {-g,--glob}'[perform a glob-based search]' {-F,--fixed-strings}'[treat pattern as literal string instead of a regex]' + '(match-full)' # match against full path {-p,--full-path}'[match the pattern against the full path instead of the basename]' + '(follow)' # follow symlinks {-L,--follow}'[follow symbolic links to directories]' + '(abs-path)' # show absolute paths '(long-listing)'{-a,--absolute-path}'[show absolute paths instead of relative paths]' + '(null-sep)' # use null separator for output '(long-listing)'{-0,--print0}'[separate search results by the null character]' + '(long-listing)' # long-listing output '(abs-path null-sep max-results exec-cmds)'{-l,--list-details}'[use a long listing format with file metadata]' + '(max-results)' # max number of results '(long-listing exec-cmds)--max-results=[limit number of search results to given count and quit]:count' '(long-listing exec-cmds)-1[limit to a single search result and quit]' + '(fs-errors)' # file-system errors $no'--show-errors[enable the display of filesystem errors]' + '(fs-traversal)' # file-system traversal $no"--one-file-system[don't descend into directories on other file systems]" '!--mount' '!--xdev' + dir-depth # directory depth '(--exact-depth -d --max-depth)'{-d+,--max-depth=}'[set max directory depth to descend when searching]:depth' '!(--exact-depth -d --max-depth)--maxdepth:depth' '(--exact-depth --min-depth)--min-depth=[set directory depth to descend before start searching]:depth' '(--exact-depth -d --max-depth --maxdepth --min-depth)--exact-depth=[only search at the exact given directory depth]:depth' + prune # pruning "--prune[don't traverse into matching directories]" + filter-misc # filter search '*'{-t+,--type=}"[filter search by type]:type:(($fd_types))" '*'{-e+,--extension=}'[filter search by file extension]:extension' '*'{-E+,--exclude=}'[exclude files/directories that match the given glob pattern]:glob pattern' '*'{-S+,--size=}'[limit search by file size]:size limit:->size' '(-o --owner)'{-o+,--owner=}'[filter by owning user and/or group]:owner and/or group:->owner' + ignore-file # extra ignore files '*--ignore-file=[add a custom, low-precedence 
ignore-file with .gitignore format]: :_files' + '(filter-mtime-newer)' # filter by files modified after than '--changed-within=[limit search to files/directories modified within the given date/duration]:date or duration' '!--change-newer-than=:date/duration' '!--newer=:date/duration' + '(filter-mtime-older)' # filter by files modified before than '--changed-before=[limit search to files/directories modified before the given date/duration]:date or duration' '!--change-older-than=:date/duration' '!--older=:date/duration' + '(color)' # colorize output {-c+,--color=}'[declare when to colorize search results]:when to colorize:(( auto\:"show colors if the output goes to an interactive console (default)" never\:"do not use colorized output" always\:"always use colorized output" ))' + '(threads)' {-j+,--threads=}'[set the number of threads for searching and executing]:number of threads' + '(exec-cmds)' # execute command '(long-listing max-results)'{-x+,--exec=}'[execute command for each search result]:command: _command_names -e:*\;::program arguments: _normal' '(long-listing max-results)'{-X+,--exec-batch=}'[execute command for all search results at once]:command: _command_names -e:*\;::program arguments: _normal' '(long-listing max-results)--batch-size=[max number of args for each -X call]:size' + other '!(--max-buffer-time)--max-buffer-time=[set amount of time to buffer before showing output]:time (ms)' + '(about)' # about flags '(: * -)'{-h,--help}'[display help message]' '(: * -)'{-v,--version}'[display version information]' + path-sep # set path separator for output $no'(--path-separator)--path-separator=[set the path separator to use when printing file paths]:path separator' + search-path $no'(--base-directory)--base-directory=[change the current working directory to the given path]:directory:_files -/' $no'(*)*--search-path=[set search path (instead of positional arguments)]:directory:_files -/' + strip-cwd-prefix $no'(strip-cwd-prefix exec-cmds)--strip-cwd-prefix[Strip ./ prefix when output is redirected]' + args # positional arguments '1: :_guard "^-*" pattern' '(--search-path)*:directory:_files -/' ) # Strip out argument groups where unsupported (see above) is-at-least 5.4 || fd_args=( ${(@)args:#(#i)(+|[a-z0-9][a-z0-9_-]#|\([a-z0-9][a-z0-9_-]#\))} ) _arguments $_arguments_options : $fd_args && ret=0 case ${state} in owner) compset -P '(\\|)\!' 
if compset -P '*:'; then _groups && ret=0 else if compset -S ':*' || # Do not add the colon suffix when completing "!user # (with a starting double-quote) otherwise pressing tab again # after the inserted colon "!user: will complete history modifiers [[ $IPREFIX == (\\|\!)* && ($QIPREFIX == \"* && -z $QISUFFIX) ]] then _users && ret=0 else local q # Since quotes are needed when using the negation prefix !, # automatically remove the colon suffix also when closing the quote if [[ $QIPREFIX == [\'\"]* ]]; then q=${QIPREFIX:0:1} fi _users -r ": \t\n\-$q" -S : && ret=0 fi fi ;; size) if compset -P '[-+][0-9]##'; then local -a suff=( 'B:bytes' 'K:kilobytes (10^3 = 1000 bytes)' 'M:megabytes (10^6 = 1000^2 bytes)' 'G:gigabytes (10^9 = 1000^3 bytes)' 'T:terabytes (10^12 = 1000^4 bytes)' 'Ki:kibibytes ( 2^10 = 1024 bytes)' 'Mi:mebibytes ( 2^20 = 1024^2 bytes)' 'Gi:gigibytes ( 2^30 = 1024^3 bytes)' 'Ti:tebibytes ( 2^40 = 1024^4 bytes)' ) _describe -t units 'size limit units' suff -V 'units' elif compset -P '[-+]'; then _message -e 'size limit number (full format: <+->)' else _values 'size limit prefix (full format: )' \ '\+[file size must be greater or equal to]'\ '-[file size must be less than or equal to]' && ret=0 fi ;; esac return ret } _fd "$@" # ------------------------------------------------------------------------------ # Copyright (c) 2011 GitHub zsh-users - http://github.com/zsh-users # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of the zsh-users nor the # names of its contributors may be used to endorse or promote products # derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL ZSH-USERS BE LIABLE FOR ANY # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
# ------------------------------------------------------------------------------ # Description # ----------- # # Completion script for fd # # ------------------------------------------------------------------------------ # Authors # ------- # # * smancill (https://github.com/smancill) # # ------------------------------------------------------------------------------ # Local Variables: # mode: shell-script # coding: utf-8-unix # indent-tabs-mode: nil # sh-indentation: 2 # sh-basic-offset: 2 # End: # vim: ft=zsh sw=2 ts=2 et fd-find-8.3.1/doc/.gitattributes000064400000000000000000000000240072674642500146300ustar 00000000000000* linguist-vendored fd-find-8.3.1/doc/fd.1000064400000000000000000000315060072674642500124210ustar 00000000000000.TH FD 1 .SH NAME fd \- find entries in the filesystem .SH SYNOPSIS .B fd .RB [ \-HIEsiaLp0hV ] .RB [ \-d .IR depth ] .RB [ \-t .IR filetype ] .RB [ \-e .IR ext ] .RB [ \-E .IR exclude ] .RB [ \-c .IR when ] .RB [ \-j .IR num ] .RB [ \-x .IR cmd ] .RI [ pattern ] .RI [ path... ] .SH DESCRIPTION .B fd is a simple, fast and user-friendly alternative to .BR find (1). .SH OPTIONS .TP .B \-H, \-\-hidden Include hidden files and directories in the search results (default: hidden files and directories are skipped). The flag can be overridden with '--no-hidden'. .TP .B \-I, \-\-no\-ignore Show search results from files and directories that would otherwise be ignored by .RS .IP \[bu] 2 .I .gitignore .IP \[bu] .I .git/info/exclude .IP \[bu] The global gitignore configuration (by default .IR $HOME/.config/git/ignore ) .IP \[bu] .I .ignore .IP \[bu] .I .fdignore .IP \[bu] The global fd ignore file (usually .I $HOME/.config/fd/ignore ) .RE .IP The flag can be overridden with '--ignore'. .TP .B \-u, \-\-unrestricted Alias for '--no-ignore'. Can be repeated; '-uu' is an alias for '--no-ignore --hidden'. .TP .B \-\-no\-ignore\-vcs Show search results from files and directories that would otherwise be ignored by gitignore files including .IR .gitignore , .IR .git/info/exclude , and the global gitignore configuration .RI ( core.excludesFile git setting, which defaults to .IR $HOME/.config/git/ignore ). The flag can be overridden with '--ignore-vcs'. .TP .B \-\-no\-ignore\-parent Show search results from files and directories that would otherwise be ignored by gitignore files in parent directories. .TP .B \-s, \-\-case\-sensitive Perform a case-sensitive search. By default, fd uses case-insensitive searches, unless the pattern contains an uppercase character (smart case). .TP .B \-i, \-\-ignore\-case Perform a case-insensitive search. By default, fd uses case-insensitive searches, unless the pattern contains an uppercase character (smart case). .TP .B \-g, \-\-glob Perform a glob-based search instead of a regular expression search. If combined with the '\-\-full-path' option, '**' can be used to match multiple path components. .TP .B \-\-regex Perform a regular-expression based search (default). This can be used to override --glob. .TP .B \-F, \-\-fixed\-strings Treat the pattern as a literal string instead of a regular expression. Note that this also performs substring comparison. If you want to match on an exact filename, consider using '\-\-glob'. .TP .B \-a, \-\-absolute\-path Shows the full path starting from the root as opposed to relative paths. The flag can be overridden with '--relative-path'. .TP .B \-l, \-\-list\-details Use a detailed listing format like 'ls -l'. This is basically an alias for '--exec-batch ls -l' with some additional 'ls' options. 
This can be used to see more metadata, to show symlink targets and to achieve a deterministic sort order. .TP .B \-L, \-\-follow By default, fd does not descend into symlinked directories. Using this flag, symbolic links are also traversed. The flag can be overridden with '--no-follow'. .TP .B \-p, \-\-full\-path By default, the search pattern is only matched against the filename (or directory name). Using this flag, the .I pattern is matched against the full path. .TP .B \-0, \-\-print0 Separate search results by the null character (instead of newlines). Useful for piping results to .IR xargs . .TP .B \-\-max\-results count Limit the number of search results to 'count' and quit immediately. .TP .B \-1 Limit the search to a single result and quit immediately. This is an alias for '--max-results=1'. .TP .B \-q, \-\-quiet When the flag is present, the program does not print anything and will instead exit with a code of 0 if there is at least one search result. Otherwise, the exit code will be 1. This is mainly for usage in scripts and can be faster than checking for output because the search can be stopped early after the first match. .B \-\-has\-results can be used as an alias. .TP .B \-\-show-errors Enable the display of filesystem errors for situations such as insufficient permissions or dead symlinks. .TP .B \-\-strip-cwd-prefix By default, relative paths are prefixed with './' when the output goes to a non interactive terminal (TTY). Use this flag to disable this behaviour. .TP .B \-\-one\-file\-system, \-\-mount, \-\-xdev By default, fd will traverse the file system tree as far as other options dictate. With this flag, fd ensures that it does not descend into a different file system than the one it started in. Comparable to the -mount or -xdev filters of find(1). .TP .B \-h, \-\-help Print help information. .TP .B \-V, \-\-version Print version information. .TP .BI "\-d, \-\-max\-depth " d Limit directory traversal to at most .I d levels of depth. By default, there is no limit on the search depth. .TP .BI "\-\-min\-depth " d Only show search results starting at the given depth. See also: '--max-depth' and '--exact-depth'. .TP .BI "\-\-exact\-depth " d Only show search results at the exact given depth. This is an alias for '--min-depth --max-depth '. .TP .B \-\-prune Do not traverse into matching directories. .TP .BI "\-t, \-\-type " filetype Filter search by type: .RS .IP "f, file" regular files .IP "d, directory" directories .IP "l, symlink" symbolic links .IP "s, socket" sockets .IP "p, pipe" named pipes (FIFOs) .IP "x, executable" executable (files) .IP "e, empty" empty files or directories .RE .RS This option can be specified more than once to include multiple file types. Searching for '--type file --type symlink' will show both regular files as well as symlinks. Note that the 'executable' and 'empty' filters work differently: '--type executable' implies '--type file' by default. And '--type empty' searches for empty files and directories, unless either '--type file' or '--type directory' is specified in addition. Examples: - Only search for files: fd --type file … fd -tf … - Find both files and symlinks fd --type file --type symlink … fd -tf -tl … - Find executable files: fd --type executable fd -tx - Find empty files: fd --type empty --type file fd -te -tf - Find empty directories: fd --type empty --type directory fd -te -td .RE .TP .BI "\-e, \-\-extension " ext Filter search results by file extension .IR ext . 
This option can be used repeatedly to allow for multiple possible file extensions. If you want to search for files without extension, you can use the regex '^[^.]+$' as a normal search pattern. .TP .BI "\-E, \-\-exclude " pattern Exclude files/directories that match the given glob pattern. This overrides any other ignore logic. Multiple exclude patterns can be specified. Examples: \-\-exclude '*.pyc' \-\-exclude node_modules .TP .BI "\-\-ignore-file " path Add a custom ignore-file in '.gitignore' format. These files have a low precedence. .TP .BI "\-c, \-\-color " when Declare .I when to colorize search results: .RS .IP auto Colorize output when standard output is connected to terminal (default). .IP never Do not colorize output. .IP always Always colorize output. .RE .TP .BI "\-j, \-\-threads " num Set number of threads to use for searching & executing (default: number of available CPU cores). .TP .BI "\-S, \-\-size " size Limit results based on the size of files using the format .I <+-> .RS .IP '+' file size must be greater than or equal to this .IP '-' file size must be less than or equal to this .P If neither '+' nor '-' is specified, file size must be exactly equal to this. .IP 'NUM' The numeric size (e.g. 500) .IP 'UNIT' The units for NUM. They are not case-sensitive. Allowed unit values: .RS .IP 'b' bytes .IP 'k' kilobytes (base ten, 10^3 = 1000 bytes) .IP 'm' megabytes .IP 'g' gigabytes .IP 't' terabytes .IP 'ki' kibibytes (base two, 2^10 = 1024 bytes) .IP 'mi' mebibytes .IP 'gi' gibibytes .IP 'ti' tebibytes .RE .RE .TP .BI "\-\-changed-within " date|duration Filter results based on the file modification time. Files with modification times greater than the argument will be returned. The argument can be provided as a duration (\fI10h, 1d, 35min\fR) or as a specific point in time in either full RFC3339 format with time zone, or as a date or datetime in the local time zone (\fIYYYY-MM-DD\fR or \fIYYYY-MM-DD HH:MM:SS\fR). .B --change-newer-than can be used as an alias. Examples: \-\-changed-within 2weeks \-\-change-newer-than "2018-10-27 10:00:00" .TP .BI "\-\-changed-before " date|duration Filter results based on the file modification time. Files with modification times less than the argument will be returned. The argument can be provided as a duration (\fI10h, 1d, 35min\fR) or as a specific point in time in either full RFC3339 format with time zone, or as a date or datetime in the local time zone (\fIYYYY-MM-DD\fR or \fIYYYY-MM-DD HH:MM:SS\fR). .B --change-older-than can be used as an alias. Examples: \-\-changed-before "2018-10-27 10:00:00" \-\-change-older-than 2weeks .TP .BI "-o, \-\-owner " [user][:group] Filter files by their user and/or group. Format: [(user|uid)][:(group|gid)]. Either side is optional. Precede either side with a '!' to exclude files instead. Examples: \-\-owner john \-\-owner :students \-\-owner "!john:students" .TP .BI "\-\-base\-directory " path Change the current working directory of fd to the provided path. This means that search results will be shown with respect to the given base path. Note that relative paths which are passed to fd via the positional \fIpath\fR argument or the \fB\-\-search\-path\fR option will also be resolved relative to this directory. .TP .BI "\-\-path\-separator " separator Set the path separator to use when printing file paths. The default is the OS-specific separator ('/' on Unix, '\\' on Windows). .TP .BI "\-\-search\-path " search\-path Provide paths to search as an alternative to the positional \fIpath\fR argument. 
Changes the usage to \'fd [FLAGS/OPTIONS] \-\-search\-path PATH \-\-search\-path PATH2 [PATTERN]\' .TP .BI "\-x, \-\-exec " command .RS Execute .I command for each search result in parallel (use --threads=1 for sequential command execution). Note that all subsequent positional arguments are considered to be arguments to the .I command - not to fd. It is therefore recommended to place the \-x/\-\-exec option last. Alternatively, you can supply a ';' argument to end the argument list and continue with more fd options. Most shells require ';' to be escaped: '\\;'. The following placeholders are substituted before the command is executed: .RS .IP {} path (of the current search result) .IP {/} basename .IP {//} parent directory .IP {.} path without file extension .IP {/.} basename without file extension .RE If no placeholder is present, an implicit "{}" at the end is assumed. Examples: - find all *.zip files and unzip them: fd -e zip -x unzip - find *.h and *.cpp files and run "clang-format -i .." for each of them: fd -e h -e cpp -x clang-format -i - Convert all *.jpg files to *.png files: fd -e jpg -x convert {} {.}.png .RE .TP .BI "\-X, \-\-exec-batch " command .RS Execute .I command once, with all search results as arguments. One of the following placeholders is substituted before the command is executed: .RS .IP {} path (of all search results) .IP {/} basename .IP {//} parent directory .IP {.} path without file extension .IP {/.} basename without file extension .RE If no placeholder is present, an implicit "{}" at the end is assumed. Examples: - Find all test_*.py files and open them in your favorite editor: fd -g 'test_*.py' -X vim Note that this executes a single "vim" process with all search results as arguments. - Find all *.rs files and count the lines with "wc -l ...": fd -e rs -X wc -l .RE .TP .BI "\-\-batch-size " size Maximum number of arguments to pass to the command given with -X. If the number of results is greater than the given size, the command given with -X is run again with remaining arguments. A batch size of zero means there is no limit. .SH PATTERN SYNTAX The regular expression syntax used by fd is documented here: https://docs.rs/regex/1.0.0/regex/#syntax The glob syntax is documented here: https://docs.rs/globset/#syntax .SH ENVIRONMENT .TP .B LS_COLORS Determines how to colorize search results, see .BR dircolors (1) . .TP .B NO_COLOR Disables colorized output. .TP .B XDG_CONFIG_HOME, HOME Used to locate the global ignore file. If .B XDG_CONFIG_HOME is set, use .IR $XDG_CONFIG_HOME/fd/ignore . Otherwise, use .IR $HOME/.config/fd/ignore . .SH EXAMPLES .TP .RI "Find files and directories that match the pattern '" needle "':" $ fd needle .TP .RI "Start a search in a given directory (" /var/log "):" $ fd nginx /var/log .TP .RI "Find all Python files (all files with the extension " .py ") in the current directory:" $ fd -e py .TP .RI "Open all search results with vim:" $ fd pattern -X vim .TP .BI "\-\-batch\-size " size Pass at most .I size arguments to each call to the command given with -X. 
.TP .SH SEE ALSO .BR find (1) fd-find-8.3.1/doc/screencast.sh000064400000000000000000000014040072674642500144260ustar 00000000000000#!/bin/bash # Designed to be executed via svg-term from the fd root directory: # svg-term --command="bash doc/screencast.sh" --out doc/screencast.svg --padding=10 set -e set -u PROMPT="▶" enter() { INPUT=$1 DELAY=1 prompt sleep "$DELAY" type "$INPUT" sleep 0.5 printf '%b' "\\n" eval "$INPUT" type "\\n" } prompt() { printf '%b ' "$PROMPT" | pv -q } type() { printf '%b' "$1" | pv -qL $((10+(-2 + RANDOM%5))) } main() { IFS='%' enter "fd" enter "fd app" enter "fd sh" enter "fd sh --type f" enter "fd -e md" enter "fd -e md --exec wc -l" enter "fd '^[A-Z]'" enter "fd --exclude src" enter "fd --hidden sample" prompt sleep 3 echo "" unset IFS } main fd-find-8.3.1/doc/screencast.svg000064400000000000000000003462450072674642500146320ustar 00000000000000ffdCONTRIBUTING.mdCargo.lockCargo.tomlLICENSE-APACHELICENSE-MITREADME.mdappveyor.ymlbuild.rscici/before_deploy.bashdocdoc/fd.1doc/screencast.shdoc/screencast.svgsrcsrc/app.rssrc/execsrc/exec/command.rssrc/exec/input.rssrc/exec/job.rssrc/exec/mod.rssrc/exec/token.rssrc/exit_codes.rssrc/fshelpersrc/fshelper/mod.rssrc/internal.rssrc/lscolorssrc/lscolors/mod.rssrc/main.rssrc/output.rssrc/walk.rssrc/windows.rsteststests/testenvtests/testenv/mod.rstests/tests.rsfdappfdsfdshfdsh--typefdsh--typeffd-fd-efd-emfd-emdfd-emd--execfd-emd--execwcfd-emd--execwc-l20CONTRIBUTING.md356README.mdfd'^[A-Z]'fd--excludefd--excludesrcfd--hiddenfd--hiddensample.git/hooks/applypatch-msg.sample.git/hooks/commit-msg.sample.git/hooks/fsmonitor-watchman.sample.git/hooks/post-update.sample.git/hooks/pre-applypatch.sample.git/hooks/pre-commit.sample.git/hooks/pre-push.sample.git/hooks/pre-rebase.sample.git/hooks/pre-receive.sample.git/hooks/prepare-commit-msg.sample.git/hooks/update.samplefdafdapfdsh-fdsh--fdsh--tfdsh--tyfdsh--typfd-emd-fd-emd--fd-emd--efd-emd--exfd-emd--exefd-emd--execwfd-emd--execwc-fd'fd'^fd'^[fd'^[Afd'^[A-fd'^[A-Zfd'^[A-Z]fd--efd--exfd--excfd--exclfd--exclufd--excludesfd--excludesrfd--fd--hfd--hifd--hidfd--hiddfd--hiddefd--hiddensfd--hiddensafd--hiddensamfd--hiddensampfd--hiddensampl fd-find-8.3.1/src/app.rs000064400000000000000000001022470072674642500131170ustar 00000000000000use clap::{crate_version, App, AppSettings, Arg}; pub fn build_app() -> App<'static, 'static> { let clap_color_setting = if std::env::var_os("NO_COLOR").is_none() { AppSettings::ColoredHelp } else { AppSettings::ColorNever }; let mut app = App::new("fd") .version(crate_version!()) .usage("fd [FLAGS/OPTIONS] [] [...]") .setting(clap_color_setting) .setting(AppSettings::DeriveDisplayOrder) .after_help( "Note: `fd -h` prints a short and concise overview while `fd --help` gives all \ details.", ) .arg( Arg::with_name("hidden") .long("hidden") .short("H") .overrides_with("hidden") .help("Search hidden files and directories") .long_help( "Include hidden directories and files in the search results (default: \ hidden files and directories are skipped). Files and directories are \ considered to be hidden if their name starts with a `.` sign (dot). 
\ The flag can be overridden with --no-hidden.", ), ) .arg( Arg::with_name("no-hidden") .long("no-hidden") .overrides_with("hidden") .hidden(true) .long_help( "Overrides --hidden.", ), ) .arg( Arg::with_name("no-ignore") .long("no-ignore") .short("I") .overrides_with("no-ignore") .help("Do not respect .(git|fd)ignore files") .long_help( "Show search results from files and directories that would otherwise be \ ignored by '.gitignore', '.ignore', '.fdignore', or the global ignore file. \ The flag can be overridden with --ignore.", ), ) .arg( Arg::with_name("ignore") .long("ignore") .overrides_with("no-ignore") .hidden(true) .long_help( "Overrides --no-ignore.", ), ) .arg( Arg::with_name("no-ignore-vcs") .long("no-ignore-vcs") .overrides_with("no-ignore-vcs") .hidden_short_help(true) .help("Do not respect .gitignore files") .long_help( "Show search results from files and directories that would otherwise be \ ignored by '.gitignore' files. The flag can be overridden with --ignore-vcs.", ), ) .arg( Arg::with_name("ignore-vcs") .long("ignore-vcs") .overrides_with("no-ignore-vcs") .hidden(true) .long_help( "Overrides --no-ignore-vcs.", ), ) .arg( Arg::with_name("no-ignore-parent") .long("no-ignore-parent") .overrides_with("no-ignore-parent") .hidden_short_help(true) .help("Do not respect .(git|fd)ignore files in parent directories") .long_help( "Show search results from files and directories that would otherwise be \ ignored by '.gitignore', '.ignore', or '.fdignore' files in parent directories.", ), ) .arg( Arg::with_name("no-global-ignore-file") .long("no-global-ignore-file") .hidden(true) .help("Do not respect the global ignore file") .long_help("Do not respect the global ignore file."), ) .arg( Arg::with_name("rg-alias-hidden-ignore") .short("u") .long("unrestricted") .overrides_with_all(&["ignore", "no-hidden"]) .multiple(true) .hidden_short_help(true) .help("Alias for '--no-ignore', and '--hidden' when given twice") .long_help( "Alias for '--no-ignore'. Can be repeated. '-uu' is an alias for \ '--no-ignore --hidden'.", ), ) .arg( Arg::with_name("case-sensitive") .long("case-sensitive") .short("s") .overrides_with_all(&["ignore-case", "case-sensitive"]) .help("Case-sensitive search (default: smart case)") .long_help( "Perform a case-sensitive search. By default, fd uses case-insensitive \ searches, unless the pattern contains an uppercase character (smart \ case).", ), ) .arg( Arg::with_name("ignore-case") .long("ignore-case") .short("i") .overrides_with_all(&["case-sensitive", "ignore-case"]) .help("Case-insensitive search (default: smart case)") .long_help( "Perform a case-insensitive search. By default, fd uses case-insensitive \ searches, unless the pattern contains an uppercase character (smart \ case).", ), ) .arg( Arg::with_name("glob") .long("glob") .short("g") .conflicts_with("fixed-strings") .overrides_with("glob") .help("Glob-based search (default: regular expression)") .long_help("Perform a glob-based search instead of a regular expression search."), ) .arg( Arg::with_name("regex") .long("regex") .overrides_with_all(&["glob", "regex"]) .hidden_short_help(true) .help("Regular-expression based search (default)") .long_help( "Perform a regular-expression based search (default). 
This can be used to \ override --glob.", ), ) .arg( Arg::with_name("fixed-strings") .long("fixed-strings") .short("F") .alias("literal") .overrides_with("fixed-strings") .hidden_short_help(true) .help("Treat pattern as literal string instead of regex") .long_help( "Treat the pattern as a literal string instead of a regular expression. Note \ that this also performs substring comparison. If you want to match on an \ exact filename, consider using '--glob'.", ), ) .arg( Arg::with_name("absolute-path") .long("absolute-path") .short("a") .overrides_with("absolute-path") .help("Show absolute instead of relative paths") .long_help( "Shows the full path starting from the root as opposed to relative paths. \ The flag can be overridden with --relative-path.", ), ) .arg( Arg::with_name("relative-path") .long("relative-path") .overrides_with("absolute-path") .hidden(true) .long_help( "Overrides --absolute-path.", ), ) .arg( Arg::with_name("list-details") .long("list-details") .short("l") .conflicts_with("absolute-path") .help("Use a long listing format with file metadata") .long_help( "Use a detailed listing format like 'ls -l'. This is basically an alias \ for '--exec-batch ls -l' with some additional 'ls' options. This can be \ used to see more metadata, to show symlink targets and to achieve a \ deterministic sort order.", ), ) .arg( Arg::with_name("follow") .long("follow") .short("L") .alias("dereference") .overrides_with("follow") .help("Follow symbolic links") .long_help( "By default, fd does not descend into symlinked directories. Using this \ flag, symbolic links are also traversed. \ Flag can be overriden with --no-follow.", ), ) .arg( Arg::with_name("no-follow") .long("no-follow") .overrides_with("follow") .hidden(true) .long_help( "Overrides --follow.", ), ) .arg( Arg::with_name("full-path") .long("full-path") .short("p") .overrides_with("full-path") .help("Search full abs. path (default: filename only)") .long_help( "By default, the search pattern is only matched against the filename (or \ directory name). Using this flag, the pattern is matched against the full \ (absolute) path. Example:\n \ fd --glob -p '**/.git/config'", ), ) .arg( Arg::with_name("null_separator") .long("print0") .short("0") .overrides_with("print0") .conflicts_with("list-details") .hidden_short_help(true) .help("Separate results by the null character") .long_help( "Separate search results by the null character (instead of newlines). \ Useful for piping results to 'xargs'.", ), ) .arg( Arg::with_name("max-depth") .long("max-depth") .short("d") .takes_value(true) .value_name("depth") .help("Set maximum search depth (default: none)") .long_help( "Limit the directory traversal to a given depth. By default, there is no \ limit on the search depth.", ), ) // support --maxdepth as well, for compatibility with rg .arg( Arg::with_name("rg-depth") .long("maxdepth") .hidden(true) .takes_value(true) .help("Set maximum search depth (default: none)") ) .arg( Arg::with_name("min-depth") .long("min-depth") .takes_value(true) .value_name("depth") .hidden_short_help(true) .help("Only show results starting at given depth") .long_help( "Only show search results starting at the given depth. \ See also: '--max-depth' and '--exact-depth'", ), ) .arg( Arg::with_name("exact-depth") .long("exact-depth") .takes_value(true) .value_name("depth") .hidden_short_help(true) .conflicts_with_all(&["max-depth", "min-depth"]) .help("Only show results at exact given depth") .long_help( "Only show search results at the exact given depth. 
This is an alias for \ '--min-depth --max-depth '.", ), ) .arg( Arg::with_name("prune") .long("prune") .conflicts_with_all(&["size", "exact-depth"]) .hidden_short_help(true) .help("Do not traverse into matching directories") .long_help("Do not traverse into directories that match the search criteria. If \ you want to exclude specific directories, use the '--exclude=…' option.") ) .arg( Arg::with_name("file-type") .long("type") .short("t") .multiple(true) .number_of_values(1) .takes_value(true) .value_name("filetype") .possible_values(&[ "f", "file", "d", "directory", "l", "symlink", "x", "executable", "e", "empty", "s", "socket", "p", "pipe", ]) .hide_possible_values(true) .help( "Filter by type: file (f), directory (d), symlink (l),\nexecutable (x), \ empty (e), socket (s), pipe (p)", ) .long_help( "Filter the search by type:\n \ 'f' or 'file': regular files\n \ 'd' or 'directory': directories\n \ 'l' or 'symlink': symbolic links\n \ 's' or 'socket': socket\n \ 'p' or 'pipe': named pipe (FIFO)\n\n \ 'x' or 'executable': executables\n \ 'e' or 'empty': empty files or directories\n\n\ This option can be specified more than once to include multiple file types. \ Searching for '--type file --type symlink' will show both regular files as \ well as symlinks. Note that the 'executable' and 'empty' filters work differently: \ '--type executable' implies '--type file' by default. And '--type empty' searches \ for empty files and directories, unless either '--type file' or '--type directory' \ is specified in addition.\n\n\ Examples:\n \ - Only search for files:\n \ fd --type file …\n \ fd -tf …\n \ - Find both files and symlinks\n \ fd --type file --type symlink …\n \ fd -tf -tl …\n \ - Find executable files:\n \ fd --type executable\n \ fd -tx\n \ - Find empty files:\n \ fd --type empty --type file\n \ fd -te -tf\n \ - Find empty directories:\n \ fd --type empty --type directory\n \ fd -te -td" ), ) .arg( Arg::with_name("extension") .long("extension") .short("e") .multiple(true) .number_of_values(1) .takes_value(true) .value_name("ext") .help("Filter by file extension") .long_help( "(Additionally) filter search results by their file extension. Multiple \ allowable file extensions can be specified.\n\ If you want to search for files without extension, \ you can use the regex '^[^.]+$' as a normal search pattern.", ), ) .arg( Arg::with_name("exec") .long("exec") .short("x") .min_values(1) .allow_hyphen_values(true) .value_terminator(";") .value_name("cmd") .conflicts_with("list-details") .help("Execute a command for each search result") .long_help( "Execute a command for each search result in parallel (use --threads=1 for sequential command execution). \ All positional arguments following --exec are considered to be arguments to the command - not to fd. 
\ It is therefore recommended to place the '-x'/'--exec' option last.\n\ The following placeholders are substituted before the command is executed:\n \ '{}': path (of the current search result)\n \ '{/}': basename\n \ '{//}': parent directory\n \ '{.}': path without file extension\n \ '{/.}': basename without file extension\n\n\ If no placeholder is present, an implicit \"{}\" at the end is assumed.\n\n\ Examples:\n\n \ - find all *.zip files and unzip them:\n\n \ fd -e zip -x unzip\n\n \ - find *.h and *.cpp files and run \"clang-format -i ..\" for each of them:\n\n \ fd -e h -e cpp -x clang-format -i\n\n \ - Convert all *.jpg files to *.png files:\n\n \ fd -e jpg -x convert {} {.}.png\ ", ), ) .arg( Arg::with_name("exec-batch") .long("exec-batch") .short("X") .min_values(1) .allow_hyphen_values(true) .value_terminator(";") .value_name("cmd") .conflicts_with_all(&["exec", "list-details"]) .help("Execute a command with all search results at once") .long_help( "Execute the given command once, with all search results as arguments.\n\ One of the following placeholders is substituted before the command is executed:\n \ '{}': path (of all search results)\n \ '{/}': basename\n \ '{//}': parent directory\n \ '{.}': path without file extension\n \ '{/.}': basename without file extension\n\n\ If no placeholder is present, an implicit \"{}\" at the end is assumed.\n\n\ Examples:\n\n \ - Find all test_*.py files and open them in your favorite editor:\n\n \ fd -g 'test_*.py' -X vim\n\n \ - Find all *.rs files and count the lines with \"wc -l ...\":\n\n \ fd -e rs -X wc -l\ " ), ) .arg( Arg::with_name("batch-size") .long("batch-size") .takes_value(true) .value_name("size") .hidden_short_help(true) .requires("exec-batch") .help("Max number of arguments to run as a batch with -X") .long_help( "Maximum number of arguments to pass to the command given with -X. \ If the number of results is greater than the given size, \ the command given with -X is run again with remaining arguments. \ A batch size of zero means there is no limit.", ), ) .arg( Arg::with_name("exclude") .long("exclude") .short("E") .takes_value(true) .value_name("pattern") .number_of_values(1) .multiple(true) .help("Exclude entries that match the given glob pattern") .long_help( "Exclude files/directories that match the given glob pattern. This \ overrides any other ignore logic. Multiple exclude patterns can be \ specified.\n\n\ Examples:\n \ --exclude '*.pyc'\n \ --exclude node_modules", ), ) .arg( Arg::with_name("ignore-file") .long("ignore-file") .takes_value(true) .value_name("path") .number_of_values(1) .multiple(true) .hidden_short_help(true) .help("Add custom ignore-file in '.gitignore' format") .long_help( "Add a custom ignore-file in '.gitignore' format. 
These files have a low \ precedence.", ), ) .arg( Arg::with_name("color") .long("color") .short("c") .takes_value(true) .value_name("when") .possible_values(&["never", "auto", "always"]) .hide_possible_values(true) .help("When to use colors: never, *auto*, always") .long_help( "Declare when to use color for the pattern match output:\n \ 'auto': show colors if the output goes to an interactive console (default)\n \ 'never': do not use colorized output\n \ 'always': always use colorized output", ), ) .arg( Arg::with_name("threads") .long("threads") .short("j") .takes_value(true) .value_name("num") .hidden_short_help(true) .help("Set number of threads") .long_help( "Set number of threads to use for searching & executing (default: number \ of available CPU cores)", ), ) .arg( Arg::with_name("size") .long("size") .short("S") .takes_value(true) .number_of_values(1) .allow_hyphen_values(true) .multiple(true) .help("Limit results based on the size of files") .long_help( "Limit results based on the size of files using the format <+->.\n \ '+': file size must be greater than or equal to this\n \ '-': file size must be less than or equal to this\n\ If neither '+' nor '-' is specified, file size must be exactly equal to this.\n \ 'NUM': The numeric size (e.g. 500)\n \ 'UNIT': The units for NUM. They are not case-sensitive.\n\ Allowed unit values:\n \ 'b': bytes\n \ 'k': kilobytes (base ten, 10^3 = 1000 bytes)\n \ 'm': megabytes\n \ 'g': gigabytes\n \ 't': terabytes\n \ 'ki': kibibytes (base two, 2^10 = 1024 bytes)\n \ 'mi': mebibytes\n \ 'gi': gibibytes\n \ 'ti': tebibytes", ), ) .arg( Arg::with_name("max-buffer-time") .long("max-buffer-time") .takes_value(true) .hidden(true) .help("Milliseconds to buffer before streaming search results to console") .long_help( "Amount of time in milliseconds to buffer, before streaming the search \ results to the console.", ), ) .arg( Arg::with_name("changed-within") .long("changed-within") .alias("change-newer-than") .alias("newer") .takes_value(true) .value_name("date|dur") .number_of_values(1) .help("Filter by file modification time (newer than)") .long_help( "Filter results based on the file modification time. The argument can be provided \ as a specific point in time (YYYY-MM-DD HH:MM:SS) or as a duration (10h, 1d, 35min). \ If the time is not specified, it defaults to 00:00:00. \ '--change-newer-than' or '--newer' can be used as aliases.\n\ Examples:\n \ --changed-within 2weeks\n \ --change-newer-than '2018-10-27 10:00:00'\n \ --newer 2018-10-27", ), ) .arg( Arg::with_name("changed-before") .long("changed-before") .alias("change-older-than") .alias("older") .takes_value(true) .value_name("date|dur") .number_of_values(1) .help("Filter by file modification time (older than)") .long_help( "Filter results based on the file modification time. The argument can be provided \ as a specific point in time (YYYY-MM-DD HH:MM:SS) or as a duration (10h, 1d, 35min). \ '--change-older-than' or '--older' can be used as aliases.\n\ Examples:\n \ --changed-before '2018-10-27 10:00:00'\n \ --change-older-than 2weeks\n \ --older 2018-10-27", ), ) .arg( Arg::with_name("max-results") .long("max-results") .takes_value(true) .value_name("count") // We currently do not support --max-results in combination with // program execution because the results that come up in a --max-results // search are non-deterministic. Users might think that they can run the // same search with `--exec rm` attached and get a reliable removal of // the files they saw in the previous search. 
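// (Illustrative example of the hazard being prevented, not taken from the sources: a follow-up run of a hypothetical `fd --max-results=10 pattern --exec rm` could act on a different set of files than the ones listed by the previous `fd --max-results=10 pattern`, so the combination is rejected below.)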
.conflicts_with_all(&["exec", "exec-batch", "list-details"]) .hidden_short_help(true) .help("Limit number of search results") .long_help("Limit the number of search results to 'count' and quit immediately."), ) .arg( Arg::with_name("max-one-result") .short("1") .hidden_short_help(true) .overrides_with("max-results") .conflicts_with_all(&["exec", "exec-batch", "list-details"]) .help("Limit search to a single result") .long_help("Limit the search to a single result and quit immediately. \ This is an alias for '--max-results=1'.") ) .arg( Arg::with_name("quiet") .long("quiet") .short("q") .alias("has-results") .hidden_short_help(true) .conflicts_with_all(&["exec", "exec-batch", "list-details", "max-results"]) .help("Print nothing, exit code 0 if match found, 1 otherwise") .long_help( "When the flag is present, the program does not print anything and will \ return with an exit code of 0 if there is at least one match. Otherwise, the \ exit code will be 1. \ '--has-results' can be used as an alias." ) ) .arg( Arg::with_name("show-errors") .long("show-errors") .hidden_short_help(true) .overrides_with("show-errors") .help("Show filesystem errors") .long_help( "Enable the display of filesystem errors for situations such as \ insufficient permissions or dead symlinks.", ), ) .arg( Arg::with_name("base-directory") .long("base-directory") .takes_value(true) .value_name("path") .number_of_values(1) .hidden_short_help(true) .help("Change current working directory") .long_help( "Change the current working directory of fd to the provided path. This \ means that search results will be shown with respect to the given base \ path. Note that relative paths which are passed to fd via the positional \ argument or the '--search-path' option will also be resolved \ relative to this directory.", ), ) .arg( Arg::with_name("pattern").help( "the search pattern (a regular expression, unless '--glob' is used; optional)", ).long_help( "the search pattern which is either a regular expression (default) or a glob \ pattern (if --glob is used). If no pattern has been specified, every entry \ is considered a match. If your pattern starts with a dash (-), make sure to \ pass '--' first, or it will be considered as a flag (fd -- '-foo').") ) .arg( Arg::with_name("path-separator") .takes_value(true) .value_name("separator") .long("path-separator") .hidden_short_help(true) .help("Set path separator when printing file paths") .long_help( "Set the path separator to use when printing file paths. The default is \ the OS-specific separator ('/' on Unix, '\\' on Windows).", ), ) .arg( Arg::with_name("path") .multiple(true) .help("the root directory for the filesystem search (optional)") .long_help( "The directory where the filesystem search is rooted (optional). If \ omitted, search the current working directory.", ), ) .arg( Arg::with_name("search-path") .long("search-path") .takes_value(true) .conflicts_with("path") .multiple(true) .hidden_short_help(true) .number_of_values(1) .help("Provide paths to search as an alternative to the positional ") .long_help( "Provide paths to search as an alternative to the positional \ argument. Changes the usage to `fd [FLAGS/OPTIONS] --search-path \ --search-path []`", ), ) .arg( Arg::with_name("strip-cwd-prefix") .long("strip-cwd-prefix") .conflicts_with_all(&["path", "search-path"]) .hidden_short_help(true) .help("strip './' prefix from non-tty outputs") .long_help( "By default, relative paths are prefixed with './' when the output goes to a non \ interactive terminal (TTY). 
Use this flag to disable this behaviour." ) ); if cfg!(unix) { app = app.arg( Arg::with_name("owner") .long("owner") .short("o") .takes_value(true) .value_name("user:group") .help("Filter by owning user and/or group") .long_help( "Filter files by their user and/or group. \ Format: [(user|uid)][:(group|gid)]. Either side is optional. \ Precede either side with a '!' to exclude files instead.\n\ Examples:\n \ --owner john\n \ --owner :students\n \ --owner '!john:students'", ), ); } // Make `--one-file-system` available only on Unix and Windows platforms, as per the // restrictions on the corresponding option in the `ignore` crate. // Provide aliases `mount` and `xdev` for people coming from `find`. if cfg!(any(unix, windows)) { app = app.arg( Arg::with_name("one-file-system") .long("one-file-system") .aliases(&["mount", "xdev"]) .hidden_short_help(true) .help("Do not descend into a different file system") .long_help( "By default, fd will traverse the file system tree as far as other options \ dictate. With this flag, fd ensures that it does not descend into a \ different file system than the one it started in. Comparable to the -mount \ or -xdev filters of find(1).", ), ); } app } fd-find-8.3.1/src/config.rs000064400000000000000000000100540072674642500135760ustar 00000000000000use std::{path::PathBuf, sync::Arc, time::Duration}; use lscolors::LsColors; use regex::bytes::RegexSet; use crate::exec::CommandTemplate; use crate::filetypes::FileTypes; #[cfg(unix)] use crate::filter::OwnerFilter; use crate::filter::{SizeFilter, TimeFilter}; /// Configuration options for *fd*. pub struct Config { /// Whether the search is case-sensitive or case-insensitive. pub case_sensitive: bool, /// Whether to search within the full file path or just the base name (filename or directory /// name). pub search_full_path: bool, /// Whether to ignore hidden files and directories (or not). pub ignore_hidden: bool, /// Whether to respect `.fdignore` files or not. pub read_fdignore: bool, /// Whether to respect ignore files in parent directories or not. pub read_parent_ignore: bool, /// Whether to respect VCS ignore files (`.gitignore`, ..) or not. pub read_vcsignore: bool, /// Whether to respect the global ignore file or not. pub read_global_ignore: bool, /// Whether to follow symlinks or not. pub follow_links: bool, /// Whether to limit the search to starting file system or not. pub one_file_system: bool, /// Whether elements of output should be separated by a null character pub null_separator: bool, /// The maximum search depth, or `None` if no maximum search depth should be set. /// /// A depth of `1` includes all files under the current directory, a depth of `2` also includes /// all files under subdirectories of the current directory, etc. pub max_depth: Option, /// The minimum depth for reported entries, or `None`. pub min_depth: Option, /// Whether to stop traversing into matching directories. pub prune: bool, /// The number of threads to use. pub threads: usize, /// If true, the program doesn't print anything and will instead return an exit code of 0 /// if there's at least one match. Otherwise, the exit code will be 1. pub quiet: bool, /// Time to buffer results internally before streaming to the console. This is useful to /// provide a sorted output, in case the total execution time is shorter than /// `max_buffer_time`. pub max_buffer_time: Option, /// `None` if the output should not be colorized. Otherwise, a `LsColors` instance that defines /// how to style different filetypes. 
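/// (Assumed population of this field: fd builds it from the `LS_COLORS` environment variable when available, otherwise from the bundled `DEFAULT_LS_COLORS` theme defined in `main.rs`.)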
pub ls_colors: Option, /// Whether or not we are writing to an interactive terminal pub interactive_terminal: bool, /// The type of file to search for. If set to `None`, all file types are displayed. If /// set to `Some(..)`, only the types that are specified are shown. pub file_types: Option, /// The extension to search for. Only entries matching the extension will be included. /// /// The value (if present) will be a lowercase string without leading dots. pub extensions: Option, /// If a value is supplied, each item found will be used to generate and execute commands. pub command: Option>, /// Maximum number of search results to pass to each `command`. If zero, the number is /// unlimited. pub batch_size: usize, /// A list of glob patterns that should be excluded from the search. pub exclude_patterns: Vec, /// A list of custom ignore files. pub ignore_files: Vec, /// The given constraints on the size of returned files pub size_constraints: Vec, /// Constraints on last modification time of files pub time_constraints: Vec, #[cfg(unix)] /// User/group ownership constraint pub owner_constraint: Option, /// Whether or not to display filesystem errors pub show_filesystem_errors: bool, /// The separator used to print file paths. pub path_separator: Option, /// The maximum number of search results pub max_results: Option, /// Whether or not to strip the './' prefix for search results pub strip_cwd_prefix: bool, } fd-find-8.3.1/src/error.rs000064400000000000000000000001340072674642500134600ustar 00000000000000pub fn print_error(msg: impl Into) { eprintln!("[fd error]: {}", msg.into()); } fd-find-8.3.1/src/exec/command.rs000064400000000000000000000030700072674642500146730ustar 00000000000000use std::io; use std::io::Write; use std::process::Command; use std::sync::Mutex; use crate::error::print_error; use crate::exit_codes::ExitCode; /// Executes a command. pub fn execute_command( mut cmd: Command, out_perm: &Mutex<()>, enable_output_buffering: bool, ) -> ExitCode { // Spawn the supplied command. let output = if enable_output_buffering { cmd.output() } else { // If running on only one thread, don't buffer output // Allows for viewing and interacting with intermediate command output cmd.spawn().and_then(|c| c.wait_with_output()) }; // Then wait for the command to exit, if it was spawned. match output { Ok(output) => { // While this lock is active, this thread will be the only thread allowed // to write its outputs. 
let _lock = out_perm.lock().unwrap(); let stdout = io::stdout(); let stderr = io::stderr(); let _ = stdout.lock().write_all(&output.stdout); let _ = stderr.lock().write_all(&output.stderr); if output.status.code() == Some(0) { ExitCode::Success } else { ExitCode::GeneralError } } Err(ref why) if why.kind() == io::ErrorKind::NotFound => { print_error(format!("Command not found: {:?}", cmd)); ExitCode::GeneralError } Err(why) => { print_error(format!("Problem while executing command: {}", why)); ExitCode::GeneralError } } } fd-find-8.3.1/src/exec/input.rs000064400000000000000000000055100072674642500144150ustar 00000000000000use std::ffi::{OsStr, OsString}; use std::path::{Path, PathBuf}; use crate::filesystem::strip_current_dir; /// Removes the parent component of the path pub fn basename(path: &Path) -> &OsStr { path.file_name().unwrap_or_else(|| path.as_os_str()) } /// Removes the extension from the path pub fn remove_extension(path: &Path) -> OsString { let dirname = dirname(path); let stem = path.file_stem().unwrap_or_else(|| path.as_os_str()); let path = PathBuf::from(dirname).join(stem); strip_current_dir(&path).to_owned().into_os_string() } /// Removes the basename from the path. pub fn dirname(path: &Path) -> OsString { path.parent() .map(|p| { if p == OsStr::new("") { OsString::from(".") } else { p.as_os_str().to_owned() } }) .unwrap_or_else(|| path.as_os_str().to_owned()) } #[cfg(test)] mod path_tests { use super::*; use std::path::MAIN_SEPARATOR; fn correct(input: &str) -> String { input.replace('/', &MAIN_SEPARATOR.to_string()) } macro_rules! func_tests { ($($name:ident: $func:ident for $input:expr => $output:expr)+) => { $( #[test] fn $name() { let input_path = PathBuf::from(&correct($input)); let output_string = OsString::from(correct($output)); assert_eq!($func(&input_path), output_string); } )+ } } func_tests! { remove_ext_simple: remove_extension for "foo.txt" => "foo" remove_ext_dir: remove_extension for "dir/foo.txt" => "dir/foo" hidden: remove_extension for ".foo" => ".foo" remove_ext_utf8: remove_extension for "💖.txt" => "💖" remove_ext_empty: remove_extension for "" => "" basename_simple: basename for "foo.txt" => "foo.txt" basename_dir: basename for "dir/foo.txt" => "foo.txt" basename_empty: basename for "" => "" basename_utf8_0: basename for "💖/foo.txt" => "foo.txt" basename_utf8_1: basename for "dir/💖.txt" => "💖.txt" dirname_simple: dirname for "foo.txt" => "." dirname_dir: dirname for "dir/foo.txt" => "dir" dirname_utf8_0: dirname for "💖/foo.txt" => "💖" dirname_utf8_1: dirname for "dir/💖.txt" => "dir" } #[test] #[cfg(windows)] fn dirname_root() { assert_eq!(dirname(&PathBuf::from("C:")), OsString::from("C:")); assert_eq!(dirname(&PathBuf::from("\\")), OsString::from("\\")); } #[test] #[cfg(not(windows))] fn dirname_root() { assert_eq!(dirname(&PathBuf::from("/")), OsString::from("/")); } } fd-find-8.3.1/src/exec/job.rs000064400000000000000000000047460072674642500140420ustar 00000000000000use std::path::PathBuf; use std::sync::mpsc::Receiver; use std::sync::{Arc, Mutex}; use crate::error::print_error; use crate::exit_codes::{merge_exitcodes, ExitCode}; use crate::walk::WorkerResult; use super::CommandTemplate; /// An event loop that listens for inputs from the `rx` receiver. Each received input will /// generate a command with the supplied command template. The generated command will then /// be executed, and this process will continue until the receiver's sender has closed. 
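/// (Several worker threads are expected to run this function concurrently; the receiver is therefore shared behind an `Arc<Mutex<..>>` and locked only long enough to pull the next result.)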
pub fn job( rx: Arc<Mutex<Receiver<WorkerResult>>>, cmd: Arc<CommandTemplate>, out_perm: Arc<Mutex<()>>, show_filesystem_errors: bool, buffer_output: bool, ) -> ExitCode { let mut results: Vec<ExitCode> = Vec::new(); loop { // Create a lock on the shared receiver for this thread. let lock = rx.lock().unwrap(); // Obtain the next result from the receiver, or exit the loop if the channel has closed. let value: PathBuf = match lock.recv() { Ok(WorkerResult::Entry(path)) => path, Ok(WorkerResult::Error(err)) => { if show_filesystem_errors { print_error(err.to_string()); } continue; } Err(_) => break, }; // Drop the lock so that other threads can read from the receiver. drop(lock); // Generate a command, execute it and store its exit code. results.push(cmd.generate_and_execute(&value, Arc::clone(&out_perm), buffer_output)) } // Return a general error if any of the executed commands failed. merge_exitcodes(results) } pub fn batch( rx: Receiver<WorkerResult>, cmd: &CommandTemplate, show_filesystem_errors: bool, buffer_output: bool, limit: usize, ) -> ExitCode { let paths = rx.iter().filter_map(|value| match value { WorkerResult::Entry(path) => Some(path), WorkerResult::Error(err) => { if show_filesystem_errors { print_error(err.to_string()); } None } }); if limit == 0 { // no limit return cmd.generate_and_execute_batch(paths, buffer_output); } let mut exit_codes = Vec::new(); let mut peekable = paths.peekable(); while peekable.peek().is_some() { let limited = peekable.by_ref().take(limit); let exit_code = cmd.generate_and_execute_batch(limited, buffer_output); exit_codes.push(exit_code); } merge_exitcodes(exit_codes) } fd-find-8.3.1/src/exec/mod.rs000064400000371620072674642500140430ustar 00000000000000mod command; mod input; mod job; mod token; use std::borrow::Cow; use std::ffi::{OsStr, OsString}; use std::path::{Component, Path, PathBuf, Prefix}; use std::process::{Command, Stdio}; use std::sync::{Arc, Mutex}; use anyhow::{anyhow, Result}; use once_cell::sync::Lazy; use regex::Regex; use crate::exit_codes::ExitCode; use self::command::execute_command; use self::input::{basename, dirname, remove_extension}; pub use self::job::{batch, job}; use self::token::Token; /// Execution mode of the command #[derive(Debug, Clone, Copy, PartialEq)] pub enum ExecutionMode { /// Command is executed for each search result OneByOne, /// Command is run for a batch of results at once Batch, } /// Represents a template that is utilized to generate command strings. /// /// The template is meant to be coupled with an input in order to generate a command. The /// `generate_and_execute()` method will be used to generate a command and execute it.
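/// Illustrative example (not taken from the sources): a template built from the arguments `["cp", "{}", "{/.}.bak"]`, applied to the search result `dir/foo.txt`, expands to the command `cp dir/foo.txt foo.bak`.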
#[derive(Debug, Clone, PartialEq)] pub struct CommandTemplate { args: Vec, mode: ExecutionMode, path_separator: Option, } impl CommandTemplate { pub fn new(input: I, path_separator: Option) -> CommandTemplate where I: IntoIterator, S: AsRef, { Self::build(input, ExecutionMode::OneByOne, path_separator) } pub fn new_batch(input: I, path_separator: Option) -> Result where I: IntoIterator, S: AsRef, { let cmd = Self::build(input, ExecutionMode::Batch, path_separator); if cmd.number_of_tokens() > 1 { return Err(anyhow!("Only one placeholder allowed for batch commands")); } if cmd.args[0].has_tokens() { return Err(anyhow!( "First argument of exec-batch is expected to be a fixed executable" )); } Ok(cmd) } fn build(input: I, mode: ExecutionMode, path_separator: Option) -> CommandTemplate where I: IntoIterator, S: AsRef, { static PLACEHOLDER_PATTERN: Lazy = Lazy::new(|| Regex::new(r"\{(/?\.?|//)\}").unwrap()); let mut args = Vec::new(); let mut has_placeholder = false; for arg in input { let arg = arg.as_ref(); let mut tokens = Vec::new(); let mut start = 0; for placeholder in PLACEHOLDER_PATTERN.find_iter(arg) { // Leading text before the placeholder. if placeholder.start() > start { tokens.push(Token::Text(arg[start..placeholder.start()].to_owned())); } start = placeholder.end(); match placeholder.as_str() { "{}" => tokens.push(Token::Placeholder), "{.}" => tokens.push(Token::NoExt), "{/}" => tokens.push(Token::Basename), "{//}" => tokens.push(Token::Parent), "{/.}" => tokens.push(Token::BasenameNoExt), _ => unreachable!("Unhandled placeholder"), } has_placeholder = true; } // Without a placeholder, the argument is just fixed text. if tokens.is_empty() { args.push(ArgumentTemplate::Text(arg.to_owned())); continue; } if start < arg.len() { // Trailing text after last placeholder. tokens.push(Token::Text(arg[start..].to_owned())); } args.push(ArgumentTemplate::Tokens(tokens)); } // If a placeholder token was not supplied, append one at the end of the command. if !has_placeholder { args.push(ArgumentTemplate::Tokens(vec![Token::Placeholder])); } CommandTemplate { args, mode, path_separator, } } fn number_of_tokens(&self) -> usize { self.args.iter().filter(|arg| arg.has_tokens()).count() } /// Generates and executes a command. /// /// Using the internal `args` field, and a supplied `input` variable, a `Command` will be /// build. Once all arguments have been processed, the command is executed. pub fn generate_and_execute( &self, input: &Path, out_perm: Arc>, buffer_output: bool, ) -> ExitCode { let mut cmd = Command::new(self.args[0].generate(&input, self.path_separator.as_deref())); for arg in &self.args[1..] { cmd.arg(arg.generate(&input, self.path_separator.as_deref())); } execute_command(cmd, &out_perm, buffer_output) } pub fn in_batch_mode(&self) -> bool { self.mode == ExecutionMode::Batch } pub fn generate_and_execute_batch(&self, paths: I, buffer_output: bool) -> ExitCode where I: Iterator, { let mut cmd = Command::new(self.args[0].generate("", None)); cmd.stdin(Stdio::inherit()); cmd.stdout(Stdio::inherit()); cmd.stderr(Stdio::inherit()); let mut paths: Vec<_> = paths.collect(); let mut has_path = false; for arg in &self.args[1..] 
{ if arg.has_tokens() { paths.sort(); // A single `Tokens` is expected // So we can directly consume the iterator once and for all for path in &mut paths { cmd.arg(arg.generate(path, self.path_separator.as_deref())); has_path = true; } } else { cmd.arg(arg.generate("", None)); } } if has_path { execute_command(cmd, &Mutex::new(()), buffer_output) } else { ExitCode::Success } } } /// Represents a template for a single command argument. /// /// The argument is either a collection of `Token`s including at least one placeholder variant, or /// a fixed text. #[derive(Clone, Debug, PartialEq)] enum ArgumentTemplate { Tokens(Vec), Text(String), } impl ArgumentTemplate { pub fn has_tokens(&self) -> bool { matches!(self, ArgumentTemplate::Tokens(_)) } /// Generate an argument from this template. If path_separator is Some, then it will replace /// the path separator in all placeholder tokens. Text arguments and tokens are not affected by /// path separator substitution. pub fn generate(&self, path: impl AsRef, path_separator: Option<&str>) -> OsString { use self::Token::*; let path = path.as_ref(); match *self { ArgumentTemplate::Tokens(ref tokens) => { let mut s = OsString::new(); for token in tokens { match *token { Basename => s.push(Self::replace_separator(basename(path), path_separator)), BasenameNoExt => s.push(Self::replace_separator( &remove_extension(basename(path).as_ref()), path_separator, )), NoExt => s.push(Self::replace_separator( &remove_extension(path), path_separator, )), Parent => s.push(Self::replace_separator(&dirname(path), path_separator)), Placeholder => { s.push(Self::replace_separator(path.as_ref(), path_separator)) } Text(ref string) => s.push(string), } } s } ArgumentTemplate::Text(ref text) => OsString::from(text), } } /// Replace the path separator in the input with the custom separator string. If path_separator /// is None, simply return a borrowed Cow of the input. Otherwise, the input is /// interpreted as a Path and its components are iterated through and re-joined into a new /// OsString. fn replace_separator<'a>(path: &'a OsStr, path_separator: Option<&str>) -> Cow<'a, OsStr> { // fast-path - no replacement necessary if path_separator.is_none() { return Cow::Borrowed(path); } let path_separator = path_separator.unwrap(); let mut out = OsString::with_capacity(path.len()); let mut components = Path::new(path).components().peekable(); while let Some(comp) = components.next() { match comp { // Absolute paths on Windows are tricky. A Prefix component is usually a drive // letter or UNC path, and is usually followed by RootDir. There are also // "verbatim" prefixes beginning with "\\?\" that skip normalization. We choose to // ignore verbatim path prefixes here because they're very rare, might be // impossible to reach here, and there's no good way to deal with them. If users // are doing something advanced involving verbatim windows paths, they can do their // own output filtering with a tool like sed. Component::Prefix(prefix) => { if let Prefix::UNC(server, share) = prefix.kind() { // Prefix::UNC is a parsed version of '\\server\share' out.push(path_separator); out.push(path_separator); out.push(server); out.push(path_separator); out.push(share); } else { // All other Windows prefix types are rendered as-is. This results in e.g. "C:" for // drive letters. DeviceNS and Verbatim* prefixes won't have backslashes converted, // but they're not returned by directories fd can search anyway so we don't worry // about them. 
out.push(comp.as_os_str()); } } // Root directory is always replaced with the custom separator. Component::RootDir => out.push(path_separator), // Everything else is joined normally, with a trailing separator if we're not last _ => { out.push(comp.as_os_str()); if components.peek().is_some() { out.push(path_separator); } } } } Cow::Owned(out) } } #[cfg(test)] mod tests { use super::*; #[test] fn tokens_with_placeholder() { assert_eq!( CommandTemplate::new(&[&"echo", &"${SHELL}:"], None), CommandTemplate { args: vec![ ArgumentTemplate::Text("echo".into()), ArgumentTemplate::Text("${SHELL}:".into()), ArgumentTemplate::Tokens(vec![Token::Placeholder]), ], mode: ExecutionMode::OneByOne, path_separator: None, } ); } #[test] fn tokens_with_no_extension() { assert_eq!( CommandTemplate::new(&["echo", "{.}"], None), CommandTemplate { args: vec![ ArgumentTemplate::Text("echo".into()), ArgumentTemplate::Tokens(vec![Token::NoExt]), ], mode: ExecutionMode::OneByOne, path_separator: None, } ); } #[test] fn tokens_with_basename() { assert_eq!( CommandTemplate::new(&["echo", "{/}"], None), CommandTemplate { args: vec![ ArgumentTemplate::Text("echo".into()), ArgumentTemplate::Tokens(vec![Token::Basename]), ], mode: ExecutionMode::OneByOne, path_separator: None, } ); } #[test] fn tokens_with_parent() { assert_eq!( CommandTemplate::new(&["echo", "{//}"], None), CommandTemplate { args: vec![ ArgumentTemplate::Text("echo".into()), ArgumentTemplate::Tokens(vec![Token::Parent]), ], mode: ExecutionMode::OneByOne, path_separator: None, } ); } #[test] fn tokens_with_basename_no_extension() { assert_eq!( CommandTemplate::new(&["echo", "{/.}"], None), CommandTemplate { args: vec![ ArgumentTemplate::Text("echo".into()), ArgumentTemplate::Tokens(vec![Token::BasenameNoExt]), ], mode: ExecutionMode::OneByOne, path_separator: None, } ); } #[test] fn tokens_multiple() { assert_eq!( CommandTemplate::new(&["cp", "{}", "{/.}.ext"], None), CommandTemplate { args: vec![ ArgumentTemplate::Text("cp".into()), ArgumentTemplate::Tokens(vec![Token::Placeholder]), ArgumentTemplate::Tokens(vec![ Token::BasenameNoExt, Token::Text(".ext".into()) ]), ], mode: ExecutionMode::OneByOne, path_separator: None, } ); } #[test] fn tokens_single_batch() { assert_eq!( CommandTemplate::new_batch(&["echo", "{.}"], None).unwrap(), CommandTemplate { args: vec![ ArgumentTemplate::Text("echo".into()), ArgumentTemplate::Tokens(vec![Token::NoExt]), ], mode: ExecutionMode::Batch, path_separator: None, } ); } #[test] fn tokens_multiple_batch() { assert!(CommandTemplate::new_batch(&["echo", "{.}", "{}"], None).is_err()); } #[test] fn generate_custom_path_separator() { let arg = ArgumentTemplate::Tokens(vec![Token::Placeholder]); macro_rules! check { ($input:expr, $expected:expr) => { assert_eq!(arg.generate($input, Some("#")), OsString::from($expected)); }; } check!("foo", "foo"); check!("foo/bar", "foo#bar"); check!("/foo/bar/baz", "#foo#bar#baz"); } #[cfg(windows)] #[test] fn generate_custom_path_separator_windows() { let arg = ArgumentTemplate::Tokens(vec![Token::Placeholder]); macro_rules! check { ($input:expr, $expected:expr) => { assert_eq!(arg.generate($input, Some("#")), OsString::from($expected)); }; } // path starting with a drive letter check!(r"C:\foo\bar", "C:#foo#bar"); // UNC path check!(r"\\server\share\path", "##server#share#path"); // Drive Relative path - no separator after the colon omits the RootDir path component. 
// This is uncommon, but valid check!(r"C:foo\bar", "C:foo#bar"); // forward slashes should get normalized and interpreted as separators check!("C:/foo/bar", "C:#foo#bar"); check!("C:foo/bar", "C:foo#bar"); // Rust does not interpret "//server/share" as a UNC path, but rather as a normal // absolute path that begins with RootDir, and the two slashes get combined together as // a single path separator during normalization. //check!("//server/share/path", "##server#share#path"); } } fd-find-8.3.1/src/exec/token.rs000064400000000000000000000015560072674642500144040ustar 00000000000000use std::fmt::{self, Display, Formatter}; /// Designates what should be written to a buffer /// /// Each `Token` contains either text, or a placeholder variant, which will be used to generate /// commands after all tokens for a given command template have been collected. #[derive(Clone, Debug, PartialEq)] pub enum Token { Placeholder, Basename, Parent, NoExt, BasenameNoExt, Text(String), } impl Display for Token { fn fmt(&self, f: &mut Formatter) -> fmt::Result { match *self { Token::Placeholder => f.write_str("{}")?, Token::Basename => f.write_str("{/}")?, Token::Parent => f.write_str("{//}")?, Token::NoExt => f.write_str("{.}")?, Token::BasenameNoExt => f.write_str("{/.}")?, Token::Text(ref string) => f.write_str(string)?, } Ok(()) } } fd-find-8.3.1/src/exit_codes.rs000064400000000000000000000045730072674642500144700ustar 00000000000000use std::process; #[cfg(unix)] use nix::sys::signal::{raise, signal, SigHandler, Signal}; #[derive(Debug, Clone, Copy, PartialEq)] pub enum ExitCode { Success, HasResults(bool), GeneralError, KilledBySigint, } impl From for i32 { fn from(code: ExitCode) -> Self { match code { ExitCode::Success => 0, ExitCode::HasResults(has_results) => !has_results as i32, ExitCode::GeneralError => 1, ExitCode::KilledBySigint => 130, } } } impl ExitCode { fn is_error(self) -> bool { i32::from(self) != 0 } /// Exit the process with the appropriate code. pub fn exit(self) -> ! 
{ #[cfg(unix)] if self == ExitCode::KilledBySigint { // Get rid of the SIGINT handler, if present, and raise SIGINT unsafe { if signal(Signal::SIGINT, SigHandler::SigDfl).is_ok() { let _ = raise(Signal::SIGINT); } } } process::exit(self.into()) } } pub fn merge_exitcodes(results: impl IntoIterator) -> ExitCode { if results.into_iter().any(ExitCode::is_error) { return ExitCode::GeneralError; } ExitCode::Success } #[cfg(test)] mod tests { use super::*; #[test] fn success_when_no_results() { assert_eq!(merge_exitcodes([]), ExitCode::Success); } #[test] fn general_error_if_at_least_one_error() { assert_eq!( merge_exitcodes([ExitCode::GeneralError]), ExitCode::GeneralError ); assert_eq!( merge_exitcodes([ExitCode::KilledBySigint]), ExitCode::GeneralError ); assert_eq!( merge_exitcodes([ExitCode::KilledBySigint, ExitCode::Success]), ExitCode::GeneralError ); assert_eq!( merge_exitcodes([ExitCode::Success, ExitCode::GeneralError]), ExitCode::GeneralError ); assert_eq!( merge_exitcodes([ExitCode::GeneralError, ExitCode::KilledBySigint]), ExitCode::GeneralError ); } #[test] fn success_if_no_error() { assert_eq!(merge_exitcodes([ExitCode::Success]), ExitCode::Success); assert_eq!( merge_exitcodes([ExitCode::Success, ExitCode::Success]), ExitCode::Success ); } } fd-find-8.3.1/src/filesystem.rs000064400000000000000000000073550072674642500145270ustar 00000000000000use std::borrow::Cow; use std::env; use std::ffi::OsStr; use std::fs; use std::io; #[cfg(any(unix, target_os = "redox"))] use std::os::unix::fs::{FileTypeExt, PermissionsExt}; use std::path::{Path, PathBuf}; use normpath::PathExt; use crate::walk; pub fn path_absolute_form(path: &Path) -> io::Result { if path.is_absolute() { return Ok(path.to_path_buf()); } let path = path.strip_prefix(".").unwrap_or(path); env::current_dir().map(|path_buf| path_buf.join(path)) } pub fn absolute_path(path: &Path) -> io::Result { let path_buf = path_absolute_form(path)?; #[cfg(windows)] let path_buf = Path::new( path_buf .as_path() .to_string_lossy() .trim_start_matches(r"\\?\"), ) .to_path_buf(); Ok(path_buf) } pub fn is_existing_directory(path: &Path) -> bool { // Note: we do not use `.exists()` here, as `.` always exists, even if // the CWD has been deleted. 
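// A path like "." has no file_name(), so for such paths the expression below additionally requires normalize() to succeed, which fails once the underlying directory has been removed.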
path.is_dir() && (path.file_name().is_some() || path.normalize().is_ok()) } #[cfg(any(unix, target_os = "redox"))] pub fn is_executable(md: &fs::Metadata) -> bool { md.permissions().mode() & 0o111 != 0 } #[cfg(windows)] pub fn is_executable(_: &fs::Metadata) -> bool { false } pub fn is_empty(entry: &walk::DirEntry) -> bool { if let Some(file_type) = entry.file_type() { if file_type.is_dir() { if let Ok(mut entries) = fs::read_dir(entry.path()) { entries.next().is_none() } else { false } } else if file_type.is_file() { entry.metadata().map(|m| m.len() == 0).unwrap_or(false) } else { false } } else { false } } #[cfg(any(unix, target_os = "redox"))] pub fn is_socket(ft: fs::FileType) -> bool { ft.is_socket() } #[cfg(windows)] pub fn is_socket(_: fs::FileType) -> bool { false } #[cfg(any(unix, target_os = "redox"))] pub fn is_pipe(ft: fs::FileType) -> bool { ft.is_fifo() } #[cfg(windows)] pub fn is_pipe(_: fs::FileType) -> bool { false } #[cfg(any(unix, target_os = "redox"))] pub fn osstr_to_bytes(input: &OsStr) -> Cow<[u8]> { use std::os::unix::ffi::OsStrExt; Cow::Borrowed(input.as_bytes()) } #[cfg(windows)] pub fn osstr_to_bytes(input: &OsStr) -> Cow<[u8]> { let string = input.to_string_lossy(); match string { Cow::Owned(string) => Cow::Owned(string.into_bytes()), Cow::Borrowed(string) => Cow::Borrowed(string.as_bytes()), } } /// Remove the `./` prefix from a path. pub fn strip_current_dir(path: &Path) -> &Path { path.strip_prefix(".").unwrap_or(path) } /// Default value for the path_separator, mainly for MSYS/MSYS2, which set the MSYSTEM /// environment variable, and we set fd's path separator to '/' rather than Rust's default of '\'. /// /// Returns Some to use a nonstandard path separator, or None to use rust's default on the target /// platform. pub fn default_path_separator() -> Option { if cfg!(windows) { let msystem = env::var("MSYSTEM").ok()?; match msystem.as_str() { "MINGW64" | "MINGW32" | "MSYS" => Some("/".to_owned()), _ => None, } } else { None } } #[cfg(test)] mod tests { use super::strip_current_dir; use std::path::Path; #[test] fn strip_current_dir_basic() { assert_eq!(strip_current_dir(Path::new("./foo")), Path::new("foo")); assert_eq!(strip_current_dir(Path::new("foo")), Path::new("foo")); assert_eq!( strip_current_dir(Path::new("./foo/bar/baz")), Path::new("foo/bar/baz") ); assert_eq!( strip_current_dir(Path::new("foo/bar/baz")), Path::new("foo/bar/baz") ); } } fd-find-8.3.1/src/filetypes.rs000064400000000000000000000024500072674642500143360ustar 00000000000000use crate::filesystem; use crate::walk; /// Whether or not to show #[derive(Default)] pub struct FileTypes { pub files: bool, pub directories: bool, pub symlinks: bool, pub sockets: bool, pub pipes: bool, pub executables_only: bool, pub empty_only: bool, } impl FileTypes { pub fn should_ignore(&self, entry: &walk::DirEntry) -> bool { if let Some(ref entry_type) = entry.file_type() { (!self.files && entry_type.is_file()) || (!self.directories && entry_type.is_dir()) || (!self.symlinks && entry_type.is_symlink()) || (!self.sockets && filesystem::is_socket(*entry_type)) || (!self.pipes && filesystem::is_pipe(*entry_type)) || (self.executables_only && !entry .metadata() .map(filesystem::is_executable) .unwrap_or(false)) || (self.empty_only && !filesystem::is_empty(entry)) || !(entry_type.is_file() || entry_type.is_dir() || entry_type.is_symlink() || filesystem::is_socket(*entry_type) || filesystem::is_pipe(*entry_type)) } else { true } } } 
fd-find-8.3.1/src/filter/mod.rs000064400000002360072674642500143760ustar 00000000000000pub use self::size::SizeFilter; pub use self::time::TimeFilter; #[cfg(unix)] pub use self::owner::OwnerFilter; mod size; mod time; #[cfg(unix)] mod owner; fd-find-8.3.1/src/filter/owner.rs000064400000071530072674642500147560ustar 00000000000000use anyhow::{anyhow, Result}; use std::fs; #[derive(Clone, Copy, Debug, PartialEq)] pub struct OwnerFilter { uid: Check<u32>, gid: Check<u32>, } #[derive(Clone, Copy, Debug, PartialEq)] enum Check<T> { Equal(T), NotEq(T), Ignore, } impl OwnerFilter { /// Parses an owner constraint /// Returns an error if the string is invalid /// Returns Ok(None) when string is acceptable but a noop (such as "" or ":") pub fn from_string(input: &str) -> Result<Option<OwnerFilter>> { let mut it = input.split(':'); let (fst, snd) = (it.next(), it.next()); if it.next().is_some() { return Err(anyhow!( "more than one ':' present in owner string '{}'. See 'fd --help'.", input )); } let uid = Check::parse(fst, |s| { s.parse() .ok() .or_else(|| users::get_user_by_name(s).map(|user| user.uid())) .ok_or_else(|| anyhow!("'{}' is not a recognized user name", s)) })?; let gid = Check::parse(snd, |s| { s.parse() .ok() .or_else(|| users::get_group_by_name(s).map(|group| group.gid())) .ok_or_else(|| anyhow!("'{}' is not a recognized group name", s)) })?; if let (Check::Ignore, Check::Ignore) = (uid, gid) { Ok(None) } else { Ok(Some(OwnerFilter { uid, gid })) } } pub fn matches(&self, md: &fs::Metadata) -> bool { use std::os::unix::fs::MetadataExt; self.uid.check(md.uid()) && self.gid.check(md.gid()) } } impl<T: PartialEq> Check<T> { fn check(&self, v: T) -> bool { match self { Check::Equal(x) => v == *x, Check::NotEq(x) => v != *x, Check::Ignore => true, } } fn parse<F>(s: Option<&str>, f: F) -> Result<Self> where F: Fn(&str) -> Result<T>, { let (s, equality) = match s { Some("") | None => return Ok(Check::Ignore), Some(s) if s.starts_with('!') => (&s[1..], false), Some(s) => (s, true), }; f(s).map(|x| { if equality { Check::Equal(x) } else { Check::NotEq(x) } }) } } #[cfg(test)] mod owner_parsing { use super::OwnerFilter; macro_rules! owner_tests { ($($name:ident: $value:expr => $result:pat,)*) => { $( #[test] fn $name() { let o = OwnerFilter::from_string($value); match o { $result => {}, _ => panic!("{:?} does not match {}", o, stringify!($result)), } } )* }; } use super::Check::*; owner_tests!
{ empty: "" => Ok(None), uid_only: "5" => Ok(Some(OwnerFilter { uid: Equal(5), gid: Ignore })), uid_gid: "9:3" => Ok(Some(OwnerFilter { uid: Equal(9), gid: Equal(3) })), gid_only: ":8" => Ok(Some(OwnerFilter { uid: Ignore, gid: Equal(8) })), colon_only: ":" => Ok(None), trailing: "5:" => Ok(Some(OwnerFilter { uid: Equal(5), gid: Ignore })), uid_negate: "!5" => Ok(Some(OwnerFilter { uid: NotEq(5), gid: Ignore })), both_negate:"!4:!3" => Ok(Some(OwnerFilter { uid: NotEq(4), gid: NotEq(3) })), uid_not_gid:"6:!8" => Ok(Some(OwnerFilter { uid: Equal(6), gid: NotEq(8) })), more_colons:"3:5:" => Err(_), only_colons:"::" => Err(_), } } fd-find-8.3.1/src/filter/size.rs000064400000000000000000000223640072674642500145770ustar 00000000000000use once_cell::sync::Lazy; use regex::Regex; static SIZE_CAPTURES: Lazy = Lazy::new(|| Regex::new(r"(?i)^([+-]?)(\d+)(b|[kmgt]i?b?)$").unwrap()); #[derive(Clone, Copy, Debug, PartialEq)] pub enum SizeFilter { Max(u64), Min(u64), Equals(u64), } // SI prefixes (powers of 10) const KILO: u64 = 1000; const MEGA: u64 = KILO * 1000; const GIGA: u64 = MEGA * 1000; const TERA: u64 = GIGA * 1000; // Binary prefixes (powers of 2) const KIBI: u64 = 1024; const MEBI: u64 = KIBI * 1024; const GIBI: u64 = MEBI * 1024; const TEBI: u64 = GIBI * 1024; impl SizeFilter { pub fn from_string(s: &str) -> Option { if !SIZE_CAPTURES.is_match(s) { return None; } let captures = SIZE_CAPTURES.captures(s)?; let limit_kind = captures.get(1).map_or("+", |m| m.as_str()); let quantity = captures .get(2) .and_then(|v| v.as_str().parse::().ok())?; let multiplier = match &captures.get(3).map_or("b", |m| m.as_str()).to_lowercase()[..] { v if v.starts_with("ki") => KIBI, v if v.starts_with('k') => KILO, v if v.starts_with("mi") => MEBI, v if v.starts_with('m') => MEGA, v if v.starts_with("gi") => GIBI, v if v.starts_with('g') => GIGA, v if v.starts_with("ti") => TEBI, v if v.starts_with('t') => TERA, "b" => 1, _ => return None, }; let size = quantity * multiplier; match limit_kind { "+" => Some(SizeFilter::Min(size)), "-" => Some(SizeFilter::Max(size)), "" => Some(SizeFilter::Equals(size)), _ => None, } } pub fn is_within(&self, size: u64) -> bool { match *self { SizeFilter::Max(limit) => size <= limit, SizeFilter::Min(limit) => size >= limit, SizeFilter::Equals(limit) => size == limit, } } } #[cfg(test)] mod tests { use super::*; macro_rules! gen_size_filter_parse_test { ($($name: ident: $val: expr,)*) => { $( #[test] fn $name() { let (txt, expected) = $val; let actual = SizeFilter::from_string(txt).unwrap(); assert_eq!(actual, expected); } )* }; } // Parsing and size conversion tests data. Ensure that each type gets properly interpreted. // Call with higher base values to ensure expected multiplication (only need a couple) gen_size_filter_parse_test! 
{ byte_plus: ("+1b", SizeFilter::Min(1)), byte_plus_multiplier: ("+10b", SizeFilter::Min(10)), byte_minus: ("-1b", SizeFilter::Max(1)), kilo_plus: ("+1k", SizeFilter::Min(1000)), kilo_plus_suffix: ("+1kb", SizeFilter::Min(1000)), kilo_minus: ("-1k", SizeFilter::Max(1000)), kilo_minus_multiplier: ("-100k", SizeFilter::Max(100_000)), kilo_minus_suffix: ("-1kb", SizeFilter::Max(1000)), kilo_plus_upper: ("+1K", SizeFilter::Min(1000)), kilo_plus_suffix_upper: ("+1KB", SizeFilter::Min(1000)), kilo_minus_upper: ("-1K", SizeFilter::Max(1000)), kilo_minus_suffix_upper: ("-1Kb", SizeFilter::Max(1000)), kibi_plus: ("+1ki", SizeFilter::Min(1024)), kibi_plus_multiplier: ("+10ki", SizeFilter::Min(10_240)), kibi_plus_suffix: ("+1kib", SizeFilter::Min(1024)), kibi_minus: ("-1ki", SizeFilter::Max(1024)), kibi_minus_multiplier: ("-100ki", SizeFilter::Max(102_400)), kibi_minus_suffix: ("-1kib", SizeFilter::Max(1024)), kibi_plus_upper: ("+1KI", SizeFilter::Min(1024)), kibi_plus_suffix_upper: ("+1KiB", SizeFilter::Min(1024)), kibi_minus_upper: ("-1Ki", SizeFilter::Max(1024)), kibi_minus_suffix_upper: ("-1KIB", SizeFilter::Max(1024)), mega_plus: ("+1m", SizeFilter::Min(1_000_000)), mega_plus_suffix: ("+1mb", SizeFilter::Min(1_000_000)), mega_minus: ("-1m", SizeFilter::Max(1_000_000)), mega_minus_suffix: ("-1mb", SizeFilter::Max(1_000_000)), mega_plus_upper: ("+1M", SizeFilter::Min(1_000_000)), mega_plus_suffix_upper: ("+1MB", SizeFilter::Min(1_000_000)), mega_minus_upper: ("-1M", SizeFilter::Max(1_000_000)), mega_minus_suffix_upper: ("-1Mb", SizeFilter::Max(1_000_000)), mebi_plus: ("+1mi", SizeFilter::Min(1_048_576)), mebi_plus_suffix: ("+1mib", SizeFilter::Min(1_048_576)), mebi_minus: ("-1mi", SizeFilter::Max(1_048_576)), mebi_minus_suffix: ("-1mib", SizeFilter::Max(1_048_576)), mebi_plus_upper: ("+1MI", SizeFilter::Min(1_048_576)), mebi_plus_suffix_upper: ("+1MiB", SizeFilter::Min(1_048_576)), mebi_minus_upper: ("-1Mi", SizeFilter::Max(1_048_576)), mebi_minus_suffix_upper: ("-1MIB", SizeFilter::Max(1_048_576)), giga_plus: ("+1g", SizeFilter::Min(1_000_000_000)), giga_plus_suffix: ("+1gb", SizeFilter::Min(1_000_000_000)), giga_minus: ("-1g", SizeFilter::Max(1_000_000_000)), giga_minus_suffix: ("-1gb", SizeFilter::Max(1_000_000_000)), giga_plus_upper: ("+1G", SizeFilter::Min(1_000_000_000)), giga_plus_suffix_upper: ("+1GB", SizeFilter::Min(1_000_000_000)), giga_minus_upper: ("-1G", SizeFilter::Max(1_000_000_000)), giga_minus_suffix_upper: ("-1Gb", SizeFilter::Max(1_000_000_000)), gibi_plus: ("+1gi", SizeFilter::Min(1_073_741_824)), gibi_plus_suffix: ("+1gib", SizeFilter::Min(1_073_741_824)), gibi_minus: ("-1gi", SizeFilter::Max(1_073_741_824)), gibi_minus_suffix: ("-1gib", SizeFilter::Max(1_073_741_824)), gibi_plus_upper: ("+1GI", SizeFilter::Min(1_073_741_824)), gibi_plus_suffix_upper: ("+1GiB", SizeFilter::Min(1_073_741_824)), gibi_minus_upper: ("-1Gi", SizeFilter::Max(1_073_741_824)), gibi_minus_suffix_upper: ("-1GIB", SizeFilter::Max(1_073_741_824)), tera_plus: ("+1t", SizeFilter::Min(1_000_000_000_000)), tera_plus_suffix: ("+1tb", SizeFilter::Min(1_000_000_000_000)), tera_minus: ("-1t", SizeFilter::Max(1_000_000_000_000)), tera_minus_suffix: ("-1tb", SizeFilter::Max(1_000_000_000_000)), tera_plus_upper: ("+1T", SizeFilter::Min(1_000_000_000_000)), tera_plus_suffix_upper: ("+1TB", SizeFilter::Min(1_000_000_000_000)), tera_minus_upper: ("-1T", SizeFilter::Max(1_000_000_000_000)), tera_minus_suffix_upper: ("-1Tb", SizeFilter::Max(1_000_000_000_000)), tebi_plus: ("+1ti", SizeFilter::Min(1_099_511_627_776)), 
tebi_plus_suffix: ("+1tib", SizeFilter::Min(1_099_511_627_776)), tebi_minus: ("-1ti", SizeFilter::Max(1_099_511_627_776)), tebi_minus_suffix: ("-1tib", SizeFilter::Max(1_099_511_627_776)), tebi_plus_upper: ("+1TI", SizeFilter::Min(1_099_511_627_776)), tebi_plus_suffix_upper: ("+1TiB", SizeFilter::Min(1_099_511_627_776)), tebi_minus_upper: ("-1Ti", SizeFilter::Max(1_099_511_627_776)), tebi_minus_suffix_upper: ("-1TIB", SizeFilter::Max(1_099_511_627_776)), } /// Invalid parse testing macro_rules! gen_size_filter_failure { ($($name:ident: $value:expr,)*) => { $( #[test] fn $name() { let i = SizeFilter::from_string($value); assert!(i.is_none()); } )* }; } // Invalid parse data gen_size_filter_failure! { ensure_missing_number_returns_none: "+g", ensure_missing_unit_returns_none: "+18", ensure_bad_format_returns_none_1: "$10M", ensure_bad_format_returns_none_2: "badval", ensure_bad_format_returns_none_3: "9999", ensure_invalid_unit_returns_none_1: "+50a", ensure_invalid_unit_returns_none_2: "-10v", ensure_invalid_unit_returns_none_3: "+1Mv", ensure_bib_format_returns_none: "+1bib", ensure_bb_format_returns_none: "+1bb", } #[test] fn is_within_less_than() { let f = SizeFilter::from_string("-1k").unwrap(); assert!(f.is_within(999)); } #[test] fn is_within_less_than_equal() { let f = SizeFilter::from_string("-1k").unwrap(); assert!(f.is_within(1000)); } #[test] fn is_within_greater_than() { let f = SizeFilter::from_string("+1k").unwrap(); assert!(f.is_within(1001)); } #[test] fn is_within_greater_than_equal() { let f = SizeFilter::from_string("+1K").unwrap(); assert!(f.is_within(1000)); } } fd-find-8.3.1/src/filter/time.rs000064400000000000000000000101610072674642500145530ustar 00000000000000use chrono::{offset::TimeZone, DateTime, Local, NaiveDate}; use std::time::SystemTime; /// Filter based on time ranges. 
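/// Accepted inputs (see `from_str` below): a humantime-style duration such as "10h" or "35min", an RFC 3339 timestamp, a date in "YYYY-MM-DD" form, or a local "YYYY-MM-DD HH:MM:SS" timestamp.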
#[derive(Debug, PartialEq)] pub enum TimeFilter { Before(SystemTime), After(SystemTime), } impl TimeFilter { fn from_str(ref_time: &SystemTime, s: &str) -> Option { humantime::parse_duration(s) .map(|duration| *ref_time - duration) .ok() .or_else(|| { DateTime::parse_from_rfc3339(s) .map(|dt| dt.into()) .ok() .or_else(|| { NaiveDate::parse_from_str(s, "%F") .map(|nd| nd.and_hms(0, 0, 0)) .ok() .and_then(|ndt| Local.from_local_datetime(&ndt).single()) }) .or_else(|| Local.datetime_from_str(s, "%F %T").ok()) .map(|dt| dt.into()) }) } pub fn before(ref_time: &SystemTime, s: &str) -> Option { TimeFilter::from_str(ref_time, s).map(TimeFilter::Before) } pub fn after(ref_time: &SystemTime, s: &str) -> Option { TimeFilter::from_str(ref_time, s).map(TimeFilter::After) } pub fn applies_to(&self, t: &SystemTime) -> bool { match self { TimeFilter::Before(limit) => t < limit, TimeFilter::After(limit) => t > limit, } } } #[cfg(test)] mod tests { use super::*; use std::time::Duration; #[test] fn is_time_filter_applicable() { let ref_time = Local .datetime_from_str("2010-10-10 10:10:10", "%F %T") .unwrap() .into(); assert!(TimeFilter::after(&ref_time, "1min") .unwrap() .applies_to(&ref_time)); assert!(!TimeFilter::before(&ref_time, "1min") .unwrap() .applies_to(&ref_time)); let t1m_ago = ref_time - Duration::from_secs(60); assert!(!TimeFilter::after(&ref_time, "30sec") .unwrap() .applies_to(&t1m_ago)); assert!(TimeFilter::after(&ref_time, "2min") .unwrap() .applies_to(&t1m_ago)); assert!(TimeFilter::before(&ref_time, "30sec") .unwrap() .applies_to(&t1m_ago)); assert!(!TimeFilter::before(&ref_time, "2min") .unwrap() .applies_to(&t1m_ago)); let t10s_before = "2010-10-10 10:10:00"; assert!(!TimeFilter::before(&ref_time, t10s_before) .unwrap() .applies_to(&ref_time)); assert!(TimeFilter::before(&ref_time, t10s_before) .unwrap() .applies_to(&t1m_ago)); assert!(TimeFilter::after(&ref_time, t10s_before) .unwrap() .applies_to(&ref_time)); assert!(!TimeFilter::after(&ref_time, t10s_before) .unwrap() .applies_to(&t1m_ago)); let same_day = "2010-10-10"; assert!(!TimeFilter::before(&ref_time, same_day) .unwrap() .applies_to(&ref_time)); assert!(!TimeFilter::before(&ref_time, same_day) .unwrap() .applies_to(&t1m_ago)); assert!(TimeFilter::after(&ref_time, same_day) .unwrap() .applies_to(&ref_time)); assert!(TimeFilter::after(&ref_time, same_day) .unwrap() .applies_to(&t1m_ago)); let ref_time = DateTime::parse_from_rfc3339("2010-10-10T10:10:10+00:00") .unwrap() .into(); let t1m_ago = ref_time - Duration::from_secs(60); let t10s_before = "2010-10-10T10:10:00+00:00"; assert!(!TimeFilter::before(&ref_time, t10s_before) .unwrap() .applies_to(&ref_time)); assert!(TimeFilter::before(&ref_time, t10s_before) .unwrap() .applies_to(&t1m_ago)); assert!(TimeFilter::after(&ref_time, t10s_before) .unwrap() .applies_to(&ref_time)); assert!(!TimeFilter::after(&ref_time, t10s_before) .unwrap() .applies_to(&t1m_ago)); } } fd-find-8.3.1/src/main.rs000064400000000000000000000620170072674642500132630ustar 00000000000000mod app; mod config; mod error; mod exec; mod exit_codes; mod filesystem; mod filetypes; mod filter; mod output; mod regex_helper; mod walk; use std::env; use std::path::{Path, PathBuf}; use std::sync::Arc; use std::time; use anyhow::{anyhow, Context, Result}; use atty::Stream; use globset::GlobBuilder; use lscolors::LsColors; use normpath::PathExt; use regex::bytes::{RegexBuilder, RegexSetBuilder}; use crate::config::Config; use crate::error::print_error; use crate::exec::CommandTemplate; use crate::exit_codes::ExitCode; 
use crate::filetypes::FileTypes; #[cfg(unix)] use crate::filter::OwnerFilter; use crate::filter::{SizeFilter, TimeFilter}; use crate::regex_helper::{pattern_has_uppercase_char, pattern_matches_strings_with_leading_dot}; // We use jemalloc for performance reasons, see https://github.com/sharkdp/fd/pull/481 // FIXME: re-enable jemalloc on macOS, see comment in Cargo.toml file for more infos #[cfg(all( not(windows), not(target_os = "android"), not(target_os = "macos"), not(target_os = "freebsd"), not(target_env = "musl"), not(target_arch = "riscv64"), feature = "use-jemalloc" ))] #[global_allocator] static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc; // vivid --color-mode 8-bit generate molokai const DEFAULT_LS_COLORS: &str = " ow=0:or=0;38;5;16;48;5;203:no=0:ex=1;38;5;203:cd=0;38;5;203;48;5;236:mi=0;38;5;16;48;5;203:*~=0;38;5;243:st=0:pi=0;38;5;16;48;5;81:fi=0:di=0;38;5;81:so=0;38;5;16;48;5;203:bd=0;38;5;81;48;5;236:tw=0:ln=0;38;5;203:*.m=0;38;5;48:*.o=0;38;5;243:*.z=4;38;5;203:*.a=1;38;5;203:*.r=0;38;5;48:*.c=0;38;5;48:*.d=0;38;5;48:*.t=0;38;5;48:*.h=0;38;5;48:*.p=0;38;5;48:*.cc=0;38;5;48:*.ll=0;38;5;48:*.jl=0;38;5;48:*css=0;38;5;48:*.md=0;38;5;185:*.gz=4;38;5;203:*.nb=0;38;5;48:*.mn=0;38;5;48:*.go=0;38;5;48:*.xz=4;38;5;203:*.so=1;38;5;203:*.rb=0;38;5;48:*.pm=0;38;5;48:*.bc=0;38;5;243:*.py=0;38;5;48:*.as=0;38;5;48:*.pl=0;38;5;48:*.rs=0;38;5;48:*.sh=0;38;5;48:*.7z=4;38;5;203:*.ps=0;38;5;186:*.cs=0;38;5;48:*.el=0;38;5;48:*.rm=0;38;5;208:*.hs=0;38;5;48:*.td=0;38;5;48:*.ui=0;38;5;149:*.ex=0;38;5;48:*.js=0;38;5;48:*.cp=0;38;5;48:*.cr=0;38;5;48:*.la=0;38;5;243:*.kt=0;38;5;48:*.ml=0;38;5;48:*.vb=0;38;5;48:*.gv=0;38;5;48:*.lo=0;38;5;243:*.hi=0;38;5;243:*.ts=0;38;5;48:*.ko=1;38;5;203:*.hh=0;38;5;48:*.pp=0;38;5;48:*.di=0;38;5;48:*.bz=4;38;5;203:*.fs=0;38;5;48:*.png=0;38;5;208:*.zsh=0;38;5;48:*.mpg=0;38;5;208:*.pid=0;38;5;243:*.xmp=0;38;5;149:*.iso=4;38;5;203:*.m4v=0;38;5;208:*.dot=0;38;5;48:*.ods=0;38;5;186:*.inc=0;38;5;48:*.sxw=0;38;5;186:*.aif=0;38;5;208:*.git=0;38;5;243:*.gvy=0;38;5;48:*.tbz=4;38;5;203:*.log=0;38;5;243:*.txt=0;38;5;185:*.ico=0;38;5;208:*.csx=0;38;5;48:*.vob=0;38;5;208:*.pgm=0;38;5;208:*.pps=0;38;5;186:*.ics=0;38;5;186:*.img=4;38;5;203:*.fon=0;38;5;208:*.hpp=0;38;5;48:*.bsh=0;38;5;48:*.sql=0;38;5;48:*TODO=1:*.php=0;38;5;48:*.pkg=4;38;5;203:*.ps1=0;38;5;48:*.csv=0;38;5;185:*.ilg=0;38;5;243:*.ini=0;38;5;149:*.pyc=0;38;5;243:*.psd=0;38;5;208:*.htc=0;38;5;48:*.swp=0;38;5;243:*.mli=0;38;5;48:*hgrc=0;38;5;149:*.bst=0;38;5;149:*.ipp=0;38;5;48:*.fsi=0;38;5;48:*.tcl=0;38;5;48:*.exs=0;38;5;48:*.out=0;38;5;243:*.jar=4;38;5;203:*.xls=0;38;5;186:*.ppm=0;38;5;208:*.apk=4;38;5;203:*.aux=0;38;5;243:*.rpm=4;38;5;203:*.dll=1;38;5;203:*.eps=0;38;5;208:*.exe=1;38;5;203:*.doc=0;38;5;186:*.wma=0;38;5;208:*.deb=4;38;5;203:*.pod=0;38;5;48:*.ind=0;38;5;243:*.nix=0;38;5;149:*.lua=0;38;5;48:*.epp=0;38;5;48:*.dpr=0;38;5;48:*.htm=0;38;5;185:*.ogg=0;38;5;208:*.bin=4;38;5;203:*.otf=0;38;5;208:*.yml=0;38;5;149:*.pro=0;38;5;149:*.cxx=0;38;5;48:*.tex=0;38;5;48:*.fnt=0;38;5;208:*.erl=0;38;5;48:*.sty=0;38;5;243:*.bag=4;38;5;203:*.rst=0;38;5;185:*.pdf=0;38;5;186:*.pbm=0;38;5;208:*.xcf=0;38;5;208:*.clj=0;38;5;48:*.gif=0;38;5;208:*.rar=4;38;5;203:*.elm=0;38;5;48:*.bib=0;38;5;149:*.tsx=0;38;5;48:*.dmg=4;38;5;203:*.tmp=0;38;5;243:*.bcf=0;38;5;243:*.mkv=0;38;5;208:*.svg=0;38;5;208:*.cpp=0;38;5;48:*.vim=0;38;5;48:*.bmp=0;38;5;208:*.ltx=0;38;5;48:*.fls=0;38;5;243:*.flv=0;38;5;208:*.wav=0;38;5;208:*.m4a=0;38;5;208:*.mid=0;38;5;208:*.hxx=0;38;5;48:*.pas=0;38;5;48:*.wmv=0;38;5;208:*.tif=0;38;5;208:*.kex=0;38;5;186:*.m
p4=0;38;5;208:*.bak=0;38;5;243:*.xlr=0;38;5;186:*.dox=0;38;5;149:*.swf=0;38;5;208:*.tar=4;38;5;203:*.tgz=4;38;5;203:*.cfg=0;38;5;149:*.xml=0; 38;5;185:*.jpg=0;38;5;208:*.mir=0;38;5;48:*.sxi=0;38;5;186:*.bz2=4;38;5;203:*.odt=0;38;5;186:*.mov=0;38;5;208:*.toc=0;38;5;243:*.bat=1;38;5;203:*.asa=0;38;5;48:*.awk=0;38;5;48:*.sbt=0;38;5;48:*.vcd=4;38;5;203:*.kts=0;38;5;48:*.arj=4;38;5;203:*.blg=0;38;5;243:*.c++=0;38;5;48:*.odp=0;38;5;186:*.bbl=0;38;5;243:*.idx=0;38;5;243:*.com=1;38;5;203:*.mp3=0;38;5;208:*.avi=0;38;5;208:*.def=0;38;5;48:*.cgi=0;38;5;48:*.zip=4;38;5;203:*.ttf=0;38;5;208:*.ppt=0;38;5;186:*.tml=0;38;5;149:*.fsx=0;38;5;48:*.h++=0;38;5;48:*.rtf=0;38;5;186:*.inl=0;38;5;48:*.yaml=0;38;5;149:*.html=0;38;5;185:*.mpeg=0;38;5;208:*.java=0;38;5;48:*.hgrc=0;38;5;149:*.orig=0;38;5;243:*.conf=0;38;5;149:*.dart=0;38;5;48:*.psm1=0;38;5;48:*.rlib=0;38;5;243:*.fish=0;38;5;48:*.bash=0;38;5;48:*.make=0;38;5;149:*.docx=0;38;5;186:*.json=0;38;5;149:*.psd1=0;38;5;48:*.lisp=0;38;5;48:*.tbz2=4;38;5;203:*.diff=0;38;5;48:*.epub=0;38;5;186:*.xlsx=0;38;5;186:*.pptx=0;38;5;186:*.toml=0;38;5;149:*.h264=0;38;5;208:*.purs=0;38;5;48:*.flac=0;38;5;208:*.tiff=0;38;5;208:*.jpeg=0;38;5;208:*.lock=0;38;5;243:*.less=0;38;5;48:*.dyn_o=0;38;5;243:*.scala=0;38;5;48:*.mdown=0;38;5;185:*.shtml=0;38;5;185:*.class=0;38;5;243:*.cache=0;38;5;243:*.cmake=0;38;5;149:*passwd=0;38;5;149:*.swift=0;38;5;48:*shadow=0;38;5;149:*.xhtml=0;38;5;185:*.patch=0;38;5;48:*.cabal=0;38;5;48:*README=0;38;5;16;48;5;186:*.toast=4;38;5;203:*.ipynb=0;38;5;48:*COPYING=0;38;5;249:*.gradle=0;38;5;48:*.matlab=0;38;5;48:*.config=0;38;5;149:*LICENSE=0;38;5;249:*.dyn_hi=0;38;5;243:*.flake8=0;38;5;149:*.groovy=0;38;5;48:*INSTALL=0;38;5;16;48;5;186:*TODO.md=1:*.ignore=0;38;5;149:*Doxyfile=0;38;5;149:*TODO.txt=1:*setup.py=0;38;5;149:*Makefile=0;38;5;149:*.gemspec=0;38;5;149:*.desktop=0;38;5;149:*.rgignore=0;38;5;149:*.markdown=0;38;5;185:*COPYRIGHT=0;38;5;249:*configure=0;38;5;149:*.DS_Store=0;38;5;243:*.kdevelop=0;38;5;149:*.fdignore=0;38;5;149:*README.md=0;38;5;16;48;5;186:*.cmake.in=0;38;5;149:*SConscript=0;38;5;149:*CODEOWNERS=0;38;5;149:*.localized=0;38;5;243:*.gitignore=0;38;5;149:*Dockerfile=0;38;5;149:*.gitconfig=0;38;5;149:*INSTALL.md=0;38;5;16;48;5;186:*README.txt=0;38;5;16;48;5;186:*SConstruct=0;38;5;149:*.scons_opt=0;38;5;243:*.travis.yml=0;38;5;186:*.gitmodules=0;38;5;149:*.synctex.gz=0;38;5;243:*LICENSE-MIT=0;38;5;249:*MANIFEST.in=0;38;5;149:*Makefile.in=0;38;5;243:*Makefile.am=0;38;5;149:*INSTALL.txt=0;38;5;16;48;5;186:*configure.ac=0;38;5;149:*.applescript=0;38;5;48:*appveyor.yml=0;38;5;186:*.fdb_latexmk=0;38;5;243:*CONTRIBUTORS=0;38;5;16;48;5;186:*.clang-format=0;38;5;149:*LICENSE-APACHE=0;38;5;249:*CMakeLists.txt=0;38;5;149:*CMakeCache.txt=0;38;5;243:*.gitattributes=0;38;5;149:*CONTRIBUTORS.md=0;38;5;16;48;5;186:*.sconsign.dblite=0;38;5;243:*requirements.txt=0;38;5;149:*CONTRIBUTORS.txt=0;38;5;16;48;5;186:*package-lock.json=0;38;5;243:*.CFUserTextEncoding=0;38;5;243 "; fn main() { let result = run(); match result { Ok(exit_code) => { exit_code.exit(); } Err(err) => { eprintln!("[fd error]: {:#}", err); ExitCode::GeneralError.exit(); } } } fn run() -> Result { let matches = app::build_app().get_matches_from(env::args_os()); set_working_dir(&matches)?; let current_directory = Path::new("."); ensure_current_directory_exists(current_directory)?; let search_paths = extract_search_paths(&matches, current_directory)?; let pattern = extract_search_pattern(&matches)?; ensure_search_pattern_is_not_a_path(&matches, pattern)?; let pattern_regex = 
build_pattern_regex(&matches, pattern)?; let config = construct_config(matches, &pattern_regex)?; ensure_use_hidden_option_for_leading_dot_pattern(&config, &pattern_regex)?; let re = build_regex(pattern_regex, &config)?; walk::scan(&search_paths, Arc::new(re), Arc::new(config)) } fn set_working_dir(matches: &clap::ArgMatches) -> Result<()> { if let Some(base_directory) = matches.value_of_os("base-directory") { let base_directory = Path::new(base_directory); if !filesystem::is_existing_directory(base_directory) { return Err(anyhow!( "The '--base-directory' path '{}' is not a directory.", base_directory.to_string_lossy() )); } env::set_current_dir(base_directory).with_context(|| { format!( "Could not set '{}' as the current working directory", base_directory.to_string_lossy() ) })?; } Ok(()) } fn ensure_current_directory_exists(current_directory: &Path) -> Result<()> { if filesystem::is_existing_directory(current_directory) { Ok(()) } else { Err(anyhow!( "Could not retrieve current directory (has it been deleted?)." )) } } fn extract_search_pattern<'a>(matches: &'a clap::ArgMatches) -> Result<&'a str> { let pattern = matches .value_of_os("pattern") .map(|p| { p.to_str() .ok_or_else(|| anyhow!("The search pattern includes invalid UTF-8 sequences.")) }) .transpose()? .unwrap_or(""); Ok(pattern) } fn extract_search_paths( matches: &clap::ArgMatches, current_directory: &Path, ) -> Result> { let mut search_paths = matches .values_of_os("path") .or_else(|| matches.values_of_os("search-path")) .map_or_else( || vec![current_directory.to_path_buf()], |paths| { paths .filter_map(|path| { let path_buffer = PathBuf::from(path); if filesystem::is_existing_directory(&path_buffer) { Some(path_buffer) } else { print_error(format!( "Search path '{}' is not a directory.", path_buffer.to_string_lossy() )); None } }) .collect() }, ); if search_paths.is_empty() { return Err(anyhow!("No valid search paths given.")); } if matches.is_present("absolute-path") { update_to_absolute_paths(&mut search_paths); } Ok(search_paths) } fn update_to_absolute_paths(search_paths: &mut [PathBuf]) { for buffer in search_paths.iter_mut() { *buffer = filesystem::absolute_path(buffer.normalize().unwrap().as_path()).unwrap(); } } /// Detect if the user accidentally supplied a path instead of a search pattern fn ensure_search_pattern_is_not_a_path(matches: &clap::ArgMatches, pattern: &str) -> Result<()> { if !matches.is_present("full-path") && pattern.contains(std::path::MAIN_SEPARATOR) && Path::new(pattern).is_dir() { Err(anyhow!( "The search pattern '{pattern}' contains a path-separation character ('{sep}') \ and will not lead to any search results.\n\n\ If you want to search for all files inside the '{pattern}' directory, use a match-all pattern:\n\n \ fd . 
'{pattern}'\n\n\ Instead, if you want your pattern to match the full file path, use:\n\n \ fd --full-path '{pattern}'", pattern = pattern, sep = std::path::MAIN_SEPARATOR, )) } else { Ok(()) } } fn build_pattern_regex(matches: &clap::ArgMatches, pattern: &str) -> Result { Ok(if matches.is_present("glob") && !pattern.is_empty() { let glob = GlobBuilder::new(pattern).literal_separator(true).build()?; glob.regex().to_owned() } else if matches.is_present("fixed-strings") { // Treat pattern as literal string if '--fixed-strings' is used regex::escape(pattern) } else { String::from(pattern) }) } fn check_path_separator_length(path_separator: Option<&str>) -> Result<()> { match (cfg!(windows), path_separator) { (true, Some(sep)) if sep.len() > 1 => Err(anyhow!( "A path separator must be exactly one byte, but \ the given separator is {} bytes: '{}'.\n\ In some shells on Windows, '/' is automatically \ expanded. Try to use '//' instead.", sep.len(), sep )), _ => Ok(()), } } fn construct_config(matches: clap::ArgMatches, pattern_regex: &str) -> Result { // The search will be case-sensitive if the command line flag is set or // if the pattern has an uppercase character (smart case). let case_sensitive = !matches.is_present("ignore-case") && (matches.is_present("case-sensitive") || pattern_has_uppercase_char(pattern_regex)); let path_separator = matches .value_of("path-separator") .map_or_else(filesystem::default_path_separator, |s| Some(s.to_owned())); check_path_separator_length(path_separator.as_deref())?; let size_limits = extract_size_limits(&matches)?; let time_constraints = extract_time_constraints(&matches)?; #[cfg(unix)] let owner_constraint = matches .value_of("owner") .map(OwnerFilter::from_string) .transpose()? .flatten(); #[cfg(windows)] let ansi_colors_support = ansi_term::enable_ansi_support().is_ok() || std::env::var_os("TERM").is_some(); #[cfg(not(windows))] let ansi_colors_support = true; let interactive_terminal = atty::is(Stream::Stdout); let colored_output = match matches.value_of("color") { Some("always") => true, Some("never") => false, _ => ansi_colors_support && env::var_os("NO_COLOR").is_none() && interactive_terminal, }; let ls_colors = if colored_output { Some(LsColors::from_env().unwrap_or_else(|| LsColors::from_string(DEFAULT_LS_COLORS))) } else { None }; let command = extract_command(&matches, path_separator.as_deref(), colored_output)?; Ok(Config { case_sensitive, search_full_path: matches.is_present("full-path"), ignore_hidden: !(matches.is_present("hidden") || matches.occurrences_of("rg-alias-hidden-ignore") >= 2), read_fdignore: !(matches.is_present("no-ignore") || matches.is_present("rg-alias-hidden-ignore")), read_vcsignore: !(matches.is_present("no-ignore") || matches.is_present("rg-alias-hidden-ignore") || matches.is_present("no-ignore-vcs")), read_parent_ignore: !matches.is_present("no-ignore-parent"), read_global_ignore: !(matches.is_present("no-ignore") || matches.is_present("rg-alias-hidden-ignore") || matches.is_present("no-global-ignore-file")), follow_links: matches.is_present("follow"), one_file_system: matches.is_present("one-file-system"), null_separator: matches.is_present("null_separator"), quiet: matches.is_present("quiet"), max_depth: matches .value_of("max-depth") .or_else(|| matches.value_of("rg-depth")) .or_else(|| matches.value_of("exact-depth")) .map(|n| n.parse::()) .transpose() .context("Failed to parse argument to --max-depth/--exact-depth")?, min_depth: matches .value_of("min-depth") .or_else(|| matches.value_of("exact-depth")) .map(|n| 
n.parse::()) .transpose() .context("Failed to parse argument to --min-depth/--exact-depth")?, prune: matches.is_present("prune"), threads: std::cmp::max( matches .value_of("threads") .map(|n| n.parse::()) .transpose() .context("Failed to parse number of threads")? .map(|n| { if n > 0 { Ok(n) } else { Err(anyhow!("Number of threads must be positive.")) } }) .transpose()? .unwrap_or_else(num_cpus::get), 1, ), max_buffer_time: matches .value_of("max-buffer-time") .map(|n| n.parse::()) .transpose() .context("Failed to parse max. buffer time argument")? .map(time::Duration::from_millis), ls_colors, interactive_terminal, file_types: matches.values_of("file-type").map(|values| { let mut file_types = FileTypes::default(); for value in values { match value { "f" | "file" => file_types.files = true, "d" | "directory" => file_types.directories = true, "l" | "symlink" => file_types.symlinks = true, "x" | "executable" => { file_types.executables_only = true; file_types.files = true; } "e" | "empty" => file_types.empty_only = true, "s" | "socket" => file_types.sockets = true, "p" | "pipe" => file_types.pipes = true, _ => unreachable!(), } } // If only 'empty' was specified, search for both files and directories: if file_types.empty_only && !(file_types.files || file_types.directories) { file_types.files = true; file_types.directories = true; } file_types }), extensions: matches .values_of("extension") .map(|exts| { let patterns = exts .map(|e| e.trim_start_matches('.')) .map(|e| format!(r".\.{}$", regex::escape(e))); RegexSetBuilder::new(patterns) .case_insensitive(true) .build() }) .transpose()?, command: command.map(Arc::new), batch_size: matches .value_of("batch-size") .map(|n| n.parse::()) .transpose() .context("Failed to parse --batch-size argument")? .unwrap_or_default(), exclude_patterns: matches .values_of("exclude") .map(|v| v.map(|p| String::from("!") + p).collect()) .unwrap_or_else(Vec::new), ignore_files: matches .values_of("ignore-file") .map(|vs| vs.map(PathBuf::from).collect()) .unwrap_or_else(Vec::new), size_constraints: size_limits, time_constraints, #[cfg(unix)] owner_constraint, show_filesystem_errors: matches.is_present("show-errors"), path_separator, max_results: matches .value_of("max-results") .map(|n| n.parse::()) .transpose() .context("Failed to parse --max-results argument")? 
.filter(|&n| n > 0) .or_else(|| { if matches.is_present("max-one-result") { Some(1) } else { None } }), strip_cwd_prefix: (!matches.is_present("path") && !matches.is_present("search-path") && (interactive_terminal || matches.is_present("strip-cwd-prefix"))), }) } fn extract_command( matches: &clap::ArgMatches, path_separator: Option<&str>, colored_output: bool, ) -> Result> { None.or_else(|| { matches.values_of("exec").map(|args| { Ok(CommandTemplate::new( args, path_separator.map(str::to_string), )) }) }) .or_else(|| { matches .values_of("exec-batch") .map(|args| CommandTemplate::new_batch(args, path_separator.map(str::to_string))) }) .or_else(|| { if !matches.is_present("list-details") { return None; } let color = matches.value_of("color").unwrap_or("auto"); let color_arg = format!("--color={}", color); let res = determine_ls_command(&color_arg, colored_output).map(|cmd| { CommandTemplate::new_batch(cmd, path_separator.map(str::to_string)).unwrap() }); Some(res) }) .transpose() } fn determine_ls_command(color_arg: &str, colored_output: bool) -> Result> { #[allow(unused)] let gnu_ls = |command_name| { // Note: we use short options here (instead of --long-options) to support more // platforms (like BusyBox). vec![ command_name, "-l", // long listing format "-h", // human readable file sizes "-d", // list directories themselves, not their contents color_arg, ] }; let cmd: Vec<&str> = if cfg!(unix) { if !cfg!(any( target_os = "macos", target_os = "dragonfly", target_os = "freebsd", target_os = "netbsd", target_os = "openbsd" )) { // Assume ls is GNU ls gnu_ls("ls") } else { // MacOS, DragonFlyBSD, FreeBSD use std::process::{Command, Stdio}; // Use GNU ls, if available (support for --color=auto, better LS_COLORS support) let gnu_ls_exists = Command::new("gls") .arg("--version") .stdout(Stdio::null()) .stderr(Stdio::null()) .status() .is_ok(); if gnu_ls_exists { gnu_ls("gls") } else { let mut cmd = vec![ "ls", // BSD version of ls "-l", // long listing format "-h", // '--human-readable' is not available, '-h' is "-d", // '--directory' is not available, but '-d' is ]; if !cfg!(any(target_os = "netbsd", target_os = "openbsd")) && colored_output { // -G is not available in NetBSD's and OpenBSD's ls cmd.push("-G"); } cmd } } } else if cfg!(windows) { use std::process::{Command, Stdio}; // Use GNU ls, if available let gnu_ls_exists = Command::new("ls") .arg("--version") .stdout(Stdio::null()) .stderr(Stdio::null()) .status() .is_ok(); if gnu_ls_exists { gnu_ls("ls") } else { return Err(anyhow!( "'fd --list-details' is not supported on Windows unless GNU 'ls' is installed." )); } } else { return Err(anyhow!( "'fd --list-details' is not supported on this platform." )); }; Ok(cmd) } fn extract_size_limits(matches: &clap::ArgMatches) -> Result> { matches.values_of("size").map_or(Ok(Vec::new()), |vs| { vs.map(|sf| { SizeFilter::from_string(sf) .ok_or_else(|| anyhow!("'{}' is not a valid size constraint. See 'fd --help'.", sf)) }) .collect::>>() }) } fn extract_time_constraints(matches: &clap::ArgMatches) -> Result> { let now = time::SystemTime::now(); let mut time_constraints: Vec = Vec::new(); if let Some(t) = matches.value_of("changed-within") { if let Some(f) = TimeFilter::after(&now, t) { time_constraints.push(f); } else { return Err(anyhow!( "'{}' is not a valid date or duration. 
See 'fd --help'.", t )); } } if let Some(t) = matches.value_of("changed-before") { if let Some(f) = TimeFilter::before(&now, t) { time_constraints.push(f); } else { return Err(anyhow!( "'{}' is not a valid date or duration. See 'fd --help'.", t )); } } Ok(time_constraints) } fn ensure_use_hidden_option_for_leading_dot_pattern( config: &Config, pattern_regex: &str, ) -> Result<()> { if cfg!(unix) && config.ignore_hidden && pattern_matches_strings_with_leading_dot(pattern_regex) { Err(anyhow!( "The pattern seems to only match files with a leading dot, but hidden files are \ filtered by default. Consider adding -H/--hidden to search hidden files as well \ or adjust your search pattern." )) } else { Ok(()) } } fn build_regex(pattern_regex: String, config: &Config) -> Result { RegexBuilder::new(&pattern_regex) .case_insensitive(!config.case_sensitive) .dot_matches_new_line(true) .build() .map_err(|e| { anyhow!( "{}\n\nNote: You can use the '--fixed-strings' option to search for a \ literal string instead of a regular expression. Alternatively, you can \ also use the '--glob' option to match on a glob pattern.", e.to_string() ) }) } fd-find-8.3.1/src/output.rs000064400000000000000000000076170072674642500137040ustar 00000000000000use std::borrow::Cow; use std::io::{self, Write}; use std::path::Path; use lscolors::{Indicator, LsColors, Style}; use crate::config::Config; use crate::error::print_error; use crate::exit_codes::ExitCode; use crate::filesystem::strip_current_dir; fn replace_path_separator(path: &str, new_path_separator: &str) -> String { path.replace(std::path::MAIN_SEPARATOR, new_path_separator) } // TODO: this function is performance critical and can probably be optimized pub fn print_entry(stdout: &mut W, entry: &Path, config: &Config) { let path = if config.strip_cwd_prefix { strip_current_dir(entry) } else { entry }; let r = if let Some(ref ls_colors) = config.ls_colors { print_entry_colorized(stdout, path, config, ls_colors) } else { print_entry_uncolorized(stdout, path, config) }; if let Err(e) = r { if e.kind() == ::std::io::ErrorKind::BrokenPipe { // Exit gracefully in case of a broken pipe (e.g. 'fd ... | head -n 3'). 
ExitCode::Success.exit(); } else { print_error(format!("Could not write to output: {}", e)); ExitCode::GeneralError.exit(); } } } // TODO: this function is performance critical and can probably be optimized fn print_entry_colorized( stdout: &mut W, path: &Path, config: &Config, ls_colors: &LsColors, ) -> io::Result<()> { // Split the path between the parent and the last component let mut offset = 0; let path_str = path.to_string_lossy(); if let Some(parent) = path.parent() { offset = parent.to_string_lossy().len(); for c in path_str[offset..].chars() { if std::path::is_separator(c) { offset += c.len_utf8(); } else { break; } } } if offset > 0 { let mut parent_str = Cow::from(&path_str[..offset]); if let Some(ref separator) = config.path_separator { *parent_str.to_mut() = replace_path_separator(&parent_str, separator); } let style = ls_colors .style_for_indicator(Indicator::Directory) .map(Style::to_ansi_term_style) .unwrap_or_default(); write!(stdout, "{}", style.paint(parent_str))?; } let style = ls_colors .style_for_path(path) .map(Style::to_ansi_term_style) .unwrap_or_default(); write!(stdout, "{}", style.paint(&path_str[offset..]))?; if config.null_separator { write!(stdout, "\0")?; } else { writeln!(stdout)?; } Ok(()) } // TODO: this function is performance critical and can probably be optimized fn print_entry_uncolorized_base( stdout: &mut W, path: &Path, config: &Config, ) -> io::Result<()> { let separator = if config.null_separator { "\0" } else { "\n" }; let mut path_string = path.to_string_lossy(); if let Some(ref separator) = config.path_separator { *path_string.to_mut() = replace_path_separator(&path_string, separator); } write!(stdout, "{}{}", path_string, separator) } #[cfg(not(unix))] fn print_entry_uncolorized( stdout: &mut W, path: &Path, config: &Config, ) -> io::Result<()> { print_entry_uncolorized_base(stdout, path, config) } #[cfg(unix)] fn print_entry_uncolorized( stdout: &mut W, path: &Path, config: &Config, ) -> io::Result<()> { use std::os::unix::ffi::OsStrExt; if config.interactive_terminal || config.path_separator.is_some() { // Fall back to the base implementation print_entry_uncolorized_base(stdout, path, config) } else { // Print path as raw bytes, allowing invalid UTF-8 filenames to be passed to other processes let separator = if config.null_separator { b"\0" } else { b"\n" }; stdout.write_all(path.as_os_str().as_bytes())?; stdout.write_all(separator) } } fd-find-8.3.1/src/regex_helper.rs000064400000000000000000000066520072674642500150130ustar 00000000000000use regex_syntax::hir::Hir; use regex_syntax::ParserBuilder; /// Determine if a regex pattern contains a literal uppercase character. pub fn pattern_has_uppercase_char(pattern: &str) -> bool { let mut parser = ParserBuilder::new().allow_invalid_utf8(true).build(); parser .parse(pattern) .map(|hir| hir_has_uppercase_char(&hir)) .unwrap_or(false) } /// Determine if a regex expression contains a literal uppercase character. fn hir_has_uppercase_char(hir: &Hir) -> bool { use regex_syntax::hir::*; match *hir.kind() { HirKind::Literal(Literal::Unicode(c)) => c.is_uppercase(), HirKind::Literal(Literal::Byte(b)) => char::from(b).is_uppercase(), HirKind::Class(Class::Unicode(ref ranges)) => ranges .iter() .any(|r| r.start().is_uppercase() || r.end().is_uppercase()), HirKind::Class(Class::Bytes(ref ranges)) => ranges .iter() .any(|r| char::from(r.start()).is_uppercase() || char::from(r.end()).is_uppercase()), HirKind::Group(Group { ref hir, .. }) | HirKind::Repetition(Repetition { ref hir, .. 
}) => { hir_has_uppercase_char(hir) } HirKind::Concat(ref hirs) | HirKind::Alternation(ref hirs) => { hirs.iter().any(hir_has_uppercase_char) } _ => false, } } /// Determine if a regex pattern only matches strings starting with a literal dot (hidden files) pub fn pattern_matches_strings_with_leading_dot(pattern: &str) -> bool { let mut parser = ParserBuilder::new().allow_invalid_utf8(true).build(); parser .parse(pattern) .map(|hir| hir_matches_strings_with_leading_dot(&hir)) .unwrap_or(false) } /// See above. fn hir_matches_strings_with_leading_dot(hir: &Hir) -> bool { use regex_syntax::hir::*; // Note: this only really detects the simplest case where a regex starts with // "^\\.", i.e. a start text anchor and a literal dot character. There are a lot // of other patterns that ONLY match hidden files, e.g. ^(\\.foo|\\.bar) which are // not (yet) detected by this algorithm. match *hir.kind() { HirKind::Concat(ref hirs) => { let mut hirs = hirs.iter(); if let Some(hir) = hirs.next() { if *hir.kind() != HirKind::Anchor(Anchor::StartText) { return false; } } else { return false; } if let Some(hir) = hirs.next() { *hir.kind() == HirKind::Literal(Literal::Unicode('.')) } else { false } } _ => false, } } #[test] fn pattern_has_uppercase_char_simple() { assert!(pattern_has_uppercase_char("A")); assert!(pattern_has_uppercase_char("foo.EXE")); assert!(!pattern_has_uppercase_char("a")); assert!(!pattern_has_uppercase_char("foo.exe123")); } #[test] fn pattern_has_uppercase_char_advanced() { assert!(pattern_has_uppercase_char("foo.[a-zA-Z]")); assert!(!pattern_has_uppercase_char(r"\Acargo")); assert!(!pattern_has_uppercase_char(r"carg\x6F")); } #[test] fn matches_strings_with_leading_dot_simple() { assert!(pattern_matches_strings_with_leading_dot("^\\.gitignore")); assert!(!pattern_matches_strings_with_leading_dot("^.gitignore")); assert!(!pattern_matches_strings_with_leading_dot("\\.gitignore")); assert!(!pattern_matches_strings_with_leading_dot("^gitignore")); } fd-find-8.3.1/src/walk.rs000064400000000000000000000515630072674642500133010ustar 00000000000000use std::ffi::OsStr; use std::fs::{FileType, Metadata}; use std::io; use std::mem; use std::path::{Path, PathBuf}; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::mpsc::{channel, Receiver, RecvTimeoutError, Sender}; use std::sync::{Arc, Mutex}; use std::thread; use std::time::{Duration, Instant}; use std::{borrow::Cow, io::Write}; use anyhow::{anyhow, Result}; use ignore::overrides::OverrideBuilder; use ignore::{self, WalkBuilder}; use once_cell::unsync::OnceCell; use regex::bytes::Regex; use crate::config::Config; use crate::error::print_error; use crate::exec; use crate::exit_codes::{merge_exitcodes, ExitCode}; use crate::filesystem; use crate::output; /// The receiver thread can either be buffering results or directly streaming to the console. #[derive(PartialEq)] enum ReceiverMode { /// Receiver is still buffering in order to sort the results, if the search finishes fast /// enough. Buffering, /// Receiver is directly printing results to the output. Streaming, } /// The Worker threads can result in a valid entry having PathBuf or an error. pub enum WorkerResult { Entry(PathBuf), Error(ignore::Error), } /// Maximum size of the output buffer before flushing results to the console pub const MAX_BUFFER_LENGTH: usize = 1000; /// Default duration until output buffering switches to streaming. 
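// Illustrative note (added comment, not part of the original source): if the whole search
// finishes within this window, the buffered results are sorted before printing; once the
// deadline passes (or the buffer grows past MAX_BUFFER_LENGTH), the receiver switches to
// streaming mode and prints results in arrival order.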
pub const DEFAULT_MAX_BUFFER_TIME: Duration = Duration::from_millis(100); /// Recursively scan the given search path for files / pathnames matching the pattern. /// /// If the `--exec` argument was supplied, this will create a thread pool for executing /// jobs in parallel from a given command line and the discovered paths. Otherwise, each /// path will simply be written to standard output. pub fn scan(path_vec: &[PathBuf], pattern: Arc, config: Arc) -> Result { let mut path_iter = path_vec.iter(); let first_path_buf = path_iter .next() .expect("Error: Path vector can not be empty"); let (tx, rx) = channel(); let mut override_builder = OverrideBuilder::new(first_path_buf.as_path()); for pattern in &config.exclude_patterns { override_builder .add(pattern) .map_err(|e| anyhow!("Malformed exclude pattern: {}", e))?; } let overrides = override_builder .build() .map_err(|_| anyhow!("Mismatch in exclude patterns"))?; let mut walker = WalkBuilder::new(first_path_buf.as_path()); walker .hidden(config.ignore_hidden) .ignore(config.read_fdignore) .parents(config.read_parent_ignore && (config.read_fdignore || config.read_vcsignore)) .git_ignore(config.read_vcsignore) .git_global(config.read_vcsignore) .git_exclude(config.read_vcsignore) .overrides(overrides) .follow_links(config.follow_links) // No need to check for supported platforms, option is unavailable on unsupported ones .same_file_system(config.one_file_system) .max_depth(config.max_depth); if config.read_fdignore { walker.add_custom_ignore_filename(".fdignore"); } if config.read_global_ignore { #[cfg(target_os = "macos")] let config_dir_op = std::env::var_os("XDG_CONFIG_HOME") .map(PathBuf::from) .filter(|p| p.is_absolute()) .or_else(|| dirs_next::home_dir().map(|d| d.join(".config"))); #[cfg(not(target_os = "macos"))] let config_dir_op = dirs_next::config_dir(); if let Some(global_ignore_file) = config_dir_op .map(|p| p.join("fd").join("ignore")) .filter(|p| p.is_file()) { let result = walker.add_ignore(global_ignore_file); match result { Some(ignore::Error::Partial(_)) => (), Some(err) => { print_error(format!( "Malformed pattern in global ignore file. {}.", err.to_string() )); } None => (), } } } for ignore_file in &config.ignore_files { let result = walker.add_ignore(ignore_file); match result { Some(ignore::Error::Partial(_)) => (), Some(err) => { print_error(format!( "Malformed pattern in custom ignore file. {}.", err.to_string() )); } None => (), } } for path_entry in path_iter { walker.add(path_entry.as_path()); } let parallel_walker = walker.threads(config.threads).build_parallel(); // Flag for cleanly shutting down the parallel walk let quit_flag = Arc::new(AtomicBool::new(false)); // Flag specifically for quitting due to ^C let interrupt_flag = Arc::new(AtomicBool::new(false)); if config.ls_colors.is_some() && config.command.is_none() { let quit_flag = Arc::clone(&quit_flag); let interrupt_flag = Arc::clone(&interrupt_flag); ctrlc::set_handler(move || { quit_flag.store(true, Ordering::Relaxed); if interrupt_flag.fetch_or(true, Ordering::Relaxed) { // Ctrl-C has been pressed twice, exit NOW ExitCode::KilledBySigint.exit(); } }) .unwrap(); } // Spawn the thread that receives all results through the channel. let receiver_thread = spawn_receiver(&config, &quit_flag, &interrupt_flag, rx); // Spawn the sender threads. spawn_senders(&config, &quit_flag, pattern, parallel_walker, tx); // Wait for the receiver thread to print out all results. 
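// Illustrative note (added comment, not part of the original source): the sender side is
// the parallel walker (each walker thread pushes `WorkerResult` values into its clone of
// `tx`), while the single receiver thread prints paths or dispatches them to `--exec`
// jobs; joining it here waits until all output and command execution has finished.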
let exit_code = receiver_thread.join().unwrap(); if interrupt_flag.load(Ordering::Relaxed) { Ok(ExitCode::KilledBySigint) } else { Ok(exit_code) } } /// Wrapper for the receiver thread's buffering behavior. struct ReceiverBuffer { /// The configuration. config: Arc, /// For shutting down the senders. quit_flag: Arc, /// The ^C notifier. interrupt_flag: Arc, /// Receiver for worker results. rx: Receiver, /// Standard output. stdout: W, /// The current buffer mode. mode: ReceiverMode, /// The deadline to switch to streaming mode. deadline: Instant, /// The buffer of quickly received paths. buffer: Vec, /// Result count. num_results: usize, } impl ReceiverBuffer { /// Create a new receiver buffer. fn new( config: Arc, quit_flag: Arc, interrupt_flag: Arc, rx: Receiver, stdout: W, ) -> Self { let max_buffer_time = config.max_buffer_time.unwrap_or(DEFAULT_MAX_BUFFER_TIME); let deadline = Instant::now() + max_buffer_time; Self { config, quit_flag, interrupt_flag, rx, stdout, mode: ReceiverMode::Buffering, deadline, buffer: Vec::with_capacity(MAX_BUFFER_LENGTH), num_results: 0, } } /// Process results until finished. fn process(&mut self) -> ExitCode { loop { if let Err(ec) = self.poll() { self.quit_flag.store(true, Ordering::Relaxed); return ec; } } } /// Receive the next worker result. fn recv(&self) -> Result { match self.mode { ReceiverMode::Buffering => { // Wait at most until we should switch to streaming let now = Instant::now(); self.deadline .checked_duration_since(now) .ok_or(RecvTimeoutError::Timeout) .and_then(|t| self.rx.recv_timeout(t)) } ReceiverMode::Streaming => { // Wait however long it takes for a result Ok(self.rx.recv()?) } } } /// Wait for a result or state change. fn poll(&mut self) -> Result<(), ExitCode> { match self.recv() { Ok(WorkerResult::Entry(path)) => { if self.config.quiet { return Err(ExitCode::HasResults(true)); } match self.mode { ReceiverMode::Buffering => { self.buffer.push(path); if self.buffer.len() > MAX_BUFFER_LENGTH { self.stream()?; } } ReceiverMode::Streaming => { self.print(&path)?; self.flush()?; } } self.num_results += 1; if let Some(max_results) = self.config.max_results { if self.num_results >= max_results { return self.stop(); } } } Ok(WorkerResult::Error(err)) => { if self.config.show_filesystem_errors { print_error(err.to_string()); } } Err(RecvTimeoutError::Timeout) => { self.stream()?; } Err(RecvTimeoutError::Disconnected) => { return self.stop(); } } Ok(()) } /// Output a path. fn print(&mut self, path: &Path) -> Result<(), ExitCode> { output::print_entry(&mut self.stdout, path, &self.config); if self.interrupt_flag.load(Ordering::Relaxed) { // Ignore any errors on flush, because we're about to exit anyway let _ = self.flush(); return Err(ExitCode::KilledBySigint); } Ok(()) } /// Switch ourselves into streaming mode. fn stream(&mut self) -> Result<(), ExitCode> { self.mode = ReceiverMode::Streaming; let buffer = mem::take(&mut self.buffer); for path in buffer { self.print(&path)?; } self.flush() } /// Stop looping. fn stop(&mut self) -> Result<(), ExitCode> { if self.mode == ReceiverMode::Buffering { self.buffer.sort(); self.stream()?; } if self.config.quiet { Err(ExitCode::HasResults(self.num_results > 0)) } else { Err(ExitCode::Success) } } /// Flush stdout if necessary. fn flush(&mut self) -> Result<(), ExitCode> { if self.config.interactive_terminal && self.stdout.flush().is_err() { // Probably a broken pipe. Exit gracefully. 
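// Illustrative note (added comment, not part of the original source): flushing is only
// attempted for interactive terminals, and a failed flush ends the receiver loop through
// the returned exit code rather than panicking.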
return Err(ExitCode::GeneralError); } Ok(()) } } fn spawn_receiver( config: &Arc, quit_flag: &Arc, interrupt_flag: &Arc, rx: Receiver, ) -> thread::JoinHandle { let config = Arc::clone(config); let quit_flag = Arc::clone(quit_flag); let interrupt_flag = Arc::clone(interrupt_flag); let show_filesystem_errors = config.show_filesystem_errors; let threads = config.threads; // This will be used to check if output should be buffered when only running a single thread let enable_output_buffering: bool = threads > 1; thread::spawn(move || { // This will be set to `Some` if the `--exec` argument was supplied. if let Some(ref cmd) = config.command { if cmd.in_batch_mode() { exec::batch( rx, cmd, show_filesystem_errors, enable_output_buffering, config.batch_size, ) } else { let shared_rx = Arc::new(Mutex::new(rx)); let out_perm = Arc::new(Mutex::new(())); // Each spawned job will store it's thread handle in here. let mut handles = Vec::with_capacity(threads); for _ in 0..threads { let rx = Arc::clone(&shared_rx); let cmd = Arc::clone(cmd); let out_perm = Arc::clone(&out_perm); // Spawn a job thread that will listen for and execute inputs. let handle = thread::spawn(move || { exec::job( rx, cmd, out_perm, show_filesystem_errors, enable_output_buffering, ) }); // Push the handle of the spawned thread into the vector for later joining. handles.push(handle); } // Wait for all threads to exit before exiting the program. let exit_codes = handles .into_iter() .map(|handle| handle.join().unwrap()) .collect::>(); merge_exitcodes(exit_codes) } } else { let stdout = io::stdout(); let stdout = stdout.lock(); let stdout = io::BufWriter::new(stdout); let mut rxbuffer = ReceiverBuffer::new(config, quit_flag, interrupt_flag, rx, stdout); rxbuffer.process() } }) } enum DirEntryInner { Normal(ignore::DirEntry), BrokenSymlink(PathBuf), } pub struct DirEntry { inner: DirEntryInner, metadata: OnceCell>, } impl DirEntry { fn normal(e: ignore::DirEntry) -> Self { Self { inner: DirEntryInner::Normal(e), metadata: OnceCell::new(), } } fn broken_symlink(path: PathBuf) -> Self { Self { inner: DirEntryInner::BrokenSymlink(path), metadata: OnceCell::new(), } } pub fn path(&self) -> &Path { match &self.inner { DirEntryInner::Normal(e) => e.path(), DirEntryInner::BrokenSymlink(pathbuf) => pathbuf.as_path(), } } pub fn file_type(&self) -> Option { match &self.inner { DirEntryInner::Normal(e) => e.file_type(), DirEntryInner::BrokenSymlink(_) => self.metadata().map(|m| m.file_type()), } } pub fn metadata(&self) -> Option<&Metadata> { self.metadata .get_or_init(|| match &self.inner { DirEntryInner::Normal(e) => e.metadata().ok(), DirEntryInner::BrokenSymlink(path) => path.symlink_metadata().ok(), }) .as_ref() } pub fn depth(&self) -> Option { match &self.inner { DirEntryInner::Normal(e) => Some(e.depth()), DirEntryInner::BrokenSymlink(_) => None, } } } fn spawn_senders( config: &Arc, quit_flag: &Arc, pattern: Arc, parallel_walker: ignore::WalkParallel, tx: Sender, ) { parallel_walker.run(|| { let config = Arc::clone(config); let pattern = Arc::clone(&pattern); let tx_thread = tx.clone(); let quit_flag = Arc::clone(quit_flag); Box::new(move |entry_o| { if quit_flag.load(Ordering::Relaxed) { return ignore::WalkState::Quit; } let entry = match entry_o { Ok(ref e) if e.depth() == 0 => { // Skip the root directory entry. 
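// Illustrative note (added comment, not part of the original source): depth 0 is the
// search root itself (e.g. `.` or an explicit search path), which should never show up
// in the results, so it is passed over without being matched against the pattern.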
return ignore::WalkState::Continue; } Ok(e) => DirEntry::normal(e), Err(ignore::Error::WithPath { path, err: inner_err, }) => match inner_err.as_ref() { ignore::Error::Io(io_error) if io_error.kind() == io::ErrorKind::NotFound && path .symlink_metadata() .ok() .map_or(false, |m| m.file_type().is_symlink()) => { DirEntry::broken_symlink(path) } _ => { return match tx_thread.send(WorkerResult::Error(ignore::Error::WithPath { path, err: inner_err, })) { Ok(_) => ignore::WalkState::Continue, Err(_) => ignore::WalkState::Quit, } } }, Err(err) => { return match tx_thread.send(WorkerResult::Error(err)) { Ok(_) => ignore::WalkState::Continue, Err(_) => ignore::WalkState::Quit, } } }; if let Some(min_depth) = config.min_depth { if entry.depth().map_or(true, |d| d < min_depth) { return ignore::WalkState::Continue; } } // Check the name first, since it doesn't require metadata let entry_path = entry.path(); let search_str: Cow = if config.search_full_path { let path_abs_buf = filesystem::path_absolute_form(entry_path) .expect("Retrieving absolute path succeeds"); Cow::Owned(path_abs_buf.as_os_str().to_os_string()) } else { match entry_path.file_name() { Some(filename) => Cow::Borrowed(filename), None => unreachable!( "Encountered file system entry without a file name. This should only \ happen for paths like 'foo/bar/..' or '/' which are not supposed to \ appear in a file system traversal." ), } }; if !pattern.is_match(&filesystem::osstr_to_bytes(search_str.as_ref())) { return ignore::WalkState::Continue; } // Filter out unwanted extensions. if let Some(ref exts_regex) = config.extensions { if let Some(path_str) = entry_path.file_name() { if !exts_regex.is_match(&filesystem::osstr_to_bytes(path_str)) { return ignore::WalkState::Continue; } } else { return ignore::WalkState::Continue; } } // Filter out unwanted file types. if let Some(ref file_types) = config.file_types { if file_types.should_ignore(&entry) { return ignore::WalkState::Continue; } } #[cfg(unix)] { if let Some(ref owner_constraint) = config.owner_constraint { if let Some(metadata) = entry.metadata() { if !owner_constraint.matches(metadata) { return ignore::WalkState::Continue; } } else { return ignore::WalkState::Continue; } } } // Filter out unwanted sizes if it is a file and we have been given size constraints. if !config.size_constraints.is_empty() { if entry_path.is_file() { if let Some(metadata) = entry.metadata() { let file_size = metadata.len(); if config .size_constraints .iter() .any(|sc| !sc.is_within(file_size)) { return ignore::WalkState::Continue; } } else { return ignore::WalkState::Continue; } } else { return ignore::WalkState::Continue; } } // Filter out unwanted modification times if !config.time_constraints.is_empty() { let mut matched = false; if let Some(metadata) = entry.metadata() { if let Ok(modified) = metadata.modified() { matched = config .time_constraints .iter() .all(|tf| tf.applies_to(&modified)); } } if !matched { return ignore::WalkState::Continue; } } let send_result = tx_thread.send(WorkerResult::Entry(entry_path.to_owned())); if send_result.is_err() { return ignore::WalkState::Quit; } // Apply pruning. 
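// Illustrative note (added comment, not part of the original source): with `--prune`, a
// directory that matched is reported but not descended into; `WalkState::Skip` tells the
// walker to skip the children of the current entry.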
if config.prune { return ignore::WalkState::Skip; } ignore::WalkState::Continue }) }); } fd-find-8.3.1/tests/testenv/mod.rs000064400000000000000000000234530072674642500151620ustar 00000000000000use std::env; use std::fs; use std::io::{self, Write}; #[cfg(unix)] use std::os::unix; #[cfg(windows)] use std::os::windows; use std::path::{Path, PathBuf}; use std::process; use tempdir::TempDir; /// Environment for the integration tests. pub struct TestEnv { /// Temporary working directory. temp_dir: TempDir, /// Path to the *fd* executable. fd_exe: PathBuf, /// Normalize each line by sorting the whitespace-separated words normalize_line: bool, } /// Create the working directory and the test files. fn create_working_directory( directories: &[&'static str], files: &[&'static str], ) -> Result { let temp_dir = TempDir::new("fd-tests")?; { let root = temp_dir.path(); // Pretend that this is a Git repository in order for `.gitignore` files to be respected fs::create_dir_all(root.join(".git"))?; for directory in directories { fs::create_dir_all(root.join(directory))?; } for file in files { fs::File::create(root.join(file))?; } #[cfg(unix)] unix::fs::symlink(root.join("one/two"), root.join("symlink"))?; // Note: creating symlinks on Windows requires the `SeCreateSymbolicLinkPrivilege` which // is by default only granted for administrators. #[cfg(windows)] windows::fs::symlink_dir(root.join("one/two"), root.join("symlink"))?; fs::File::create(root.join(".fdignore"))?.write_all(b"fdignored.foo")?; fs::File::create(root.join(".gitignore"))?.write_all(b"gitignored.foo")?; } Ok(temp_dir) } /// Find the *fd* executable. fn find_fd_exe() -> PathBuf { // Tests exe is in target/debug/deps, the *fd* exe is in target/debug let root = env::current_exe() .expect("tests executable") .parent() .expect("tests executable directory") .parent() .expect("fd executable directory") .to_path_buf(); let exe_name = if cfg!(windows) { "fd.exe" } else { "fd" }; root.join(exe_name) } /// Format an error message for when *fd* did not exit successfully. fn format_exit_error(args: &[&str], output: &process::Output) -> String { format!( "`fd {}` did not exit successfully.\nstdout:\n---\n{}---\nstderr:\n---\n{}---", args.join(" "), String::from_utf8_lossy(&output.stdout), String::from_utf8_lossy(&output.stderr) ) } /// Format an error message for when the output of *fd* did not match the expected output. fn format_output_error(args: &[&str], expected: &str, actual: &str) -> String { // Generate diff text. let diff_text = diff::lines(expected, actual) .into_iter() .map(|diff| match diff { diff::Result::Left(l) => format!("-{}", l), diff::Result::Both(l, _) => format!(" {}", l), diff::Result::Right(r) => format!("+{}", r), }) .collect::>() .join("\n"); format!( concat!( "`fd {}` did not produce the expected output.\n", "Showing diff between expected and actual:\n{}\n" ), args.join(" "), diff_text ) } /// Normalize the output for comparison. fn normalize_output(s: &str, trim_start: bool, normalize_line: bool) -> String { // Split into lines and normalize separators. 
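// Illustrative note (added comment, not part of the original source): NUL separators from
// `--print0` are rewritten as a literal "NULL" plus a newline, '/' is rewritten to the
// platform path separator, and the resulting lines are sorted, so comparisons do not
// depend on result order or on the platform's separator.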
let mut lines = s .replace('\0', "NULL\n") .lines() .map(|line| { let line = if trim_start { line.trim_start() } else { line }; let line = line.replace('/', &std::path::MAIN_SEPARATOR.to_string()); if normalize_line { let mut words: Vec<_> = line.split_whitespace().collect(); words.sort_unstable(); return words.join(" "); } line }) .collect::>(); lines.sort(); lines.join("\n") } impl TestEnv { pub fn new(directories: &[&'static str], files: &[&'static str]) -> TestEnv { let temp_dir = create_working_directory(directories, files).expect("working directory"); let fd_exe = find_fd_exe(); TestEnv { temp_dir, fd_exe, normalize_line: false, } } pub fn normalize_line(self, normalize: bool) -> TestEnv { TestEnv { temp_dir: self.temp_dir, fd_exe: self.fd_exe, normalize_line: normalize, } } /// Create a broken symlink at the given path in the temp_dir. pub fn create_broken_symlink>( &mut self, link_path: P, ) -> Result { let root = self.test_root(); let broken_symlink_link = root.join(link_path); { let temp_target_dir = TempDir::new("fd-tests-broken-symlink")?; let broken_symlink_target = temp_target_dir.path().join("broken_symlink_target"); fs::File::create(&broken_symlink_target)?; #[cfg(unix)] unix::fs::symlink(&broken_symlink_target, &broken_symlink_link)?; #[cfg(windows)] windows::fs::symlink_file(&broken_symlink_target, &broken_symlink_link)?; } Ok(broken_symlink_link) } /// Get the root directory for the tests. pub fn test_root(&self) -> PathBuf { self.temp_dir.path().to_path_buf() } /// Get the root directory of the file system. pub fn system_root(&self) -> PathBuf { let mut components = self.temp_dir.path().components(); PathBuf::from(components.next().expect("root directory").as_os_str()) } /// Assert that calling *fd* in the specified path under the root working directory, /// and with the specified arguments produces the expected output. pub fn assert_success_and_get_output>( &self, path: P, args: &[&str], ) -> process::Output { // Setup *fd* command. let mut cmd = process::Command::new(&self.fd_exe); cmd.current_dir(self.temp_dir.path().join(path)); cmd.arg("--no-global-ignore-file").args(args); // Run *fd*. let output = cmd.output().expect("fd output"); // Check for exit status. if !output.status.success() { panic!("{}", format_exit_error(args, &output)); } output } /// Assert that calling *fd* with the specified arguments produces the expected output. pub fn assert_output(&self, args: &[&str], expected: &str) { self.assert_output_subdirectory(".", args, expected) } /// Similar to assert_output, but able to handle non-utf8 output #[cfg(all(unix, not(target_os = "macos")))] pub fn assert_output_raw(&self, args: &[&str], expected: &[u8]) { let output = self.assert_success_and_get_output(".", args); assert_eq!(expected, &output.stdout[..]); } /// Assert that calling *fd* in the specified path under the root working directory, /// and with the specified arguments produces the expected output. pub fn assert_output_subdirectory>( &self, path: P, args: &[&str], expected: &str, ) { let output = self.assert_success_and_get_output(path, args); // Normalize both expected and actual output. let expected = normalize_output(expected, true, self.normalize_line); let actual = normalize_output( &String::from_utf8_lossy(&output.stdout), false, self.normalize_line, ); // Compare actual output to expected output. 
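// Illustrative note (added comment, not part of the original source): both strings were
// normalized above, so a mismatch here reflects a real difference in the set of reported
// paths rather than a difference in ordering or path separators.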
if expected != actual { panic!("{}", format_output_error(args, &expected, &actual)); } } /// Assert that calling *fd* with the specified arguments produces the expected error, /// and does not succeed. pub fn assert_failure_with_error(&self, args: &[&str], expected: &str) { let status = self.assert_error_subdirectory(".", args, Some(expected)); if status.success() { panic!("error '{}' did not occur.", expected); } } /// Assert that calling *fd* with the specified arguments does not succeed. pub fn assert_failure(&self, args: &[&str]) { let status = self.assert_error_subdirectory(".", args, None); if status.success() { panic!("Failure did not occur as expected."); } } /// Assert that calling *fd* with the specified arguments produces the expected error. pub fn assert_error(&self, args: &[&str], expected: &str) -> process::ExitStatus { self.assert_error_subdirectory(".", args, Some(expected)) } /// Assert that calling *fd* in the specified path under the root working directory, /// and with the specified arguments produces an error with the expected message. fn assert_error_subdirectory>( &self, path: P, args: &[&str], expected: Option<&str>, ) -> process::ExitStatus { // Setup *fd* command. let mut cmd = process::Command::new(&self.fd_exe); cmd.current_dir(self.temp_dir.path().join(path)); cmd.arg("--no-global-ignore-file").args(args); // Run *fd*. let output = cmd.output().expect("fd output"); if let Some(expected) = expected { // Normalize both expected and actual output. let expected_error = normalize_output(expected, true, self.normalize_line); let actual_err = normalize_output( &String::from_utf8_lossy(&output.stderr), false, self.normalize_line, ); // Compare actual output to expected output. if !actual_err.trim_start().starts_with(&expected_error) { panic!( "{}", format_output_error(args, &expected_error, &actual_err) ); } } output.status } } fd-find-8.3.1/tests/tests.rs000064400000000000000000001474130072674642500140600ustar 00000000000000mod testenv; use std::fs; use std::io::Write; use std::path::Path; use std::time::{Duration, SystemTime}; use test_case::test_case; use normpath::PathExt; use regex::escape; use crate::testenv::TestEnv; static DEFAULT_DIRS: &[&str] = &["one/two/three", "one/two/three/directory_foo"]; static DEFAULT_FILES: &[&str] = &[ "a.foo", "one/b.foo", "one/two/c.foo", "one/two/C.Foo2", "one/two/three/d.foo", "fdignored.foo", "gitignored.foo", ".hidden.foo", "e1 e2", ]; #[allow(clippy::let_and_return)] fn get_absolute_root_path(env: &TestEnv) -> String { let path = env .test_root() .normalize() .expect("absolute path") .as_path() .to_str() .expect("string") .to_string(); #[cfg(windows)] let path = path.trim_start_matches(r"\\?\").to_string(); path } #[cfg(test)] fn get_test_env_with_abs_path(dirs: &[&'static str], files: &[&'static str]) -> (TestEnv, String) { let env = TestEnv::new(dirs, files); let root_path = get_absolute_root_path(&env); (env, root_path) } #[cfg(test)] fn create_file_with_size>(path: P, size_in_bytes: usize) { let content = "#".repeat(size_in_bytes); let mut f = fs::File::create::
<P>
(path).unwrap(); f.write_all(content.as_bytes()).unwrap(); } /// Simple test #[test] fn test_simple() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output(&["a.foo"], "./a.foo"); te.assert_output(&["b.foo"], "./one/b.foo"); te.assert_output(&["d.foo"], "./one/two/three/d.foo"); te.assert_output( &["foo"], "./a.foo ./one/b.foo ./one/two/c.foo ./one/two/C.Foo2 ./one/two/three/d.foo ./one/two/three/directory_foo", ); } /// Test each pattern type with an empty pattern. #[test] fn test_empty_pattern() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); let expected = "./a.foo ./e1 e2 ./one ./one/b.foo ./one/two ./one/two/c.foo ./one/two/C.Foo2 ./one/two/three ./one/two/three/d.foo ./one/two/three/directory_foo ./symlink"; te.assert_output(&["--regex"], expected); te.assert_output(&["--fixed-strings"], expected); te.assert_output(&["--glob"], expected); } /// Test multiple directory searches #[test] fn test_multi_file() { let dirs = &["test1", "test2"]; let files = &["test1/a.foo", "test1/b.foo", "test2/a.foo"]; let te = TestEnv::new(dirs, files); te.assert_output( &["a.foo", "test1", "test2"], "test1/a.foo test2/a.foo", ); te.assert_output( &["", "test1", "test2"], "test1/a.foo test2/a.foo test1/b.foo", ); te.assert_output(&["a.foo", "test1"], "test1/a.foo"); te.assert_output(&["b.foo", "test1", "test2"], "test1/b.foo"); } /// Test search over multiple directory with missing #[test] fn test_multi_file_with_missing() { let dirs = &["real"]; let files = &["real/a.foo", "real/b.foo"]; let te = TestEnv::new(dirs, files); te.assert_output(&["a.foo", "real", "fake"], "real/a.foo"); te.assert_error( &["a.foo", "real", "fake"], "[fd error]: Search path 'fake' is not a directory.", ); te.assert_output( &["", "real", "fake"], "real/a.foo real/b.foo", ); te.assert_output( &["", "real", "fake1", "fake2"], "real/a.foo real/b.foo", ); te.assert_error( &["", "real", "fake1", "fake2"], "[fd error]: Search path 'fake1' is not a directory. [fd error]: Search path 'fake2' is not a directory.", ); te.assert_failure_with_error( &["", "fake1", "fake2"], "[fd error]: Search path 'fake1' is not a directory. [fd error]: Search path 'fake2' is not a directory. [fd error]: No valid search paths given.", ); } /// Explicit root path #[test] fn test_explicit_root_path() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["foo", "one"], "one/b.foo one/two/c.foo one/two/C.Foo2 one/two/three/d.foo one/two/three/directory_foo", ); te.assert_output( &["foo", "one/two/three"], "one/two/three/d.foo one/two/three/directory_foo", ); te.assert_output_subdirectory( "one/two", &["foo", "../../"], "../../a.foo ../../one/b.foo ../../one/two/c.foo ../../one/two/C.Foo2 ../../one/two/three/d.foo ../../one/two/three/directory_foo", ); te.assert_output_subdirectory( "one/two/three", &["", ".."], "../c.foo ../C.Foo2 ../three ../three/d.foo ../three/directory_foo", ); } /// Regex searches #[test] fn test_regex_searches() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["[a-c].foo"], "./a.foo ./one/b.foo ./one/two/c.foo ./one/two/C.Foo2", ); te.assert_output( &["--case-sensitive", "[a-c].foo"], "./a.foo ./one/b.foo ./one/two/c.foo", ); } /// Smart case #[test] fn test_smart_case() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["c.foo"], "./one/two/c.foo ./one/two/C.Foo2", ); te.assert_output(&["C.Foo"], "./one/two/C.Foo2"); te.assert_output(&["Foo"], "./one/two/C.Foo2"); // Only literal uppercase chars should trigger case sensitivity. 
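// Illustrative note (added comment, not part of the original source): `\A` is a
// start-of-text anchor rather than an uppercase literal, so "\Ac" stays case-insensitive
// under smart case, while "\AC" contains a literal 'C' and becomes case-sensitive.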
te.assert_output( &["\\Ac"], "./one/two/c.foo ./one/two/C.Foo2", ); te.assert_output(&["\\AC"], "./one/two/C.Foo2"); } /// Case sensitivity (--case-sensitive) #[test] fn test_case_sensitive() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output(&["--case-sensitive", "c.foo"], "./one/two/c.foo"); te.assert_output(&["--case-sensitive", "C.Foo"], "./one/two/C.Foo2"); te.assert_output( &["--ignore-case", "--case-sensitive", "C.Foo"], "./one/two/C.Foo2", ); } /// Case insensitivity (--ignore-case) #[test] fn test_case_insensitive() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--ignore-case", "C.Foo"], "./one/two/c.foo ./one/two/C.Foo2", ); te.assert_output( &["--case-sensitive", "--ignore-case", "C.Foo"], "./one/two/c.foo ./one/two/C.Foo2", ); } /// Glob-based searches (--glob) #[test] fn test_glob_searches() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--glob", "*.foo"], "./a.foo ./one/b.foo ./one/two/c.foo ./one/two/three/d.foo", ); te.assert_output( &["--glob", "[a-c].foo"], "./a.foo ./one/b.foo ./one/two/c.foo", ); te.assert_output( &["--glob", "[a-c].foo*"], "./a.foo ./one/b.foo ./one/two/C.Foo2 ./one/two/c.foo", ); } /// Glob-based searches (--glob) in combination with full path searches (--full-path) #[cfg(not(windows))] // TODO: make this work on Windows #[test] fn test_full_path_glob_searches() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--glob", "--full-path", "**/one/**/*.foo"], "./one/b.foo ./one/two/c.foo ./one/two/three/d.foo", ); te.assert_output( &["--glob", "--full-path", "**/one/*/*.foo"], " ./one/two/c.foo", ); te.assert_output( &["--glob", "--full-path", "**/one/*/*/*.foo"], " ./one/two/three/d.foo", ); } #[test] fn test_smart_case_glob_searches() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--glob", "c.foo*"], "./one/two/C.Foo2 ./one/two/c.foo", ); te.assert_output(&["--glob", "C.Foo*"], "./one/two/C.Foo2"); } /// Glob-based searches (--glob) in combination with --case-sensitive #[test] fn test_case_sensitive_glob_searches() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output(&["--glob", "--case-sensitive", "c.foo*"], "./one/two/c.foo"); } /// Glob-based searches (--glob) in combination with --extension #[test] fn test_glob_searches_with_extension() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--glob", "--extension", "foo2", "[a-z].*"], "./one/two/C.Foo2", ); } /// Make sure that --regex overrides --glob #[test] fn test_regex_overrides_glob() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output(&["--glob", "--regex", "Foo2$"], "./one/two/C.Foo2"); } /// Full path search (--full-path) #[test] fn test_full_path() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); let root = te.system_root(); let prefix = escape(&root.to_string_lossy()); te.assert_output( &[ "--full-path", &format!("^{prefix}.*three.*foo$", prefix = prefix), ], "./one/two/three/d.foo ./one/two/three/directory_foo", ); } /// Hidden files (--hidden) #[test] fn test_hidden() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--hidden", "foo"], "./.hidden.foo ./a.foo ./one/b.foo ./one/two/c.foo ./one/two/C.Foo2 ./one/two/three/d.foo ./one/two/three/directory_foo", ); } /// Hidden file attribute on Windows #[cfg(windows)] #[test] fn test_hidden_file_attribute() { use std::os::windows::fs::OpenOptionsExt; let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); // 
https://docs.microsoft.com/en-us/windows/win32/api/fileapi/nf-fileapi-setfileattributesa const FILE_ATTRIBUTE_HIDDEN: u32 = 2; fs::OpenOptions::new() .create(true) .write(true) .attributes(FILE_ATTRIBUTE_HIDDEN) .open(te.test_root().join("hidden-file.txt")) .unwrap(); te.assert_output(&["--hidden", "hidden-file.txt"], "./hidden-file.txt"); te.assert_output(&["hidden-file.txt"], ""); } /// Ignored files (--no-ignore) #[test] fn test_no_ignore() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--no-ignore", "foo"], "./a.foo ./fdignored.foo ./gitignored.foo ./one/b.foo ./one/two/c.foo ./one/two/C.Foo2 ./one/two/three/d.foo ./one/two/three/directory_foo", ); te.assert_output( &["--hidden", "--no-ignore", "foo"], "./.hidden.foo ./a.foo ./fdignored.foo ./gitignored.foo ./one/b.foo ./one/two/c.foo ./one/two/C.Foo2 ./one/two/three/d.foo ./one/two/three/directory_foo", ); } /// .gitignore and .fdignore #[test] fn test_gitignore_and_fdignore() { let files = &[ "ignored-by-nothing", "ignored-by-fdignore", "ignored-by-gitignore", "ignored-by-both", ]; let te = TestEnv::new(&[], files); fs::File::create(te.test_root().join(".fdignore")) .unwrap() .write_all(b"ignored-by-fdignore\nignored-by-both") .unwrap(); fs::File::create(te.test_root().join(".gitignore")) .unwrap() .write_all(b"ignored-by-gitignore\nignored-by-both") .unwrap(); te.assert_output(&["ignored"], "./ignored-by-nothing"); te.assert_output( &["--no-ignore-vcs", "ignored"], "./ignored-by-nothing ./ignored-by-gitignore", ); te.assert_output( &["--no-ignore", "ignored"], "./ignored-by-nothing ./ignored-by-fdignore ./ignored-by-gitignore ./ignored-by-both", ); } /// Ignore parent ignore files (--no-ignore-parent) #[test] fn test_no_ignore_parent() { let dirs = &["inner"]; let files = &[ "inner/parent-ignored", "inner/child-ignored", "inner/not-ignored", ]; let te = TestEnv::new(dirs, files); // Ignore 'parent-ignored' in root fs::File::create(te.test_root().join(".gitignore")) .unwrap() .write_all(b"parent-ignored") .unwrap(); // Ignore 'child-ignored' in inner fs::File::create(te.test_root().join("inner/.gitignore")) .unwrap() .write_all(b"child-ignored") .unwrap(); te.assert_output_subdirectory("inner", &[], "./not-ignored"); te.assert_output_subdirectory( "inner", &["--no-ignore-parent"], "./parent-ignored ./not-ignored", ); } /// Ignore parent ignore files (--no-ignore-parent) with an inner git repo #[test] fn test_no_ignore_parent_inner_git() { let dirs = &["inner"]; let files = &[ "inner/parent-ignored", "inner/child-ignored", "inner/not-ignored", ]; let te = TestEnv::new(dirs, files); // Make the inner folder also appear as a git repo fs::create_dir_all(te.test_root().join("inner/.git")).unwrap(); // Ignore 'parent-ignored' in root fs::File::create(te.test_root().join(".gitignore")) .unwrap() .write_all(b"parent-ignored") .unwrap(); // Ignore 'child-ignored' in inner fs::File::create(te.test_root().join("inner/.gitignore")) .unwrap() .write_all(b"child-ignored") .unwrap(); te.assert_output_subdirectory( "inner", &[], "./not-ignored ./parent-ignored", ); te.assert_output_subdirectory( "inner", &["--no-ignore-parent"], "./not-ignored ./parent-ignored", ); } /// Precedence of .fdignore files #[test] fn test_custom_ignore_precedence() { let dirs = &["inner"]; let files = &["inner/foo"]; let te = TestEnv::new(dirs, files); // Ignore 'foo' via .gitignore fs::File::create(te.test_root().join("inner/.gitignore")) .unwrap() .write_all(b"foo") .unwrap(); // Whitelist 'foo' via .fdignore 
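// The '!' prefix un-ignores a pattern; the expectation here is that the .fdignore whitelist takes precedence over the .gitignore rule above, so 'inner/foo' remains visible.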
fs::File::create(te.test_root().join(".fdignore")) .unwrap() .write_all(b"!foo") .unwrap(); te.assert_output(&["foo"], "./inner/foo"); te.assert_output(&["--no-ignore-vcs", "foo"], "./inner/foo"); te.assert_output(&["--no-ignore", "foo"], "./inner/foo"); } /// VCS ignored files (--no-ignore-vcs) #[test] fn test_no_ignore_vcs() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--no-ignore-vcs", "foo"], "./a.foo ./gitignored.foo ./one/b.foo ./one/two/c.foo ./one/two/C.Foo2 ./one/two/three/d.foo ./one/two/three/directory_foo", ); } /// Test that --no-ignore-vcs still respects .fdignored in parent directory #[test] fn test_no_ignore_vcs_child_dir() { let te = TestEnv::new( &["inner"], &["inner/fdignored.foo", "inner/foo", "inner/gitignored.foo"], ); te.assert_output_subdirectory( "inner", &["--no-ignore-vcs", "foo"], "./foo ./gitignored.foo", ); } /// Custom ignore files (--ignore-file) #[test] fn test_custom_ignore_files() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); // Ignore 'C.Foo2' and everything in 'three'. fs::File::create(te.test_root().join("custom.ignore")) .unwrap() .write_all(b"C.Foo2\nthree") .unwrap(); te.assert_output( &["--ignore-file", "custom.ignore", "foo"], "./a.foo ./one/b.foo ./one/two/c.foo", ); } /// Ignored files with ripgrep aliases (-u / -uu) #[test] fn test_no_ignore_aliases() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["-u", "foo"], "./a.foo ./fdignored.foo ./gitignored.foo ./one/b.foo ./one/two/c.foo ./one/two/C.Foo2 ./one/two/three/d.foo ./one/two/three/directory_foo", ); te.assert_output( &["-uu", "foo"], "./.hidden.foo ./a.foo ./fdignored.foo ./gitignored.foo ./one/b.foo ./one/two/c.foo ./one/two/C.Foo2 ./one/two/three/d.foo ./one/two/three/directory_foo", ); } /// Symlinks (--follow) #[test] fn test_follow() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--follow", "c.foo"], "./one/two/c.foo ./one/two/C.Foo2 ./symlink/c.foo ./symlink/C.Foo2", ); } // File system boundaries (--one-file-system) // Limited to Unix because, to the best of my knowledge, there is no easy way to test a use case with // file systems mounted into the tree on Windows. // Not limiting depth causes massive delay under Darwin, see BurntSushi/ripgrep#1429 #[test] #[cfg(unix)] fn test_file_system_boundaries() { // Helper function to get the device ID for a given path // Inspired by https://github.com/BurntSushi/ripgrep/blob/8892bf648cfec111e6e7ddd9f30e932b0371db68/ignore/src/walk.rs#L1693 fn device_num(path: impl AsRef<Path>) -> u64 { use std::os::unix::fs::MetadataExt; path.as_ref().metadata().map(|md| md.dev()).unwrap() } // Can't simulate file system boundaries let te = TestEnv::new(&[], &[]); let dev_null = Path::new("/dev/null"); // /dev/null should exist in all sane Unixes. Skip if it doesn't exist for some reason. // Also skip should it be on the same device as the root partition for some reason.
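// device_num() returns st_dev from the file metadata, so two paths report different values exactly when they live on different file systems; the early return below skips the test when that situation cannot be observed.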
if !dev_null.is_file() || device_num(dev_null) == device_num("/") { return; } te.assert_output( &["--full-path", "--max-depth", "2", "^/dev/null$", "/"], "/dev/null", ); te.assert_output( &[ "--one-file-system", "--full-path", "--max-depth", "2", "^/dev/null$", "/", ], "", ); } #[test] fn test_follow_broken_symlink() { let mut te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.create_broken_symlink("broken_symlink") .expect("Failed to create broken symlink."); te.assert_output( &["symlink"], "./broken_symlink ./symlink", ); te.assert_output( &["--type", "symlink", "symlink"], "./broken_symlink ./symlink", ); te.assert_output(&["--type", "file", "symlink"], ""); te.assert_output( &["--follow", "--type", "symlink", "symlink"], "./broken_symlink", ); te.assert_output(&["--follow", "--type", "file", "symlink"], ""); } /// Null separator (--print0) #[test] fn test_print0() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--print0", "foo"], "./a.fooNULL ./one/b.fooNULL ./one/two/C.Foo2NULL ./one/two/c.fooNULL ./one/two/three/d.fooNULL ./one/two/three/directory_fooNULL", ); } /// Maximum depth (--max-depth) #[test] fn test_max_depth() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--max-depth", "3"], "./a.foo ./e1 e2 ./one ./one/b.foo ./one/two ./one/two/c.foo ./one/two/C.Foo2 ./one/two/three ./symlink", ); te.assert_output( &["--max-depth", "2"], "./a.foo ./e1 e2 ./one ./one/b.foo ./one/two ./symlink", ); te.assert_output( &["--max-depth", "1"], "./a.foo ./e1 e2 ./one ./symlink", ); } /// Minimum depth (--min-depth) #[test] fn test_min_depth() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--min-depth", "3"], "./one/two/c.foo ./one/two/C.Foo2 ./one/two/three ./one/two/three/d.foo ./one/two/three/directory_foo", ); te.assert_output( &["--min-depth", "4"], "./one/two/three/d.foo ./one/two/three/directory_foo", ); } /// Exact depth (--exact-depth) #[test] fn test_exact_depth() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--exact-depth", "3"], "./one/two/c.foo ./one/two/C.Foo2 ./one/two/three", ); } /// Pruning (--prune) #[test] fn test_prune() { let dirs = &["foo/bar", "bar/foo", "baz"]; let files = &[ "./foo/foo.file", "./foo/bar/foo.file", "./bar/foo.file", "./bar/foo/foo.file", "./baz/foo.file", ]; let te = TestEnv::new(dirs, files); te.assert_output( &["foo"], "./foo ./foo/foo.file ./foo/bar/foo.file ./bar/foo.file ./bar/foo ./bar/foo/foo.file ./baz/foo.file", ); te.assert_output( &["--prune", "foo"], "./foo ./bar/foo ./bar/foo.file ./baz/foo.file", ); } /// Absolute paths (--absolute-path) #[test] fn test_absolute_path() { let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--absolute-path"], &format!( "{abs_path}/a.foo {abs_path}/e1 e2 {abs_path}/one {abs_path}/one/b.foo {abs_path}/one/two {abs_path}/one/two/c.foo {abs_path}/one/two/C.Foo2 {abs_path}/one/two/three {abs_path}/one/two/three/d.foo {abs_path}/one/two/three/directory_foo {abs_path}/symlink", abs_path = &abs_path ), ); te.assert_output( &["--absolute-path", "foo"], &format!( "{abs_path}/a.foo {abs_path}/one/b.foo {abs_path}/one/two/c.foo {abs_path}/one/two/C.Foo2 {abs_path}/one/two/three/d.foo {abs_path}/one/two/three/directory_foo", abs_path = &abs_path ), ); } /// Show absolute paths if the path argument is absolute #[test] fn test_implicit_absolute_path() { let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["foo", &abs_path], 
&format!( "{abs_path}/a.foo {abs_path}/one/b.foo {abs_path}/one/two/c.foo {abs_path}/one/two/C.Foo2 {abs_path}/one/two/three/d.foo {abs_path}/one/two/three/directory_foo", abs_path = &abs_path ), ); } /// Absolute paths should be normalized #[test] fn test_normalized_absolute_path() { let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output_subdirectory( "one", &["--absolute-path", "foo", ".."], &format!( "{abs_path}/a.foo {abs_path}/one/b.foo {abs_path}/one/two/c.foo {abs_path}/one/two/C.Foo2 {abs_path}/one/two/three/d.foo {abs_path}/one/two/three/directory_foo", abs_path = &abs_path ), ); } /// File type filter (--type) #[test] fn test_type() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--type", "f"], "./a.foo ./e1 e2 ./one/b.foo ./one/two/c.foo ./one/two/C.Foo2 ./one/two/three/d.foo", ); te.assert_output(&["--type", "f", "e1"], "./e1 e2"); te.assert_output( &["--type", "d"], "./one ./one/two ./one/two/three ./one/two/three/directory_foo", ); te.assert_output( &["--type", "d", "--type", "l"], "./one ./one/two ./one/two/three ./one/two/three/directory_foo ./symlink", ); te.assert_output(&["--type", "l"], "./symlink"); } /// Test `--type executable` #[cfg(unix)] #[test] fn test_type_executable() { use std::os::unix::fs::OpenOptionsExt; let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); fs::OpenOptions::new() .create(true) .write(true) .mode(0o777) .open(te.test_root().join("executable-file.sh")) .unwrap(); te.assert_output(&["--type", "executable"], "./executable-file.sh"); te.assert_output( &["--type", "executable", "--type", "directory"], "./executable-file.sh ./one ./one/two ./one/two/three ./one/two/three/directory_foo", ); } /// Test `--type empty` #[test] fn test_type_empty() { let te = TestEnv::new(&["dir_empty", "dir_nonempty"], &[]); create_file_with_size(te.test_root().join("0_bytes.foo"), 0); create_file_with_size(te.test_root().join("5_bytes.foo"), 5); create_file_with_size(te.test_root().join("dir_nonempty").join("2_bytes.foo"), 2); te.assert_output( &["--type", "empty"], "./0_bytes.foo ./dir_empty", ); te.assert_output( &["--type", "empty", "--type", "file", "--type", "directory"], "./0_bytes.foo ./dir_empty", ); te.assert_output(&["--type", "empty", "--type", "file"], "./0_bytes.foo"); te.assert_output(&["--type", "empty", "--type", "directory"], "./dir_empty"); } /// File extension (--extension) #[test] fn test_extension() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--extension", "foo"], "./a.foo ./one/b.foo ./one/two/c.foo ./one/two/three/d.foo", ); te.assert_output( &["--extension", ".foo"], "./a.foo ./one/b.foo ./one/two/c.foo ./one/two/three/d.foo", ); te.assert_output( &["--extension", ".foo", "--extension", "foo2"], "./a.foo ./one/b.foo ./one/two/c.foo ./one/two/three/d.foo ./one/two/C.Foo2", ); te.assert_output(&["--extension", ".foo", "a"], "./a.foo"); te.assert_output(&["--extension", "foo2"], "./one/two/C.Foo2"); let te2 = TestEnv::new(&[], &["spam.bar.baz", "egg.bar.baz", "yolk.bar.baz.sig"]); te2.assert_output( &["--extension", ".bar.baz"], "./spam.bar.baz ./egg.bar.baz", ); te2.assert_output(&["--extension", "sig"], "./yolk.bar.baz.sig"); te2.assert_output(&["--extension", "bar.baz.sig"], "./yolk.bar.baz.sig"); let te3 = TestEnv::new(&[], &["latin1.e\u{301}xt", "smiley.☻"]); te3.assert_output(&["--extension", "☻"], "./smiley.☻"); te3.assert_output(&["--extension", ".e\u{301}xt"], "./latin1.e\u{301}xt"); let te4 = TestEnv::new(&[], &[".hidden", "test.hidden"]); 
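// A file whose name starts with a dot (like '.hidden') is treated as hidden and has no extension, so only 'test.hidden' should match the '.hidden' extension filter below.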
te4.assert_output(&["--hidden", "--extension", ".hidden"], "./test.hidden"); } /// No file extension (test for the pattern provided in the --help text) #[test] fn test_no_extension() { let te = TestEnv::new( DEFAULT_DIRS, &["a.foo", "aa", "one/b.foo", "one/bb", "one/two/three/d"], ); te.assert_output( &["^[^.]+$"], "./aa ./one ./one/bb ./one/two ./one/two/three ./one/two/three/d ./one/two/three/directory_foo ./symlink", ); te.assert_output( &["^[^.]+$", "--type", "file"], "./aa ./one/bb ./one/two/three/d", ); } /// Symlink as search directory #[test] fn test_symlink_as_root() { let mut te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.create_broken_symlink("broken_symlink") .expect("Failed to create broken symlink."); // From: http://pubs.opengroup.org/onlinepubs/9699919799/functions/getcwd.html // The getcwd() function shall place an absolute pathname of the current working directory in // the array pointed to by buf, and return buf. The pathname shall contain no components that // are dot or dot-dot, or are symbolic links. // // Key points: // 1. The path of the current working directory of a Unix process cannot contain symlinks. // 2. The path of the current working directory of a Windows process can contain symlinks. // // More: // 1. On Windows, symlinks are resolved after the ".." component. // 2. On Unix, symlinks are resolved immediately as encountered. let parent_parent = if cfg!(windows) { ".." } else { "../.." }; te.assert_output_subdirectory( "symlink", &["", parent_parent], &format!( "{dir}/a.foo {dir}/broken_symlink {dir}/e1 e2 {dir}/one {dir}/one/b.foo {dir}/one/two {dir}/one/two/c.foo {dir}/one/two/C.Foo2 {dir}/one/two/three {dir}/one/two/three/d.foo {dir}/one/two/three/directory_foo {dir}/symlink", dir = &parent_parent ), ); } #[test] fn test_symlink_and_absolute_path() { let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES); let expected_path = if cfg!(windows) { "symlink" } else { "one/two" }; te.assert_output_subdirectory( "symlink", &["--absolute-path"], &format!( "{abs_path}/{expected_path}/c.foo {abs_path}/{expected_path}/C.Foo2 {abs_path}/{expected_path}/three {abs_path}/{expected_path}/three/d.foo {abs_path}/{expected_path}/three/directory_foo", abs_path = &abs_path, expected_path = expected_path ), ); } #[test] fn test_symlink_as_absolute_root() { let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["", &format!("{abs_path}/symlink", abs_path = abs_path)], &format!( "{abs_path}/symlink/c.foo {abs_path}/symlink/C.Foo2 {abs_path}/symlink/three {abs_path}/symlink/three/d.foo {abs_path}/symlink/three/directory_foo", abs_path = &abs_path ), ); } #[test] fn test_symlink_and_full_path() { let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES); let root = te.system_root(); let prefix = escape(&root.to_string_lossy()); let expected_path = if cfg!(windows) { "symlink" } else { "one/two" }; te.assert_output_subdirectory( "symlink", &[ "--absolute-path", "--full-path", &format!("^{prefix}.*three", prefix = prefix), ], &format!( "{abs_path}/{expected_path}/three {abs_path}/{expected_path}/three/d.foo {abs_path}/{expected_path}/three/directory_foo", abs_path = &abs_path, expected_path = expected_path ), ); } #[test] fn test_symlink_and_full_path_abs_path() { let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES); let root = te.system_root(); let prefix = escape(&root.to_string_lossy()); te.assert_output( &[ "--full-path", &format!("^{prefix}.*symlink.*three", prefix 
= prefix), &format!("{abs_path}/symlink", abs_path = abs_path), ], &format!( "{abs_path}/symlink/three {abs_path}/symlink/three/d.foo {abs_path}/symlink/three/directory_foo", abs_path = &abs_path ), ); } /// Exclude patterns (--exclude) #[test] fn test_excludes() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--exclude", "*.foo"], "./one ./one/two ./one/two/C.Foo2 ./one/two/three ./one/two/three/directory_foo ./e1 e2 ./symlink", ); te.assert_output( &["--exclude", "*.foo", "--exclude", "*.Foo2"], "./one ./one/two ./one/two/three ./one/two/three/directory_foo ./e1 e2 ./symlink", ); te.assert_output( &["--exclude", "*.foo", "--exclude", "*.Foo2", "foo"], "./one/two/three/directory_foo", ); te.assert_output( &["--exclude", "one/two", "foo"], "./a.foo ./one/b.foo", ); te.assert_output( &["--exclude", "one/**/*.foo"], "./a.foo ./e1 e2 ./one ./one/two ./one/two/C.Foo2 ./one/two/three ./one/two/three/directory_foo ./symlink", ); } /// Shell script execution (--exec) #[test] fn test_exec() { let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES); // TODO Windows tests: D:file.txt \file.txt \\server\share\file.txt ... if !cfg!(windows) { te.assert_output( &["--absolute-path", "foo", "--exec", "echo"], &format!( "{abs_path}/a.foo {abs_path}/one/b.foo {abs_path}/one/two/C.Foo2 {abs_path}/one/two/c.foo {abs_path}/one/two/three/d.foo {abs_path}/one/two/three/directory_foo", abs_path = &abs_path ), ); te.assert_output( &["foo", "--exec", "echo", "{}"], "./a.foo ./one/b.foo ./one/two/C.Foo2 ./one/two/c.foo ./one/two/three/d.foo ./one/two/three/directory_foo", ); te.assert_output( &["foo", "--exec", "echo", "{.}"], "a one/b one/two/C one/two/c one/two/three/d one/two/three/directory_foo", ); te.assert_output( &["foo", "--exec", "echo", "{/}"], "a.foo b.foo C.Foo2 c.foo d.foo directory_foo", ); te.assert_output( &["foo", "--exec", "echo", "{/.}"], "a b C c d directory_foo", ); te.assert_output( &["foo", "--exec", "echo", "{//}"], ". 
./one ./one/two ./one/two ./one/two/three ./one/two/three", ); te.assert_output(&["e1", "--exec", "printf", "%s.%s\n"], "./e1 e2."); } } #[test] fn test_exec_batch() { let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES); let te = te.normalize_line(true); // TODO Test for windows if !cfg!(windows) { te.assert_output( &["--absolute-path", "foo", "--exec-batch", "echo"], &format!( "{abs_path}/a.foo {abs_path}/one/b.foo {abs_path}/one/two/C.Foo2 {abs_path}/one/two/c.foo {abs_path}/one/two/three/d.foo {abs_path}/one/two/three/directory_foo", abs_path = &abs_path ), ); te.assert_output( &["foo", "--exec-batch", "echo", "{}"], "./a.foo ./one/b.foo ./one/two/C.Foo2 ./one/two/c.foo ./one/two/three/d.foo ./one/two/three/directory_foo", ); te.assert_output( &["foo", "--exec-batch", "echo", "{/}"], "a.foo b.foo C.Foo2 c.foo d.foo directory_foo", ); te.assert_output( &["no_match", "--exec-batch", "echo", "Matched: ", "{/}"], "", ); te.assert_failure_with_error( &["foo", "--exec-batch", "echo", "{}", "{}"], "[fd error]: Only one placeholder allowed for batch commands", ); te.assert_failure_with_error( &["foo", "--exec-batch", "echo", "{/}", ";", "-x", "echo"], "error: The argument '--exec ' cannot be used with '--exec-batch '", ); te.assert_failure_with_error( &["foo", "--exec-batch"], "error: The argument '--exec-batch ' requires a value but none was supplied", ); te.assert_failure_with_error( &["foo", "--exec-batch", "echo {}"], "[fd error]: First argument of exec-batch is expected to be a fixed executable", ); } } #[test] fn test_exec_batch_with_limit() { // TODO Test for windows if cfg!(windows) { return; } let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["foo", "--batch-size", "0", "--exec-batch", "echo", "{}"], "./a.foo ./one/b.foo ./one/two/C.Foo2 ./one/two/c.foo ./one/two/three/d.foo ./one/two/three/directory_foo", ); let output = te.assert_success_and_get_output( ".", &["foo", "--batch-size=2", "--exec-batch", "echo", "{}"], ); let stdout = String::from_utf8_lossy(&output.stdout); for line in stdout.lines() { assert_eq!(2, line.split_whitespace().count()); } let mut paths: Vec<_> = stdout .lines() .flat_map(|line| line.split_whitespace()) .collect(); paths.sort_unstable(); assert_eq!( &paths, &[ "./a.foo", "./one/b.foo", "./one/two/C.Foo2", "./one/two/c.foo", "./one/two/three/d.foo", "./one/two/three/directory_foo" ], ); } /// Shell script execution (--exec) with a custom --path-separator #[test] fn test_exec_with_separator() { let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &[ "--path-separator=#", "--absolute-path", "foo", "--exec", "echo", ], &format!( "{abs_path}#a.foo {abs_path}#one#b.foo {abs_path}#one#two#C.Foo2 {abs_path}#one#two#c.foo {abs_path}#one#two#three#d.foo {abs_path}#one#two#three#directory_foo", abs_path = abs_path.replace(std::path::MAIN_SEPARATOR, "#"), ), ); te.assert_output( &["--path-separator=#", "foo", "--exec", "echo", "{}"], ".#a.foo .#one#b.foo .#one#two#C.Foo2 .#one#two#c.foo .#one#two#three#d.foo .#one#two#three#directory_foo", ); te.assert_output( &["--path-separator=#", "foo", "--exec", "echo", "{.}"], "a one#b one#two#C one#two#c one#two#three#d one#two#three#directory_foo", ); te.assert_output( &["--path-separator=#", "foo", "--exec", "echo", "{/}"], "a.foo b.foo C.Foo2 c.foo d.foo directory_foo", ); te.assert_output( &["--path-separator=#", "foo", "--exec", "echo", "{/.}"], "a b C c d directory_foo", ); te.assert_output( &["--path-separator=#", "foo", "--exec", 
"echo", "{//}"], ". .#one .#one#two .#one#two .#one#two#three .#one#two#three", ); te.assert_output( &["--path-separator=#", "e1", "--exec", "printf", "%s.%s\n"], ".#e1 e2.", ); } /// Non-zero exit code (--quiet) #[test] fn test_quiet() { let dirs = &[]; let files = &["a.foo", "b.foo"]; let te = TestEnv::new(dirs, files); te.assert_output(&["-q"], ""); te.assert_output(&["--quiet"], ""); te.assert_output(&["--has-results"], ""); te.assert_failure_with_error(&["--quiet", "c.foo"], "") } /// Literal search (--fixed-strings) #[test] fn test_fixed_strings() { let dirs = &["test1", "test2"]; let files = &["test1/a.foo", "test1/a_foo", "test2/Download (1).tar.gz"]; let te = TestEnv::new(dirs, files); // Regex search, dot is treated as "any character" te.assert_output( &["a.foo"], "./test1/a.foo ./test1/a_foo", ); // Literal search, dot is treated as character te.assert_output(&["--fixed-strings", "a.foo"], "./test1/a.foo"); // Regex search, parens are treated as group te.assert_output(&["download (1)"], ""); // Literal search, parens are treated as characters te.assert_output( &["--fixed-strings", "download (1)"], "./test2/Download (1).tar.gz", ); // Combine with --case-sensitive te.assert_output(&["--fixed-strings", "--case-sensitive", "download (1)"], ""); } /// Filenames with invalid UTF-8 sequences #[cfg(target_os = "linux")] #[test] fn test_invalid_utf8() { use std::ffi::OsStr; use std::os::unix::ffi::OsStrExt; let dirs = &["test1"]; let files = &[]; let te = TestEnv::new(dirs, files); fs::File::create( te.test_root() .join(OsStr::from_bytes(b"test1/test_\xFEinvalid.txt")), ) .unwrap(); te.assert_output(&["", "test1/"], "test1/test_�invalid.txt"); te.assert_output(&["invalid", "test1/"], "test1/test_�invalid.txt"); // Should not be found under a different extension te.assert_output(&["-e", "zip", "", "test1/"], ""); } /// Filtering for file size (--size) #[test] fn test_size() { let te = TestEnv::new(&[], &[]); create_file_with_size(te.test_root().join("0_bytes.foo"), 0); create_file_with_size(te.test_root().join("11_bytes.foo"), 11); create_file_with_size(te.test_root().join("30_bytes.foo"), 30); create_file_with_size(te.test_root().join("3_kilobytes.foo"), 3 * 1000); create_file_with_size(te.test_root().join("4_kibibytes.foo"), 4 * 1024); // Zero and non-zero sized files. te.assert_output( &["", "--size", "+0B"], "./0_bytes.foo ./11_bytes.foo ./30_bytes.foo ./3_kilobytes.foo ./4_kibibytes.foo", ); // Zero sized files. te.assert_output(&["", "--size", "-0B"], "./0_bytes.foo"); te.assert_output(&["", "--size", "0B"], "./0_bytes.foo"); te.assert_output(&["", "--size=0B"], "./0_bytes.foo"); te.assert_output(&["", "-S", "0B"], "./0_bytes.foo"); // Files with 2 bytes or more. te.assert_output( &["", "--size", "+2B"], "./11_bytes.foo ./30_bytes.foo ./3_kilobytes.foo ./4_kibibytes.foo", ); // Files with 2 bytes or less. te.assert_output(&["", "--size", "-2B"], "./0_bytes.foo"); // Files with size between 1 byte and 11 bytes. te.assert_output(&["", "--size", "+1B", "--size", "-11B"], "./11_bytes.foo"); // Files with size equal 11 bytes. te.assert_output(&["", "--size", "11B"], "./11_bytes.foo"); // Files with size between 1 byte and 30 bytes. te.assert_output( &["", "--size", "+1B", "--size", "-30B"], "./11_bytes.foo ./30_bytes.foo", ); // Combine with a search pattern te.assert_output( &["^11_", "--size", "+1B", "--size", "-30B"], "./11_bytes.foo", ); // Files with size between 12 and 30 bytes. 
te.assert_output(&["", "--size", "+12B", "--size", "-30B"], "./30_bytes.foo"); // Files with size between 31 and 100 bytes. te.assert_output(&["", "--size", "+31B", "--size", "-100B"], ""); // Files with size between 3 kibibytes and 5 kibibytes. te.assert_output( &["", "--size", "+3ki", "--size", "-5ki"], "./4_kibibytes.foo", ); // Files with size between 3 kilobytes and 5 kilobytes. te.assert_output( &["", "--size", "+3k", "--size", "-5k"], "./3_kilobytes.foo ./4_kibibytes.foo", ); // Files with size greater than 3 kilobytes and less than 3 kibibytes. te.assert_output( &["", "--size", "+3k", "--size", "-3ki"], "./3_kilobytes.foo", ); // Files with size equal 4 kibibytes. te.assert_output( &["", "--size", "+4ki", "--size", "-4ki"], "./4_kibibytes.foo", ); te.assert_output(&["", "--size", "4ki"], "./4_kibibytes.foo"); } #[cfg(test)] fn create_file_with_modified>(path: P, duration_in_secs: u64) { let st = SystemTime::now() - Duration::from_secs(duration_in_secs); let ft = filetime::FileTime::from_system_time(st); fs::File::create(&path).expect("creation failed"); filetime::set_file_times(&path, ft, ft).expect("time modification failed"); } #[cfg(test)] fn remove_symlink>(path: P) { #[cfg(unix)] fs::remove_file(path).expect("remove symlink"); // On Windows, symlinks remember whether they point to files or directories, so try both #[cfg(windows)] fs::remove_file(path.as_ref()) .or_else(|_| fs::remove_dir(path.as_ref())) .expect("remove symlink"); } #[test] fn test_modified_relative() { let te = TestEnv::new(&[], &[]); remove_symlink(te.test_root().join("symlink")); create_file_with_modified(te.test_root().join("foo_0_now"), 0); create_file_with_modified(te.test_root().join("bar_1_min"), 60); create_file_with_modified(te.test_root().join("foo_10_min"), 600); create_file_with_modified(te.test_root().join("bar_1_h"), 60 * 60); create_file_with_modified(te.test_root().join("foo_2_h"), 2 * 60 * 60); create_file_with_modified(te.test_root().join("bar_1_day"), 24 * 60 * 60); te.assert_output( &["", "--changed-within", "15min"], "./foo_0_now ./bar_1_min ./foo_10_min", ); te.assert_output( &["", "--change-older-than", "15min"], "./bar_1_h ./foo_2_h ./bar_1_day", ); te.assert_output( &["foo", "--changed-within", "12h"], "./foo_0_now ./foo_10_min ./foo_2_h", ); } #[cfg(test)] fn change_file_modified>(path: P, iso_date: &str) { let st = humantime::parse_rfc3339(iso_date).expect("invalid date"); let ft = filetime::FileTime::from_system_time(st); filetime::set_file_times(path, ft, ft).expect("time modification failde"); } #[test] fn test_modified_absolute() { let te = TestEnv::new(&[], &["15mar2018", "30dec2017"]); remove_symlink(te.test_root().join("symlink")); change_file_modified(te.test_root().join("15mar2018"), "2018-03-15T12:00:00Z"); change_file_modified(te.test_root().join("30dec2017"), "2017-12-30T23:59:00Z"); te.assert_output( &["", "--change-newer-than", "2018-01-01 00:00:00"], "./15mar2018", ); te.assert_output( &["", "--changed-before", "2018-01-01 00:00:00"], "./30dec2017", ); } #[test] fn test_custom_path_separator() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["foo", "one", "--path-separator", "="], "one=b.foo one=two=c.foo one=two=C.Foo2 one=two=three=d.foo one=two=three=directory_foo", ); } #[test] fn test_base_directory() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--base-directory", "one"], "./b.foo ./two ./two/c.foo ./two/C.Foo2 ./two/three ./two/three/d.foo ./two/three/directory_foo", ); te.assert_output( &["--base-directory", 
"one/two", "foo"], "./c.foo ./C.Foo2 ./three/d.foo ./three/directory_foo", ); // Explicit root path te.assert_output( &["--base-directory", "one", "foo", "two"], "two/c.foo two/C.Foo2 two/three/d.foo two/three/directory_foo", ); // Ignore base directory when absolute path is used let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES); let abs_base_dir = &format!("{abs_path}/one/two", abs_path = &abs_path); te.assert_output( &["--base-directory", abs_base_dir, "foo", &abs_path], &format!( "{abs_path}/a.foo {abs_path}/one/b.foo {abs_path}/one/two/c.foo {abs_path}/one/two/C.Foo2 {abs_path}/one/two/three/d.foo {abs_path}/one/two/three/directory_foo", abs_path = &abs_path ), ); } #[test] fn test_max_results() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); // Unrestricted te.assert_output( &["--max-results=0", "c.foo"], "./one/two/C.Foo2 ./one/two/c.foo", ); // Limited to two results te.assert_output( &["--max-results=2", "c.foo"], "./one/two/C.Foo2 ./one/two/c.foo", ); // Limited to one result. We could find either C.Foo2 or c.foo let assert_just_one_result_with_option = |option| { let output = te.assert_success_and_get_output(".", &[option, "c.foo"]); let stdout = String::from_utf8_lossy(&output.stdout) .trim() .replace(&std::path::MAIN_SEPARATOR.to_string(), "/"); assert!(stdout == "./one/two/C.Foo2" || stdout == "./one/two/c.foo"); }; assert_just_one_result_with_option("--max-results=1"); assert_just_one_result_with_option("-1"); } /// Filenames with non-utf8 paths are passed to the executed program unchanged /// /// Note: /// - the test is disabled on Darwin/OSX, since it coerces file names to UTF-8, /// even when the requested file name is not valid UTF-8. /// - the test is currently disabled on Windows because I'm not sure how to create /// invalid UTF-8 files on Windows #[cfg(all(unix, not(target_os = "macos")))] #[test] fn test_exec_invalid_utf8() { use std::ffi::OsStr; use std::os::unix::ffi::OsStrExt; let dirs = &["test1"]; let files = &[]; let te = TestEnv::new(dirs, files); fs::File::create( te.test_root() .join(OsStr::from_bytes(b"test1/test_\xFEinvalid.txt")), ) .unwrap(); te.assert_output_raw( &["", "test1/", "--exec", "echo", "{}"], b"test1/test_\xFEinvalid.txt\n", ); te.assert_output_raw( &["", "test1/", "--exec", "echo", "{/}"], b"test_\xFEinvalid.txt\n", ); te.assert_output_raw(&["", "test1/", "--exec", "echo", "{//}"], b"test1\n"); te.assert_output_raw( &["", "test1/", "--exec", "echo", "{.}"], b"test1/test_\xFEinvalid\n", ); te.assert_output_raw( &["", "test1/", "--exec", "echo", "{/.}"], b"test_\xFEinvalid\n", ); } #[test] fn test_list_details() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); // Make sure we can execute 'fd --list-details' without any errors. 
te.assert_success_and_get_output(".", &["--list-details"]); } /// Make sure that fd fails if numeric arguments can not be parsed #[test] fn test_number_parsing_errors() { let te = TestEnv::new(&[], &[]); te.assert_failure(&["--threads=a"]); te.assert_failure(&["-j", ""]); te.assert_failure(&["--threads=0"]); te.assert_failure(&["--min-depth=a"]); te.assert_failure(&["--max-depth=a"]); te.assert_failure(&["--maxdepth=a"]); te.assert_failure(&["--exact-depth=a"]); te.assert_failure(&["--max-buffer-time=a"]); te.assert_failure(&["--max-results=a"]); } #[test_case("--hidden", &["--no-hidden"] ; "hidden")] #[test_case("--no-ignore", &["--ignore"] ; "no-ignore")] #[test_case("--no-ignore-vcs", &["--ignore-vcs"] ; "no-ignore-vcs")] #[test_case("--follow", &["--no-follow"] ; "follow")] #[test_case("--absolute-path", &["--relative-path"] ; "absolute-path")] #[test_case("-u", &["--ignore"] ; "u")] #[test_case("-uu", &["--ignore", "--no-hidden"] ; "uu")] fn test_opposing(flag: &str, opposing_flags: &[&str]) { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); let mut flags = vec![flag]; flags.extend_from_slice(opposing_flags); let out_no_flags = te.assert_success_and_get_output(".", &[]); let out_opposing_flags = te.assert_success_and_get_output(".", &flags); assert_eq!( out_no_flags, out_opposing_flags, "{} should override {}", opposing_flags.join(" "), flag ); } /// Print error if search pattern starts with a dot and --hidden is not set /// (Unix only, hidden files on Windows work differently) #[test] #[cfg(unix)] fn test_error_if_hidden_not_set_and_pattern_starts_with_dot() { let te = TestEnv::new(&[], &[".gitignore", ".whatever", "non-hidden"]); te.assert_failure(&["^\\.gitignore"]); te.assert_failure(&["--glob", ".gitignore"]); te.assert_output(&["--hidden", "^\\.gitignore"], "./.gitignore"); te.assert_output(&["--hidden", "--glob", ".gitignore"], "./.gitignore"); te.assert_output(&[".gitignore"], ""); } #[test] fn test_strip_cwd_prefix() { let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES); te.assert_output( &["--strip-cwd-prefix", "."], "a.foo e1 e2 one one/b.foo one/two one/two/c.foo one/two/C.Foo2 one/two/three one/two/three/d.foo one/two/three/directory_foo symlink", ); }