findutils-0.7.0/.cargo_vcs_info.json0000644000000001360000000000100130410ustar { "git": { "sha1": "e3f4224f1212768e6844e3d2e8e7c8f82bf4c7cb" }, "path_in_vcs": "" }findutils-0.7.0/.github/dependabot.yml000064400000000000000000000004131046102023000160170ustar 00000000000000version: 2 updates: - package-ecosystem: "cargo" directory: "/" schedule: interval: "daily" open-pull-requests-limit: 5 - package-ecosystem: "github-actions" directory: "/" schedule: interval: daily open-pull-requests-limit: 5 findutils-0.7.0/.github/workflows/ci.yml000064400000000000000000000113711046102023000163470ustar 00000000000000on: [push, pull_request] name: Basic CI jobs: check: name: cargo check runs-on: ${{ matrix.os }} strategy: matrix: os: [ubuntu-latest, macOS-latest, windows-latest] steps: - uses: actions/checkout@v4 - name: Install `rust` toolchain run: | ## Install `rust` toolchain rustup toolchain install stable --no-self-update -c rustfmt --profile minimal rustup default stable # For bindgen: https://github.com/rust-lang/rust-bindgen/issues/1797 - uses: KyleMayes/install-llvm-action@v2 if: matrix.os == 'windows-latest' with: version: "11.0" directory: ${{ runner.temp }}/llvm - run: echo "LIBCLANG_PATH=$((gcm clang).source -replace "clang.exe")" >> $env:GITHUB_ENV if: matrix.os == 'windows-latest' - name: Check run: | cargo check --all --all-features test: name: cargo test runs-on: ${{ matrix.os }} strategy: matrix: os: [ubuntu-latest, macOS-latest, windows-latest] steps: - uses: actions/checkout@v4 - name: Install `rust` toolchain run: | ## Install `rust` toolchain rustup toolchain install stable --no-self-update -c rustfmt --profile minimal rustup default stable # For bindgen: https://github.com/rust-lang/rust-bindgen/issues/1797 - uses: KyleMayes/install-llvm-action@v2 if: matrix.os == 'windows-latest' with: version: "11.0" directory: ${{ runner.temp }}/llvm - run: echo "LIBCLANG_PATH=$((gcm clang).source -replace "clang.exe")" >> $env:GITHUB_ENV if: matrix.os == 'windows-latest' - name: Test run: | cargo test fmt: name: cargo fmt --all -- --check runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - name: Install `rust` toolchain run: | ## Install `rust` toolchain rustup toolchain install stable --no-self-update -c rustfmt --profile minimal rustup default stable - run: rustup component add rustfmt - name: cargo fmt run: | cargo fmt --all -- --check clippy: name: cargo clippy -- -D warnings runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - name: Install `rust` toolchain run: | ## Install `rust` toolchain rustup toolchain install stable --no-self-update -c rustfmt --profile minimal rustup default stable - run: rustup component add clippy - name: cargo clippy run: | cargo clippy --all-targets -- -D warnings grcov: name: Code coverage runs-on: ${{ matrix.os }} strategy: matrix: os: - ubuntu-latest toolchain: - nightly cargo_flags: - "--all-features" steps: - name: Checkout source code uses: actions/checkout@v4 - name: Install `rust` toolchain run: | ## Install `rust` toolchain rustup toolchain install nightly --no-self-update -c rustfmt --profile minimal rustup default nightly - name: "`grcov` ~ install" run: cargo install grcov - name: cargo test run: | cargo test --all --no-fail-fast ${{ matrix.cargo_flags }} env: CARGO_INCREMENTAL: "0" RUSTFLAGS: "-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort -Cdebug-assertions=off" RUSTDOCFLAGS: "-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off 
-Zpanic_abort_tests -Cpanic=abort -Cdebug-assertions=off" - name: Generate coverage data id: grcov run: | grcov target/debug/ \ --branch \ --llvm \ --source-dir . \ --output-path lcov.info \ --ignore-not-existing \ --excl-line "#\\[derive\\(" \ --excl-br-line "#\\[derive\\(" \ --excl-start "#\\[cfg\\(test\\)\\]" \ --excl-br-start "#\\[cfg\\(test\\)\\]" \ --commit-sha ${{ github.sha }} \ --service-job-id ${{ github.job }} \ --service-name "GitHub Actions" \ --service-number ${{ github.run_id }} - name: Upload coverage as artifact uses: actions/upload-artifact@v4 with: name: lcov.info # path: ${{ steps.grcov.outputs.report }} path: lcov.info - name: Upload coverage to codecov.io uses: codecov/codecov-action@v4 with: token: ${{ secrets.CODECOV_TOKEN }} # file: ${{ steps.grcov.outputs.report }} file: lcov.info fail_ci_if_error: true findutils-0.7.0/.github/workflows/comment.yml000064400000000000000000000050131046102023000174120ustar 00000000000000on: workflow_run: workflows: [External-testsuites] types: [completed] name: Comment Test results on the PR permissions: {} jobs: upload-pr-comment: if: ${{ github.event.workflow_run.event == 'pull_request' }} name: Upload PR comment runs-on: ubuntu-latest permissions: actions: read pull-requests: write steps: - name: List Annotations uses: actions/github-script@v7 with: script: | let artifacts = await github.rest.actions.listWorkflowRunArtifacts({ owner: context.repo.owner, repo: context.repo.repo, run_id: ${{ github.event.workflow_run.id }}, }); // List all artifacts let matchArtifact = artifacts.data.artifacts.filter((artifact) => { return artifact.name == "comment" })[0]; // Download the artifact to github.workspace let download = await github.rest.actions.downloadArtifact({ owner: context.repo.owner, repo: context.repo.repo, artifact_id: matchArtifact.id, archive_format: 'zip', }); let fs = require('fs'); fs.writeFileSync('${{ github.workspace }}/comment.zip', Buffer.from(download.data)); - run: unzip comment.zip - name: Comment on PR uses: actions/github-script@v7 with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | let fs = require('fs'); let annotations = JSON.parse(fs.readFileSync('./annotations.json', 'utf8')); let annotationContent = annotations .data .map(annotation => `${annotation.run}: ${annotation.annotation.message}`) .join('\n'); // check if no changes let gnuTestReport = annotationContent.includes('Run GNU findutils tests: Gnu tests No changes'); let bfsTestReport = annotationContent.includes('Run BFS tests: BFS tests No changes'); if (gnuTestReport && bfsTestReport) { console.log('No changes'); return; } // Comment on the PR github.rest.issues.createComment({ owner: context.repo.owner, repo: context.repo.repo, issue_number: annotations.pull_request_number, body: 'Commit ${{ github.event.workflow_run.head_sha }} has GNU testsuite comparison:\n```\n' + annotationContent + '\n```\n' });findutils-0.7.0/.github/workflows/compat.yml000064400000000000000000000144271046102023000172440ustar 00000000000000on: [push, pull_request] name: External-testsuites jobs: gnu-tests: name: Run GNU findutils tests runs-on: ubuntu-latest steps: - name: Checkout findutils uses: actions/checkout@v4 with: path: findutils - name: Checkout GNU findutils uses: actions/checkout@v4 with: repository: gnu-mirror-unofficial/findutils path: findutils.gnu ref: 5768a03ddfb5e18b1682e339d6cdd24ff721c510 submodules: true - name: Install `rust` toolchain run: | ## Install `rust` toolchain rustup toolchain install stable --no-self-update -c rustfmt --profile minimal 
rustup default stable - name: Install dependencies shell: bash run: | # Enable sources & install dependencies sudo find /etc/apt/sources.list* -type f -exec sed -i 'p; s/^deb /deb-src /' '{}' + sudo apt-get update sudo apt-get build-dep findutils - name: Run GNU tests shell: bash run: | cd findutils bash util/build-gnu.sh ||: - name: Extract testing info shell: bash run: | - name: Upload gnu-test-report uses: actions/upload-artifact@v4 with: name: gnu-test-report path: | findutils.gnu/find/testsuite/*.log findutils.gnu/xargs/testsuite/*.log findutils.gnu/tests/**/*.log - name: Upload gnu-result uses: actions/upload-artifact@v4 with: name: gnu-result path: gnu-result.json - name: Download the result uses: dawidd6/action-download-artifact@v6 with: workflow: compat.yml workflow_conclusion: completed name: gnu-result repo: uutils/findutils branch: main path: dl - name: Download the log uses: dawidd6/action-download-artifact@v6 with: workflow: compat.yml workflow_conclusion: completed name: gnu-test-report repo: uutils/findutils branch: main path: dl - name: Compare failing tests against master shell: bash run: | ./findutils/util/diff-gnu.sh ./dl ./findutils.gnu - name: Compare against main results shell: bash run: | mv dl/gnu-result.json latest-gnu-result.json python findutils/util/compare_gnu_result.py bfs-tests: name: Run BFS tests runs-on: ubuntu-latest steps: - name: Checkout findutils uses: actions/checkout@v4 with: path: findutils - name: Checkout BFS uses: actions/checkout@v4 with: repository: tavianator/bfs path: bfs ref: "4.0" - name: Install `rust` toolchain run: | ## Install `rust` toolchain rustup toolchain install stable --no-self-update -c rustfmt --profile minimal rustup default stable - name: Install dependencies shell: bash run: | # Enable sources & install dependencies sudo find /etc/apt/sources.list* -type f -exec sed -i 'p; s/^deb /deb-src /' '{}' + sudo apt-get update sudo apt-get build-dep bfs - name: Run BFS tests shell: bash run: | cd findutils bash util/build-bfs.sh ||: - name: Upload bfs-test-report uses: actions/upload-artifact@v4 with: name: bfs-test-report path: bfs/tests.log - name: Upload bfs-result uses: actions/upload-artifact@v4 with: name: bfs-result path: bfs-result.json - name: Download the result uses: dawidd6/action-download-artifact@v6 with: workflow: compat.yml workflow_conclusion: completed name: bfs-result repo: uutils/findutils branch: main path: dl - name: Download the log uses: dawidd6/action-download-artifact@v6 with: workflow: compat.yml workflow_conclusion: completed name: bfs-test-report repo: uutils/findutils branch: main path: dl - name: Compare failing tests against main shell: bash run: | ./findutils/util/diff-bfs.sh dl/tests.log bfs/tests.log - name: Compare against main results shell: bash run: | mv dl/bfs-result.json latest-bfs-result.json python findutils/util/compare_bfs_result.py upload-annotations: name: Upload annotations runs-on: ubuntu-latest needs: [gnu-tests, bfs-tests] if: ${{ github.event_name == 'pull_request' }} steps: - name: List Annotations uses: actions/github-script@v7 with: script: | let runs = await github.rest.checks.listForRef({ owner: context.repo.owner, repo: context.repo.repo, ref: '${{ github.event.pull_request.head.sha }}' }); let names = ['Run GNU findutils tests', 'Run BFS tests']; let results = []; runs.data.check_runs.filter(check => names.includes(check.name)).forEach(run => results.push(run)); let annotations = { data: [], pull_request_number: '${{ github.event.number }}' }; for (let result of results) { 
let run = await github.rest.checks.listAnnotations({ owner: context.repo.owner, repo: context.repo.repo, check_run_id: result.id }); run.data.forEach(data => { annotations.data.push({ run: result.name, annotation: data }); }); } // Remove duplicate items. annotations.data = annotations.data.filter((value, index, self) => self.findIndex(v => v.annotation.message === value.annotation.message) === index); let fs = require('fs'); fs.writeFileSync('${{ github.workspace }}/annotations.json', JSON.stringify(annotations)); - name: Upload annotations uses: actions/upload-artifact@v4 with: name: comment path: annotations.jsonfindutils-0.7.0/.github/workflows/release.yml000064400000000000000000000261261046102023000174000ustar 00000000000000# Copyright 2022-2023, axodotdev # SPDX-License-Identifier: MIT or Apache-2.0 # # CI that: # # * checks for a Git Tag that looks like a release # * builds artifacts with cargo-dist (archives, installers, hashes) # * uploads those artifacts to temporary workflow zip # * on success, uploads the artifacts to a Github Release # # Note that the Github Release will be created with a generated # title/body based on your changelogs. name: Release permissions: contents: write # This task will run whenever you push a git tag that looks like a version # like "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc. # Various formats will be parsed into a VERSION and an optional PACKAGE_NAME, where # PACKAGE_NAME must be the name of a Cargo package in your workspace, and VERSION # must be a Cargo-style SemVer Version (must have at least major.minor.patch). # # If PACKAGE_NAME is specified, then the announcement will be for that # package (erroring out if it doesn't have the given version or isn't cargo-dist-able). # # If PACKAGE_NAME isn't specified, then the announcement will be for all # (cargo-dist-able) packages in the workspace with that version (this mode is # intended for workspaces with only one dist-able package, or with all dist-able # packages versioned/released in lockstep). # # If you push multiple tags at once, separate instances of this workflow will # spin up, creating an independent announcement for each one. However Github # will hard limit this to 3 tags per commit, as it will assume more tags is a # mistake. # # If there's a prerelease-style suffix to the version, then the release(s) # will be marked as a prerelease. on: push: tags: - '**[0-9]+.[0-9]+.[0-9]+*' pull_request: jobs: # Run 'cargo dist plan' (or host) to determine what tasks we need to do plan: runs-on: ubuntu-latest outputs: val: ${{ steps.plan.outputs.manifest }} tag: ${{ !github.event.pull_request && github.ref_name || '' }} tag-flag: ${{ !github.event.pull_request && format('--tag={0}', github.ref_name) || '' }} publishing: ${{ !github.event.pull_request }} env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} steps: - uses: actions/checkout@v4 with: submodules: recursive - name: Install cargo-dist # we specify bash to get pipefail; it guards against the `curl` command # failing. otherwise `sh` won't catch that `curl` returned non-0 shell: bash run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.12.0/cargo-dist-installer.sh | sh" # sure would be cool if github gave us proper conditionals... # so here's a doubly-nested ternary-via-truthiness to try to provide the best possible # functionality based on whether this is a pull_request, and whether it's from a fork. 
# (PRs run on the *source* but secrets are usually on the *target* -- that's *good* # but also really annoying to build CI around when it needs secrets to work right.) - id: plan run: | cargo dist ${{ (!github.event.pull_request && format('host --steps=create --tag={0}', github.ref_name)) || 'plan' }} --output-format=json > plan-dist-manifest.json echo "cargo dist ran successfully" cat plan-dist-manifest.json echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT" - name: "Upload dist-manifest.json" uses: actions/upload-artifact@v4 with: name: artifacts-plan-dist-manifest path: plan-dist-manifest.json # Build and packages all the platform-specific things build-local-artifacts: name: build-local-artifacts (${{ join(matrix.targets, ', ') }}) # Let the initial task tell us to not run (currently very blunt) needs: - plan if: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix.include != null && (needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload') }} strategy: fail-fast: false # Target platforms/runners are computed by cargo-dist in create-release. # Each member of the matrix has the following arguments: # # - runner: the github runner # - dist-args: cli flags to pass to cargo dist # - install-dist: expression to run to install cargo-dist on the runner # # Typically there will be: # - 1 "global" task that builds universal installers # - N "local" tasks that build each platform's binaries and platform-specific installers matrix: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix }} runs-on: ${{ matrix.runner }} env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} BUILD_MANIFEST_NAME: target/distrib/${{ join(matrix.targets, '-') }}-dist-manifest.json steps: - uses: actions/checkout@v4 with: submodules: recursive - uses: swatinem/rust-cache@v2 - name: Install cargo-dist run: ${{ matrix.install_dist }} # Get the dist-manifest - name: Fetch local artifacts uses: actions/download-artifact@v4 with: pattern: artifacts-* path: target/distrib/ merge-multiple: true - name: Install dependencies run: | ${{ matrix.packages_install }} - name: Build artifacts run: | # Actually do builds and make zips and whatnot cargo dist build ${{ needs.plan.outputs.tag-flag }} --print=linkage --output-format=json ${{ matrix.dist_args }} > dist-manifest.json echo "cargo dist ran successfully" - id: cargo-dist name: Post-build # We force bash here just because github makes it really hard to get values up # to "real" actions without writing to env-vars, and writing to env-vars has # inconsistent syntax between shell and powershell. shell: bash run: | # Parse out what we just built and upload it to scratch storage echo "paths<> "$GITHUB_OUTPUT" jq --raw-output ".artifacts[]?.path | select( . 
!= null )" dist-manifest.json >> "$GITHUB_OUTPUT" echo "EOF" >> "$GITHUB_OUTPUT" cp dist-manifest.json "$BUILD_MANIFEST_NAME" - name: "Upload artifacts" uses: actions/upload-artifact@v4 with: name: artifacts-build-local-${{ join(matrix.targets, '_') }} path: | ${{ steps.cargo-dist.outputs.paths }} ${{ env.BUILD_MANIFEST_NAME }} # Build and package all the platform-agnostic(ish) things build-global-artifacts: needs: - plan - build-local-artifacts runs-on: "ubuntu-20.04" env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json steps: - uses: actions/checkout@v4 with: submodules: recursive - name: Install cargo-dist shell: bash run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.12.0/cargo-dist-installer.sh | sh" # Get all the local artifacts for the global tasks to use (for e.g. checksums) - name: Fetch local artifacts uses: actions/download-artifact@v4 with: pattern: artifacts-* path: target/distrib/ merge-multiple: true - id: cargo-dist shell: bash run: | cargo dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json echo "cargo dist ran successfully" # Parse out what we just built and upload it to scratch storage echo "paths<> "$GITHUB_OUTPUT" jq --raw-output ".artifacts[]?.path | select( . != null )" dist-manifest.json >> "$GITHUB_OUTPUT" echo "EOF" >> "$GITHUB_OUTPUT" cp dist-manifest.json "$BUILD_MANIFEST_NAME" - name: "Upload artifacts" uses: actions/upload-artifact@v4 with: name: artifacts-build-global path: | ${{ steps.cargo-dist.outputs.paths }} ${{ env.BUILD_MANIFEST_NAME }} # Determines if we should publish/announce host: needs: - plan - build-local-artifacts - build-global-artifacts # Only run if we're "publishing", and only if local and global didn't fail (skipped is fine) if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.build-local-artifacts.result == 'skipped' || needs.build-local-artifacts.result == 'success') }} env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} runs-on: "ubuntu-20.04" outputs: val: ${{ steps.host.outputs.manifest }} steps: - uses: actions/checkout@v4 with: submodules: recursive - name: Install cargo-dist run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.12.0/cargo-dist-installer.sh | sh" # Fetch artifacts from scratch-storage - name: Fetch artifacts uses: actions/download-artifact@v4 with: pattern: artifacts-* path: target/distrib/ merge-multiple: true # This is a harmless no-op for Github Releases, hosting for that happens in "announce" - id: host shell: bash run: | cargo dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json echo "artifacts uploaded and released successfully" cat dist-manifest.json echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT" - name: "Upload dist-manifest.json" uses: actions/upload-artifact@v4 with: # Overwrite the previous copy name: artifacts-dist-manifest path: dist-manifest.json # Create a Github Release while uploading all files to it announce: needs: - plan - host # use "always() && ..." to allow us to wait for all publish jobs while # still allowing individual publish jobs to skip themselves (for prereleases). # "host" however must run to completion, no skipping allowed! 
if: ${{ always() && needs.host.result == 'success' }} runs-on: "ubuntu-20.04" env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} steps: - uses: actions/checkout@v4 with: submodules: recursive - name: "Download Github Artifacts" uses: actions/download-artifact@v4 with: pattern: artifacts-* path: artifacts merge-multiple: true - name: Cleanup run: | # Remove the granular manifests rm -f artifacts/*-dist-manifest.json - name: Create Github Release uses: ncipollo/release-action@v1 with: tag: ${{ needs.plan.outputs.tag }} name: ${{ fromJson(needs.host.outputs.val).announcement_title }} body: ${{ fromJson(needs.host.outputs.val).announcement_github_body }} prerelease: ${{ fromJson(needs.host.outputs.val).announcement_is_prerelease }} artifacts: "artifacts/*" findutils-0.7.0/.gitignore000064400000000000000000000001141046102023000136150ustar 00000000000000target .gitignore .project .cargo .settings test_data/links/link-* /public/ findutils-0.7.0/.pre-commit-config.yaml000064400000000000000000000010561046102023000161140ustar 00000000000000repos: - repo: local hooks: - id: rust-linting name: Rust linting description: Run cargo fmt on files included in the commit. entry: cargo +nightly fmt -- pass_filenames: true types: [file, rust] language: system - id: rust-clippy name: Rust clippy description: Run cargo clippy on files included in the commit. entry: cargo +nightly clippy --workspace --all-targets --all-features -- pass_filenames: false types: [file, rust] language: system findutils-0.7.0/CODE_OF_CONDUCT.md000064400000000000000000000121531046102023000144320ustar 00000000000000# Contributor Covenant Code of Conduct ## Our Pledge We as members, contributors, and leaders pledge to make participation in our community a harassment-free experience for everyone, regardless of age, body size, visible or invisible disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socioeconomic status, nationality, personal appearance, race, religion, or sexual identity and orientation. We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and healthy community. ## Our Standards Examples of behavior that contributes to a positive environment for our community include: * Demonstrating empathy and kindness toward other people * Being respectful of differing opinions, viewpoints, and experiences * Giving and gracefully accepting constructive feedback * Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience * Focusing on what is best not just for us as individuals, but for the overall community Examples of unacceptable behavior include: * The use of sexualized language or imagery, and sexual attention or advances of any kind * Trolling, insulting or derogatory comments, and personal or political attacks * Public or private harassment * Publishing others' private information, such as a physical or email address, without their explicit permission * Other conduct which could reasonably be considered inappropriate in a professional setting ## Enforcement Responsibilities Community leaders are responsible for clarifying and enforcing our standards of acceptable behavior and will take appropriate and fair corrective action in response to any behavior that they deem inappropriate, threatening, offensive, or harmful. 
Community leaders have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, and will communicate reasons for moderation decisions when appropriate. ## Scope This Code of Conduct applies within all community spaces, and also applies when an individual is officially representing the community in public spaces. Examples of representing our community include using an official e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. ## Enforcement Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community leaders responsible for enforcement at sylvestre@debian.org. All complaints will be reviewed and investigated promptly and fairly. All community leaders are obligated to respect the privacy and security of the reporter of any incident. ## Enforcement Guidelines Community leaders will follow these Community Impact Guidelines in determining the consequences for any action they deem in violation of this Code of Conduct: ### 1. Correction **Community Impact**: Use of inappropriate language or other behavior deemed unprofessional or unwelcome in the community. **Consequence**: A private, written warning from community leaders, providing clarity around the nature of the violation and an explanation of why the behavior was inappropriate. A public apology may be requested. ### 2. Warning **Community Impact**: A violation through a single incident or series of actions. **Consequence**: A warning with consequences for continued behavior. No interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, for a specified period of time. This includes avoiding interactions in community spaces as well as external channels like social media. Violating these terms may lead to a temporary or permanent ban. ### 3. Temporary Ban **Community Impact**: A serious violation of community standards, including sustained inappropriate behavior. **Consequence**: A temporary ban from any sort of interaction or public communication with the community for a specified period of time. No public or private interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, is allowed during this period. Violating these terms may lead to a permanent ban. ### 4. Permanent Ban **Community Impact**: Demonstrating a pattern of violation of community standards, including sustained inappropriate behavior, harassment of an individual, or aggression toward or disparagement of classes of individuals. **Consequence**: A permanent ban from any sort of public interaction within the community. ## Attribution This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 2.0, available at . Community Impact Guidelines were inspired by [Mozilla's code of conduct enforcement ladder](https://github.com/mozilla/diversity). [homepage]: https://www.contributor-covenant.org For answers to common questions about this code of conduct, see the FAQ at . Translations are available at . findutils-0.7.0/CONTRIBUTING.md000064400000000000000000000237571046102023000141000ustar 00000000000000 # Contributing to findutils Hi! Welcome to uutils/findutils! Thanks for wanting to contribute to this project! This document explains everything you need to know to contribute. 
Before you start, make sure to also check out these documents:

- Our community's [CODE_OF_CONDUCT.md](./CODE_OF_CONDUCT.md).
- [DEVELOPMENT.md](./DEVELOPMENT.md) for setting up your development environment.

Now follows a very important warning:

> [!WARNING]
> uutils is original code and cannot contain any code from GNU or
> other implementations. This means that **we cannot accept any changes based on
> the GNU source code**. To make sure that cannot happen, **you cannot link to
> the GNU source code** either. It is however possible to look at other implementations
> under a BSD or MIT license like [Apple's implementation](https://opensource.apple.com/source/file_cmds/)
> or [OpenBSD](https://github.com/openbsd/src/tree/master/bin).

Finally, feel free to join our [Discord](https://discord.gg/wQVJbvJ)!

## Design Goals

We have the following goals with our development:

- **Compatible**: The utilities should be a drop-in replacement for GNU findutils.
- **Cross-platform**: All utilities should run on as many of the supported platforms as possible.
- **Reliable**: The utilities should never unexpectedly fail.
- **Performant**: Our utilities should be written in fast, idiomatic Rust. We aim to match or exceed the performance of the GNU utilities.
- **Well-tested**: We should have a lot of tests to be able to guarantee reliability and compatibility.

## How to Help

There are several ways to help, and writing code is just one of them. Reporting issues and writing documentation are just as important as writing code.

### Reporting Issues

We can't fix bugs we don't know about, so good issues are super helpful! Here are some tips for writing good issues:

- If you find a bug, make sure it's still a problem on the `main` branch.
- Search through the existing issues to see whether it has already been reported.
- Make sure to include all relevant information, such as:
  - Which version of uutils did you check?
  - Which version of GNU findutils are you comparing with?
  - What platform are you on?
- Provide a way to reliably reproduce the issue.
- Be as specific as possible!

### Writing Documentation

There's never enough documentation. If you come across any documentation that could be improved, feel free to submit a PR for it!

### Writing Code

If you want to submit a PR, make sure that you've discussed the solution with the maintainers beforehand. We want to avoid situations where you put a lot of work into a fix that we can't merge! If there's no issue for what you're trying to fix yet, make one _before_ you start working on the PR.

Generally, we try to follow what GNU is doing in terms of options and behavior. It is recommended to look at the GNU findutils manual ([on the web](https://www.gnu.org/software/findutils/manual/html_node/index.html), or locally using `info `). It is more in depth than the man pages and provides a good description of available features and their implementation details. But remember, you cannot look at the GNU source code!

Also remember that we can only merge PRs which pass our test suite, follow rustfmt, and do not have any warnings from clippy. See [DEVELOPMENT.md](./DEVELOPMENT.md) for more information. Be sure to also read about our [Rust style](#our-rust-style).

## Our Rust Style

We want uutils to be written in idiomatic Rust, so here are some guidelines to follow. Some of these are aspirational, meaning that we don't do them correctly everywhere in the code. If you find violations of the advice below, feel free to submit a patch!
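To give a flavor of these guidelines before the details, here is a minimal, hypothetical sketch (the function and file name are made up for illustration and are not taken from the findutils code base): it keeps paths as `Path` rather than `String`, propagates errors instead of panicking, and only converts to UTF-8 lossily for display.

```rust
use std::fs;
use std::io;
use std::path::Path;

/// Returns the size of `path` in bytes.
///
/// The path is kept as a `Path` (not a `String`) so that file names that are
/// not valid UTF-8 still work, and errors are returned to the caller instead
/// of panicking.
fn file_size(path: &Path) -> io::Result<u64> {
    // Propagate the error with `?` rather than calling `.unwrap()`.
    let metadata = fs::metadata(path)?;
    Ok(metadata.len())
}

fn main() {
    // `Path::display()` converts lossily for output, so unusual file names
    // never cause a panic.
    let path = Path::new("Cargo.toml");
    match file_size(path) {
        Ok(size) => println!("{}: {size} bytes", path.display()),
        Err(err) => eprintln!("{}: {err}", path.display()),
    }
}
```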
### Don't `panic!`

The utilities should be very reliable. This means that we should never `panic!`. Therefore, you should avoid using `.unwrap()` and `panic!`. Sometimes the use of `unreachable!` can be justified with a comment explaining why that code is unreachable.

### Don't `exit`

We want uutils to be embeddable in other programs. This means that no function in uutils should exit the program. Doing so would also lead to code with more confusing control flow. Therefore, avoid `std::process::exit` and similar functions which exit the program early.

### `unsafe`

uutils cannot be entirely safe, because we have to call out to `libc` and do syscalls. However, we still want to limit our use of `unsafe`. We generally only accept `unsafe` for FFI, with very few exceptions. Note that performance is very rarely a valid argument for using `unsafe`.

If you still need to write code with `unsafe`, make sure to read the [Rustonomicon](https://doc.rust-lang.org/nomicon/intro.html) and annotate the calls with `// SAFETY:` comments explaining why the use of `unsafe` is sound.

### Macros

Macros can be a great tool, but they are also usually hard to understand. They should be used sparingly. Make sure to explore simpler options before you reach for a solution involving macros.

### `str`, `OsStr` & `Path`

Rust has many string-like types, and sometimes it's hard to choose the right one. It's tempting to use `str` (and `String`) for everything, but that is not always the right choice for uutils, because we need to support invalid UTF-8, just like the GNU utilities. For example, paths on Linux might not be valid UTF-8!

Whenever we are dealing with paths, we should therefore stick with `OsStr` and `Path`. Make sure that you only convert to `str`/`String` if you know that something is always valid UTF-8. If you need more operations on `OsStr`, you can use the [`bstr`](https://docs.rs/bstr/latest/bstr/) crate.

### Doc-comments

We use rustdoc for our documentation, so it's best to follow [rustdoc's guidelines](https://doc.rust-lang.org/rustdoc/how-to-write-documentation.html#documenting-components). Make sure that your documentation is not just repeating the name of the function, but actually giving more useful information. Rustdoc recommends the following structure:

```
[short sentence explaining what it is]
[more detailed explanation]
[at least one code example that users can copy/paste to try it]
[even more advanced explanations if necessary]
```

### Other comments

Comments should be written to _explain_ the code, not to _describe_ the code. Try to focus on explaining _why_ the code is the way it is. If you feel like you have to describe the code, that's usually a sign that you could improve the naming of variables and functions.

If you edit a piece of code, make sure to update any comments that need to change as a result. The only thing worse than having no comments is having outdated comments!

## Git Etiquette

To ensure easy collaboration, we have guidelines for using Git and GitHub.

### Commits

- Make small and atomic commits.
- Keep a clean history of commits.
- Write informative commit messages.
- Annotate your commit message with the component you're editing. For example: `cp: do not overwrite with -i` or `uucore: add support for FreeBSD`.
- Do not unnecessarily move items around in the code. This makes the changes much harder to review. If you do need to move things around, do that in a separate commit.
### Commit messages You can read this section in the Git book to learn how to write good commit messages: https://git-scm.com/book/ch5-2.html. In addition, here are a few examples for a summary line when committing to uutils: - commit for a single utility ``` nohup: cleanup and refactor ``` - commit for a utility's tests ``` tests/rm: test new feature ``` Beyond changes to an individual utility or its tests, other summary lines for non-utility modules include: ``` README: add help uucore: add new modules uutils: add new utility gitignore: add temporary files ``` ### PRs - Make the titles of PRs descriptive. - This means describing the problem you solve. For example, do not write `Fix #1234`, but `ls: fix version sort order`. - You can prefix the title with the utility the PR concerns. - Keep PRs small and self-contained. A set of small PRs is much more likely to get merged quickly than one large PR. - Make sure the CI passes (up to intermittently failing tests). - You know your code best, that's why it's best if you can solve merge conflicts on your branch yourself. - It's up to you whether you want to use `git merge main` or `git rebase main`. - Feel free to ask for help with merge conflicts. - You do not need to ping maintainers to request a review, but it's fine to do so if you don't get a response within a few days. ## Platforms We take pride in supporting many operating systems and architectures. Any code you contribute must at least compile without warnings for all platforms in the CI. However, you can use `#[cfg(...)]` attributes to create platform dependent features. **Tip:** For Windows, Microsoft provides some images (VMWare, Hyper-V, VirtualBox and Parallels) for development: ## Licensing uutils is distributed under the terms of the MIT License; see the `LICENSE` file for details. This is a permissive license, which allows the software to be used with few restrictions. Copyrights in the uutils project are retained by their contributors, and no copyright assignment is required to contribute. If you wish to add or change dependencies as part of a contribution to the project, a tool like `cargo-license` can be used to show their license details. The following types of license are acceptable: - MIT License - Dual- or tri-license with an MIT License option ("Apache-2.0 or MIT" is a popular combination) - "MIT equivalent" license (2-clause BSD, 3-clause BSD, ISC) - License less restrictive than the MIT License (CC0 1.0 Universal) - Apache License version 2.0 Licenses we will not use: - An ambiguous license, or no license - Strongly reciprocal licenses (GNU GPL, GNU LGPL) If you wish to add a reference but it doesn't meet these requirements, please raise an issue to describe the dependency. findutils-0.7.0/Cargo.lock0000644000000731350000000000100110250ustar # This file is automatically @generated by Cargo. # It is not intended for manual editing. 
version = 3 [[package]] name = "aho-corasick" version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" dependencies = [ "memchr", ] [[package]] name = "android-tzdata" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" [[package]] name = "android_system_properties" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" dependencies = [ "libc", ] [[package]] name = "anstream" version = "0.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96b09b5178381e0874812a9b157f7fe84982617e48f71f4e3235482775e5b540" dependencies = [ "anstyle", "anstyle-parse", "anstyle-query", "anstyle-wincon", "colorchoice", "utf8parse", ] [[package]] name = "anstyle" version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1" [[package]] name = "anstyle-parse" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b" dependencies = [ "windows-sys 0.48.0", ] [[package]] name = "anstyle-wincon" version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0699d10d2f4d628a98ee7b57b289abbc98ff3bad977cb3152709d4bf2330628" dependencies = [ "anstyle", "windows-sys 0.48.0", ] [[package]] name = "assert_cmd" version = "2.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc1835b7f27878de8525dc71410b5a31cdcc5f230aed5ba5df968e09c201b23d" dependencies = [ "anstyle", "bstr", "doc-comment", "libc", "predicates", "predicates-core", "predicates-tree", "wait-timeout", ] [[package]] name = "autocfg" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "bitflags" version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" [[package]] name = "bstr" version = "1.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05efc5cfd9110c8416e471df0e96702d58690178e206e61b7173706673c93706" dependencies = [ "memchr", "regex-automata", "serde", ] [[package]] name = "bumpalo" version = "3.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" [[package]] name = "cc" version = "1.0.72" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22a9137b95ea06864e018375b72adfb7db6e6f68cfc8df5a04d00288050485ee" [[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "cfg_aliases" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e" [[package]] name = "cfg_aliases" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" [[package]] name = "chrono" version = "0.4.38" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" dependencies = [ "android-tzdata", "iana-time-zone", "js-sys", "num-traits", "wasm-bindgen", "windows-targets 0.52.6", ] [[package]] name = "clap" version = "4.5.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e5a21b8495e732f1b3c364c9949b201ca7bae518c502c80256c96ad79eaf6ac" dependencies = [ "clap_builder", ] [[package]] name = "clap_builder" version = "4.5.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8cf2dd12af7a047ad9d6da2b6b249759a22a7abc0f474c1dae1777afa4b21a73" dependencies = [ "anstream", "anstyle", "clap_lex", "strsim", "terminal_size", ] [[package]] name = "clap_lex" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce" [[package]] name = "colorchoice" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" [[package]] name = "core-foundation-sys" version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" [[package]] name = "diff" version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" [[package]] name = "difflib" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8" [[package]] name = "doc-comment" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" [[package]] name = "dunce" version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56ce8c6da7551ec6c462cbaf3bfbc75131ebbfa1c944aeaa9dab51ca1c5f0c3b" [[package]] name = "errno" version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" dependencies = [ "libc", "windows-sys 0.52.0", ] [[package]] name = "faccess" version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "59ae66425802d6a903e268ae1a08b8c38ba143520f227a205edf4e9c7e3e26d5" dependencies = [ "bitflags 1.3.2", "libc", "winapi", ] [[package]] name = "fastrand" version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" [[package]] name = "filetime" version = "0.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586" dependencies = [ "cfg-if", "libc", "libredox", "windows-sys 0.59.0", ] 
[[package]] name = "findutils" version = "0.7.0" dependencies = [ "assert_cmd", "chrono", "clap", "faccess", "filetime", "nix 0.29.0", "once_cell", "onig", "predicates", "pretty_assertions", "regex", "serial_test", "tempfile", "uucore", "walkdir", ] [[package]] name = "float-cmp" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "98de4bbd547a563b716d8dfa9aad1cb19bfab00f4fa09a6a4ed21dbcf44ce9c4" dependencies = [ "num-traits", ] [[package]] name = "futures" version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f73fe65f54d1e12b726f517d3e2135ca3125a437b6d998caf1962961f7172d9e" dependencies = [ "futures-channel", "futures-core", "futures-executor", "futures-io", "futures-sink", "futures-task", "futures-util", ] [[package]] name = "futures-channel" version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3083ce4b914124575708913bca19bfe887522d6e2e6d0952943f5eac4a74010" dependencies = [ "futures-core", "futures-sink", ] [[package]] name = "futures-core" version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c09fd04b7e4073ac7156a9539b57a484a8ea920f79c7c675d05d289ab6110d3" [[package]] name = "futures-executor" version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9420b90cfa29e327d0429f19be13e7ddb68fa1cccb09d65e5706b8c7a749b8a6" dependencies = [ "futures-core", "futures-task", "futures-util", ] [[package]] name = "futures-io" version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc4045962a5a5e935ee2fdedaa4e08284547402885ab326734432bed5d12966b" [[package]] name = "futures-sink" version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "21163e139fa306126e6eedaf49ecdb4588f939600f0b1e770f4205ee4b7fa868" [[package]] name = "futures-task" version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57c66a976bf5909d801bbef33416c41372779507e7a6b3a5e25e4749c58f776a" [[package]] name = "futures-util" version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d8b7abd5d659d9b90c8cba917f6ec750a74e2dc23902ef9cd4cc8c8b22e6036a" dependencies = [ "futures-channel", "futures-core", "futures-io", "futures-sink", "futures-task", "memchr", "pin-project-lite", "pin-utils", "slab", ] [[package]] name = "glob" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "iana-time-zone" version = "0.1.47" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c495f162af0bf17656d0014a0eded5f3cd2f365fdd204548c2869db89359dc7" dependencies = [ "android_system_properties", "core-foundation-sys", "js-sys", "once_cell", "wasm-bindgen", "winapi", ] [[package]] name = "js-sys" version = "0.3.59" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "258451ab10b34f8af53416d1fdab72c22e805f0c92a1136d59470ec0b11138b2" dependencies = [ "wasm-bindgen", ] [[package]] name = "libc" version = "0.2.155" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" [[package]] name = "libredox" version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ "bitflags 2.4.1", "libc", "redox_syscall 0.5.3", ] [[package]] name = "linux-raw-sys" version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456" [[package]] name = "lock_api" version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "327fa5b6a6940e4699ec49a9beae1ea4845c6bab9314e4f84ac68742139d8c53" dependencies = [ "autocfg", "scopeguard", ] [[package]] name = "log" version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710" dependencies = [ "cfg-if", ] [[package]] name = "memchr" version = "2.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d" [[package]] name = "nix" version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4" dependencies = [ "bitflags 2.4.1", "cfg-if", "cfg_aliases 0.1.1", "libc", ] [[package]] name = "nix" version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46" dependencies = [ "bitflags 2.4.1", "cfg-if", "cfg_aliases 0.2.1", "libc", ] [[package]] name = "normalize-line-endings" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be" [[package]] name = "num-traits" version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" dependencies = [ "autocfg", ] [[package]] name = "number_prefix" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" [[package]] name = "once_cell" version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" [[package]] name = "onig" version = "6.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8c4b31c8722ad9171c6d77d3557db078cab2bd50afcc9d09c8b315c59df8ca4f" dependencies = [ "bitflags 1.3.2", "libc", "once_cell", "onig_sys", ] [[package]] name = "onig_sys" version = "69.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b829e3d7e9cc74c7e315ee8edb185bf4190da5acde74afd7fc59c35b1f086e7" dependencies = [ "cc", "pkg-config", ] [[package]] name = "os_display" version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a6229bad892b46b0dcfaaeb18ad0d2e56400f5aaea05b768bde96e73676cf75" dependencies = [ "unicode-width", ] [[package]] name = "parking_lot" version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ "lock_api", "parking_lot_core", ] [[package]] name = "parking_lot_core" version = "0.9.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" dependencies = [ "cfg-if", "libc", "redox_syscall 0.4.1", "smallvec", 
"windows-targets 0.48.0", ] [[package]] name = "pin-project-lite" version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" [[package]] name = "pin-utils" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "pkg-config" version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "58893f751c9b0412871a09abd62ecd2a00298c6c83befa223ef98c52aef40cbe" [[package]] name = "predicates" version = "3.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7e9086cc7640c29a356d1a29fd134380bee9d8f79a17410aa76e7ad295f42c97" dependencies = [ "anstyle", "difflib", "float-cmp", "normalize-line-endings", "predicates-core", "regex", ] [[package]] name = "predicates-core" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b794032607612e7abeb4db69adb4e33590fa6cf1149e95fd7cb00e634b92f174" [[package]] name = "predicates-tree" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aee95d988ee893cb35c06b148c80ed2cd52c8eea927f50ba7a0be1a786aeab73" dependencies = [ "predicates-core", "treeline", ] [[package]] name = "pretty_assertions" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af7cee1a6c8a5b9208b3cb1061f10c0cb689087b3d8ce85fb9d2dd7a29b6ba66" dependencies = [ "diff", "yansi", ] [[package]] name = "proc-macro2" version = "1.0.60" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dec2b086b7a862cf4de201096214fa870344cf922b2b30c167badb3af3195406" dependencies = [ "unicode-ident", ] [[package]] name = "quote" version = "1.0.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488" dependencies = [ "proc-macro2", ] [[package]] name = "redox_syscall" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" dependencies = [ "bitflags 1.3.2", ] [[package]] name = "redox_syscall" version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2a908a6e00f1fdd0dfd9c0eb08ce85126f6d8bbda50017e74bc4a4b7d4a926a4" dependencies = [ "bitflags 2.4.1", ] [[package]] name = "regex" version = "1.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619" dependencies = [ "aho-corasick", "memchr", "regex-automata", "regex-syntax", ] [[package]] name = "regex-automata" version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" dependencies = [ "aho-corasick", "memchr", "regex-syntax", ] [[package]] name = "regex-syntax" version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" [[package]] name = "rustix" version = "0.38.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ea3e1a662af26cd7a3ba09c0297a31af215563ecf42817c98df621387f4e949" dependencies = [ "bitflags 2.4.1", "errno", "libc", "linux-raw-sys", "windows-sys 0.52.0", ] [[package]] name = "same-file" 
version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f20c4be53a8a1ff4c1f1b2bd14570d2f634628709752f0702ecdd2b3f9a5267" dependencies = [ "winapi-util", ] [[package]] name = "scc" version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec96560eea317a9cc4e0bb1f6a2c93c09a19b8c4fc5cb3fcc0ec1c094cd783e2" dependencies = [ "sdd", ] [[package]] name = "scopeguard" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "sdd" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b84345e4c9bd703274a082fb80caaa99b7612be48dfaa1dd9266577ec412309d" [[package]] name = "serde" version = "1.0.147" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d193d69bae983fc11a79df82342761dfbf28a99fc8d203dca4c3c1b590948965" [[package]] name = "serial_test" version = "3.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4b4b487fe2acf240a021cf57c6b2b4903b1e78ca0ecd862a71b71d2a51fed77d" dependencies = [ "futures", "log", "once_cell", "parking_lot", "scc", "serial_test_derive", ] [[package]] name = "serial_test_derive" version = "3.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "82fe9db325bcef1fbcde82e078a5cc4efdf787e96b3b9cf45b50b529f2083d67" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "slab" version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eb703cfe953bccee95685111adeedb76fabe4e97549a58d16f03ea7b9367bb32" [[package]] name = "smallvec" version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83" [[package]] name = "strsim" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5ee073c9e4cd00e28217186dbe12796d692868f432bf2e97ee73bed0c56dfa01" [[package]] name = "syn" version = "2.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32d41677bcbe24c20c52e7c70b0d8db04134c5d1066bf98662e2871ad200ea3e" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] [[package]] name = "tempfile" version = "3.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04cbcdd0c794ebb0d4cf35e88edd2f7d2c4c3e9a5a6dab322839b321c6a87a64" dependencies = [ "cfg-if", "fastrand", "once_cell", "rustix", "windows-sys 0.59.0", ] [[package]] name = "terminal_size" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "21bebf2b7c9e0a515f6e0f8c51dc0f8e4696391e6f1ff30379559f8365fb0df7" dependencies = [ "rustix", "windows-sys 0.48.0", ] [[package]] name = "treeline" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7f741b240f1a48843f9b8e0444fb55fb2a4ff67293b50a9179dfd5ea67f8d41" [[package]] name = "unicode-ident" version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b15811caf2415fb889178633e7724bad2509101cde276048e013b9def5e51fa0" [[package]] name = "unicode-width" version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973" [[package]] name = "utf8parse" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" [[package]] name = "uucore" version = "0.0.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b54aad02cf7e96f5fafabb6b836efa73eef934783b17530095a29ffd4fdc154" dependencies = [ "clap", "dunce", "glob", "libc", "nix 0.28.0", "number_prefix", "once_cell", "os_display", "uucore_procs", "wild", "winapi-util", "windows-sys 0.48.0", ] [[package]] name = "uucore_procs" version = "0.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fd6689d54dd59b145ae94458007cb0508a741716ee8dc494b45d129fb83d4d46" dependencies = [ "proc-macro2", "quote", "uuhelp_parser", ] [[package]] name = "uuhelp_parser" version = "0.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d84a98929941eee8952bed33fe3e7c8731d2596ad64eef53c4c7c6fbae828e9" [[package]] name = "wait-timeout" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" dependencies = [ "libc", ] [[package]] name = "walkdir" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" dependencies = [ "same-file", "winapi-util", ] [[package]] name = "wasm-bindgen" version = "0.2.91" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c1e124130aee3fb58c5bdd6b639a0509486b0338acaaae0c84a5124b0f588b7f" dependencies = [ "cfg-if", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" version = "0.2.91" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c9e7e1900c352b609c8488ad12639a311045f40a35491fb69ba8c12f758af70b" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", "syn", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-macro" version = "0.2.91" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b30af9e2d358182b5c7449424f017eba305ed32a7010509ede96cdc4696c46ed" dependencies = [ "quote", "wasm-bindgen-macro-support", ] [[package]] name = "wasm-bindgen-macro-support" version = "0.2.91" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "642f325be6301eb8107a83d12a8ac6c1e1c54345a7ef1a9261962dfefda09e66" dependencies = [ "proc-macro2", "quote", "syn", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" version = "0.2.91" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4f186bd2dcf04330886ce82d6f33dd75a7bfcf69ecf5763b89fcde53b6ac9838" [[package]] name = "wild" version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a3131afc8c575281e1e80f36ed6a092aa502c08b18ed7524e86fbbb12bb410e1" dependencies = [ "glob", ] [[package]] name = "winapi" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" dependencies = [ "winapi-i686-pc-windows-gnu", "winapi-x86_64-pc-windows-gnu", ] [[package]] name = "winapi-i686-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"4d4cc384e1e73b93bafa6fb4f1df8c41695c8a91cf9c4c64358067d15a7b6c6b" dependencies = [ "windows-sys 0.52.0", ] [[package]] name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows-sys" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ "windows-targets 0.48.0", ] [[package]] name = "windows-sys" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ "windows-targets 0.52.6", ] [[package]] name = "windows-sys" version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" dependencies = [ "windows-targets 0.52.6", ] [[package]] name = "windows-targets" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5" dependencies = [ "windows_aarch64_gnullvm 0.48.0", "windows_aarch64_msvc 0.48.0", "windows_i686_gnu 0.48.0", "windows_i686_msvc 0.48.0", "windows_x86_64_gnu 0.48.0", "windows_x86_64_gnullvm 0.48.0", "windows_x86_64_msvc 0.48.0", ] [[package]] name = "windows-targets" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ "windows_aarch64_gnullvm 0.52.6", "windows_aarch64_msvc 0.52.6", "windows_i686_gnu 0.52.6", "windows_i686_gnullvm", "windows_i686_msvc 0.52.6", "windows_x86_64_gnu 0.52.6", "windows_x86_64_gnullvm 0.52.6", "windows_x86_64_msvc 0.52.6", ] [[package]] name = "windows_aarch64_gnullvm" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" [[package]] name = "windows_aarch64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" [[package]] name = "windows_aarch64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" [[package]] name = "windows_i686_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" 
[[package]] name = "windows_i686_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" [[package]] name = "windows_x86_64_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" [[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" [[package]] name = "windows_x86_64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "yansi" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" findutils-0.7.0/Cargo.toml0000644000000034210000000000100110370ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. 
[package] edition = "2021" name = "findutils" version = "0.7.0" authors = ["uutils developers"] description = "Rust implementation of GNU findutils" homepage = "https://github.com/uutils/findutils" readme = "README.md" license = "MIT" repository = "https://github.com/uutils/findutils" [profile.dist] lto = "thin" inherits = "release" [[bin]] name = "find" path = "src/find/main.rs" [[bin]] name = "xargs" path = "src/xargs/main.rs" [[bin]] name = "testing-commandline" path = "src/testing/commandline/main.rs" [dependencies.chrono] version = "0.4.38" [dependencies.clap] version = "4.5" [dependencies.faccess] version = "0.2.4" [dependencies.nix] version = "0.29" features = [ "fs", "user", ] [dependencies.once_cell] version = "1.19" [dependencies.onig] version = "6.4" default-features = false [dependencies.regex] version = "1.10" [dependencies.uucore] version = "0.0.27" features = [ "entries", "fs", "fsext", "mode", ] [dependencies.walkdir] version = "2.5" [dev-dependencies.assert_cmd] version = "2" [dev-dependencies.filetime] version = "0.2" [dev-dependencies.nix] version = "0.29" features = ["fs"] [dev-dependencies.predicates] version = "3" [dev-dependencies.pretty_assertions] version = "1.4.0" [dev-dependencies.serial_test] version = "3.1" [dev-dependencies.tempfile] version = "3" findutils-0.7.0/Cargo.toml.orig000064400000000000000000000030121046102023000145140ustar 00000000000000[package] name = "findutils" version = "0.7.0" homepage = "https://github.com/uutils/findutils" repository = "https://github.com/uutils/findutils" edition = "2021" license = "MIT" readme = "README.md" description = "Rust implementation of GNU findutils" authors = ["uutils developers"] [dependencies] chrono = "0.4.38" clap = "4.5" faccess = "0.2.4" walkdir = "2.5" regex = "1.10" once_cell = "1.19" onig = { version = "6.4", default-features = false } uucore = { version = "0.0.27", features = ["entries", "fs", "fsext", "mode"] } nix = { version = "0.29", features = ["fs", "user"] } [dev-dependencies] assert_cmd = "2" filetime = "0.2" nix = { version = "0.29", features = ["fs"] } predicates = "3" serial_test = "3.1" tempfile = "3" pretty_assertions = "1.4.0" [[bin]] name = "find" path = "src/find/main.rs" [[bin]] name = "xargs" path = "src/xargs/main.rs" [[bin]] name = "testing-commandline" path = "src/testing/commandline/main.rs" # The profile that 'cargo dist' will build with [profile.dist] inherits = "release" lto = "thin" # Config for 'cargo dist' [workspace.metadata.dist] # The preferred cargo-dist version to use in CI (Cargo.toml SemVer syntax) cargo-dist-version = "0.12.0" # CI backends to support ci = ["github"] # The installers to generate for each app installers = [] # Target platforms to build apps for (Rust target-triple syntax) targets = [ "aarch64-apple-darwin", "x86_64-apple-darwin", "x86_64-unknown-linux-gnu", "x86_64-pc-windows-msvc", ] # Publish jobs to run in CI pr-run-mode = "plan" findutils-0.7.0/DEVELOPMENT.md000064400000000000000000000127151046102023000137430ustar 00000000000000 # Setting up your local development environment For contributing rules and best practices please refer to [CONTRIBUTING.md](CONTRIBUTING.md) ## Before you start For this guide we assume that you already have a GitHub account and have `git` and your favorite code editor or IDE installed and configured. Before you start working on findutils, please follow these steps: 1. Fork the [findutils repository](https://github.com/uutils/findutils) to your GitHub account. 
***Tip:*** See [this GitHub guide](https://docs.github.com/en/get-started/quickstart/fork-a-repo) for more information on this step. 2. Clone that fork to your local development environment: ```shell git clone https://github.com/YOUR-GITHUB-ACCOUNT/findutils cd findutils ``` ## Tools You will need the tools mentioned in this section to build and test your code changes locally. This section will explain how to install and configure these tools. We also have an extensive CI that uses these tools and will check your code before it can be merged. The next section [Testing](#testing) will explain how to run those checks locally to avoid waiting for the CI. ### Rust toolchain [Install Rust](https://www.rust-lang.org/tools/install) If you're using rustup to install and manage your Rust toolchains, `clippy` and `rustfmt` are usually already installed. If you are using one of the alternative methods, please make sure to install them manually. See following sub-sections for their usage: [clippy](#clippy) [rustfmt](#rustfmt). ***Tip*** You might also need to add 'llvm-tools' component if you are going to [generate code coverage reports locally](#code-coverage-report): ```shell rustup component add llvm-tools-preview ``` ### pre-commit hooks A configuration for `pre-commit` is provided in the repository. It allows automatically checking every git commit you make to ensure it compiles, and passes `clippy` and `rustfmt` without warnings. To use the provided hook: 1. [Install `pre-commit`](https://pre-commit.com/#install) 1. Run `pre-commit install` while in the repository directory Your git commits will then automatically be checked. If a check fails, an error message will explain why, and your commit will be canceled. You can then make the suggested changes, and run `git commit ...` again. **NOTE: On MacOS** the pre-commit hooks are currently broken. There are workarounds involving switching to unstable nightly Rust and components. ### clippy ```shell cargo clippy --all-targets --all-features ``` The `msrv` key in the clippy configuration file `clippy.toml` is used to disable lints pertaining to newer features by specifying the minimum supported Rust version (MSRV). ### rustfmt ```shell cargo fmt --all ``` ### cargo-deny This project uses [cargo-deny](https://github.com/EmbarkStudios/cargo-deny/) to detect duplicate dependencies, checks licenses, etc. To run it locally, first install it and then run with: ```shell cargo deny --all-features check all ``` ### Markdown linter We use [markdownlint](https://github.com/DavidAnson/markdownlint) to lint the Markdown files in the repository. ### Spell checker We use `cspell` as spell checker for all files in the project. If you are using VS Code, you can install the [code spell checker](https://marketplace.visualstudio.com/items?itemName=streetsidesoftware.code-spell-checker) extension to enable spell checking within your editor. Otherwise, you can install [cspell](https://cspell.org/) separately. If you want to make the spell checker ignore a word, you can add ```rust // spell-checker:ignore word_to_ignore ``` at the top of the file. ## Testing Just like with building, we follow the standard procedure for testing using Cargo: ```shell cargo test ``` ## Code coverage report Code coverage report can be generated using [grcov](https://github.com/mozilla/grcov). 
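If you don't already have `grcov` installed, it can typically be installed from crates.io with Cargo (this assumes a working Cargo setup and, as noted above, the `llvm-tools` component for instrumented builds):

```shell
cargo install grcov
```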
### Using Nightly Rust To generate a [gcov-based](https://github.com/mozilla/grcov#example-how-to-generate-gcda-files-for-a-rust-project) coverage report: ```shell export CARGO_INCREMENTAL=0 export RUSTFLAGS="-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort" export RUSTDOCFLAGS="-Cpanic=abort" cargo build cargo test grcov . -s . --binary-path ./target/debug/ -t html --branch --ignore-not-existing --ignore build.rs --excl-br-line "^\s*((debug_)?assert(_eq|_ne)?\#\[derive\()" -o ./target/debug/coverage/ # open target/debug/coverage/index.html in browser ``` If changes are not reflected in the report, run `cargo clean` and then re-run the above commands. ### Using Stable Rust If you are using a stable version of Rust that doesn't enable code coverage instrumentation by default, add the `-Cinstrument-coverage` flag to the `RUSTFLAGS` env variable specified above. ## Tips for setting up on Mac ### C Compiler and linker On MacOS you'll need to install a C compiler & linker: ```shell xcode-select --install ``` ## Tips for setting up on Windows ### MSVC build tools On Windows you'll need the MSVC build tools for Visual Studio 2013 or later. If you are using `rustup-init.exe` to install the Rust toolchain, it will guide you through the process of downloading and installing these prerequisites. Otherwise please follow [this guide](https://learn.microsoft.com/en-us/windows/dev-environment/rust/setup). findutils-0.7.0/LICENSE000064400000000000000000000020331046102023000126340ustar 00000000000000Copyright (c) Google Inc. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. findutils-0.7.0/README.md000064400000000000000000000030101046102023000131060ustar 00000000000000# findutils [![Crates.io](https://img.shields.io/crates/v/findutils.svg)](https://crates.io/crates/findutils) [![Discord](https://img.shields.io/badge/discord-join-7289DA.svg?logo=discord&longCache=true&style=flat)](https://discord.gg/wQVJbvJ) [![License](http://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/uutils/findutils/blob/main/LICENSE) [![dependency status](https://deps.rs/repo/github/uutils/findutils/status.svg)](https://deps.rs/repo/github/uutils/findutils) [![codecov](https://codecov.io/gh/uutils/findutils/branch/master/graph/badge.svg)](https://codecov.io/gh/uutils/findutils) Rust implementation of [GNU findutils](https://www.gnu.org/software/findutils/): `xargs`, `find`, `locate` and `updatedb`. The goal is to be a full drop-in replacement of the original commands.
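To try the tools from a source checkout, a minimal, illustrative session looks like the following (the predicates passed to `find` and the input fed to `xargs` are only examples):

```shell
# Build optimized binaries into target/release/
cargo build --release

# List Rust source files in the current tree with the freshly built find
./target/release/find . -type f -name '*.rs'

# Run the xargs binary with a trivial command
echo "a b c" | ./target/release/xargs echo
```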
## Run the GNU testsuite on rust/findutils: ``` bash util/build-gnu.sh # To run a specific test: bash util/build-gnu.sh tests/misc/help-version.sh ``` ## Comparing with GNU ![Evolution over time - GNU testsuite](https://github.com/uutils/findutils-tracking/blob/main/gnu-results.png?raw=true) ![Evolution over time - BFS testsuite](https://github.com/uutils/findutils-tracking/blob/main/bfs-results.png?raw=true) ## Build/run with BFS [bfs](https://github.com/tavianator/bfs) is a variant of the UNIX find command that operates breadth-first rather than depth-first. ``` bash util/build-bfs.sh # To run a specific test: bash util/build-bfs.sh posix/basic ``` For more details, see https://github.com/uutils/findutils-tracking/ findutils-0.7.0/docs/book.toml000064400000000000000000000002501046102023000144050ustar 00000000000000[book] authors = ["Terts Diepraam"] language = "en" multilingual = false src = "src" title = "findutils" [preprocessor.toc] command = "mdbook-toc" renderer = ["html"] findutils-0.7.0/docs/src/SUMMARY.md000064400000000000000000000003611046102023000150270ustar 00000000000000# Summary [Introduction](index.md) * [Installation](installation.md) * [Build from source](build.md) * [Platform support](platforms.md) * [Contributing](contributing.md) * [GNU test coverage](test_coverage.md) * [Extensions](extensions.md) findutils-0.7.0/docs/src/build.md000064400000000000000000000000311046102023000147630ustar 00000000000000# Build from source TODOfindutils-0.7.0/docs/src/contributing.md000064400000000000000000000001121046102023000163730ustar 00000000000000 {{ #include ../../CONTRIBUTING.md }} findutils-0.7.0/docs/src/extensions.md000064400000000000000000000000221046102023000160630ustar 00000000000000# Extensions TODOfindutils-0.7.0/docs/src/index.md000064400000000000000000000023221046102023000150000ustar 00000000000000 {{#include logo.svg}} # uutils findutils Documentation The uutils findutils project reimplements the GNU findutils in Rust. It is available for Linux, Windows, Mac and other platforms. uutils is licensed under the [MIT License](https://github.com/uutils/findutils/blob/main/LICENSE). ## Useful links - [Releases](https://github.com/uutils/findutils/releases) - [Source Code](https://github.com/uutils/findutils) - [Issues](https://github.com/uutils/findutils/issues) - [Discord](https://discord.gg/wQVJbvJ) > Note: This manual is automatically generated from the source code and is a > work in progress. findutils-0.7.0/docs/src/installation.md000064400000000000000000000033111046102023000163710ustar 00000000000000 # Installation This is a list of uutils packages in various distributions and package managers. Note that these are packaged by third-parties and the packages might contain patches. You can also [build findutils from source](build.md). 
## Cargo [![crates.io package](https://repology.org/badge/version-for-repo/crates_io/rust:findutils.svg)](https://crates.io/crates/findutils) ```shell cargo install findutils ``` ## Linux ### Debian [![Debian 13 package](https://repology.org/badge/version-for-repo/debian_13/rust:findutils.svg)](https://packages.debian.org/trixie/source/rust-findutils) [![Debian Unstable package](https://repology.org/badge/version-for-repo/debian_unstable/rust:findutils.svg)](https://packages.debian.org/sid/source/rust-findutils) ```shell apt install rust-findutils # To use it: export PATH=/usr/lib/cargo/bin/findutils:$PATH ``` ### Gentoo [![Gentoo package](https://repology.org/badge/version-for-repo/gentoo/uutils-findutils.svg)](https://packages.gentoo.org/packages/sys-apps/uutils-findutils) ```shell emerge -pv sys-apps/uutils-findutils ``` ## MacOS ### Homebrew [![Homebrew package](https://repology.org/badge/version-for-repo/homebrew/uutils-findutils.svg)](https://formulae.brew.sh/formula/uutils-findutils) ```shell brew install uutils-findutils ``` ## FreeBSD [![FreeBSD port](https://repology.org/badge/version-for-repo/freebsd/rust-findutils.svg)](https://repology.org/project/rust-findutils/versions) ```sh pkg install rust-findutils ``` ## Windows As far as we are aware, `findutils` has not been packaged for any package managers on Windows yet.findutils-0.7.0/docs/src/logo.svg000064400000000000000000000070401046102023000150320ustar 00000000000000 findutils-0.7.0/docs/src/platforms.md000064400000000000000000000000301046102023000156720ustar 00000000000000# Platform support TODOfindutils-0.7.0/docs/src/test_coverage.md000064400000000000000000000000311046102023000165160ustar 00000000000000# GNU test coverage TODOfindutils-0.7.0/src/find/main.rs000064400000000000000000000013221046102023000146300ustar 00000000000000// Copyright 2017 Google Inc. // // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file or at // https://opensource.org/licenses/MIT. fn main() { // Ignores the SIGPIPE signal. // This is to solve the problem that when find is used with a pipe character, // the downstream software of the standard output stream closes the pipe and triggers a panic. uucore::panic::mute_sigpipe_panic(); let args = std::env::args().collect::>(); let strs: Vec<&str> = args.iter().map(std::convert::AsRef::as_ref).collect(); let deps = findutils::find::StandardDependencies::new(); std::process::exit(findutils::find::find_main(&strs, &deps)); } findutils-0.7.0/src/find/matchers/access.rs000064400000000000000000000027241046102023000167620ustar 00000000000000// Copyright 2022 Tavian Barnes // // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file or at // https://opensource.org/licenses/MIT. use faccess::PathExt; use super::{Matcher, MatcherIO, WalkEntry}; /// Matcher for -{read,writ,execut}able. 
pub enum AccessMatcher { Readable, Writable, Executable, } impl Matcher for AccessMatcher { fn matches(&self, file_info: &WalkEntry, _: &mut MatcherIO) -> bool { let path = file_info.path(); match self { Self::Readable => path.readable(), Self::Writable => path.writable(), Self::Executable => path.executable(), } } } #[cfg(test)] mod tests { use super::*; use crate::find::matchers::tests::get_dir_entry_for; use crate::find::tests::FakeDependencies; #[test] fn access_matcher() { let file_info = get_dir_entry_for("test_data/simple", "abbbc"); let deps = FakeDependencies::new(); assert!( AccessMatcher::Readable.matches(&file_info, &mut deps.new_matcher_io()), "file should be readable" ); assert!( AccessMatcher::Writable.matches(&file_info, &mut deps.new_matcher_io()), "file should be writable" ); #[cfg(unix)] assert!( !AccessMatcher::Executable.matches(&file_info, &mut deps.new_matcher_io()), "file should not be executable" ); } } findutils-0.7.0/src/find/matchers/delete.rs000064400000000000000000000055611046102023000167650ustar 00000000000000/* * This file is part of the uutils findutils package. * * (c) Arcterus * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ use std::fs; use std::io::{self, stderr, Write}; use super::{Matcher, MatcherIO, WalkEntry}; pub struct DeleteMatcher; impl DeleteMatcher { pub fn new() -> Self { DeleteMatcher } fn delete(&self, entry: &WalkEntry) -> io::Result<()> { if entry.file_type().is_dir() && !entry.path_is_symlink() { fs::remove_dir(entry.path()) } else { fs::remove_file(entry.path()) } } } impl Matcher for DeleteMatcher { fn matches(&self, file_info: &WalkEntry, matcher_io: &mut MatcherIO) -> bool { let path = file_info.path(); let path_str = path.to_string_lossy(); // This is a quirk in find's traditional semantics probably due to // POSIX rmdir() not accepting "." (EINVAL). std::fs::remove_dir() // inherits the same behavior, so no reason to buck tradition. if path_str == "." 
{ return true; } match self.delete(file_info) { Ok(()) => true, Err(e) => { matcher_io.set_exit_code(1); writeln!(&mut stderr(), "Failed to delete {path_str}: {e}").unwrap(); false } } } fn has_side_effects(&self) -> bool { true } } #[cfg(test)] mod tests { use std::fs::{create_dir, File}; use tempfile::Builder; use super::*; use crate::find::matchers::tests::get_dir_entry_for; use crate::find::tests::FakeDependencies; #[test] fn delete_matcher() { let matcher = DeleteMatcher::new(); let deps = FakeDependencies::new(); let temp_dir = Builder::new().prefix("test_data").tempdir().unwrap(); let temp_dir_path = temp_dir.path().to_string_lossy(); File::create(temp_dir.path().join("test")).expect("created test file"); create_dir(temp_dir.path().join("test_dir")).expect("created test directory"); let test_entry = get_dir_entry_for(&temp_dir_path, "test"); assert!( matcher.matches(&test_entry, &mut deps.new_matcher_io()), "DeleteMatcher should match a simple file", ); assert!( !temp_dir.path().join("test").exists(), "DeleteMatcher should actually delete files it matches", ); let temp_dir_entry = get_dir_entry_for(&temp_dir_path, "test_dir"); assert!( matcher.matches(&temp_dir_entry, &mut deps.new_matcher_io()), "DeleteMatcher should match directories", ); assert!( !temp_dir.path().join("test_dir").exists(), "DeleteMatcher should actually delete (empty) directories it matches", ); } } findutils-0.7.0/src/find/matchers/empty.rs000064400000000000000000000056471046102023000166660ustar 00000000000000// Copyright 2021 Collabora, Ltd. // // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file or at // https://opensource.org/licenses/MIT. use std::{ fs::read_dir, io::{stderr, Write}, }; use super::{Matcher, MatcherIO, WalkEntry}; pub struct EmptyMatcher; impl EmptyMatcher { pub fn new() -> EmptyMatcher { EmptyMatcher } } impl Matcher for EmptyMatcher { fn matches(&self, file_info: &WalkEntry, _: &mut MatcherIO) -> bool { if file_info.file_type().is_file() { match file_info.metadata() { Ok(meta) => meta.len() == 0, Err(err) => { writeln!( &mut stderr(), "Error getting size for {}: {}", file_info.path().display(), err ) .unwrap(); false } } } else if file_info.file_type().is_dir() { match read_dir(file_info.path()) { Ok(mut it) => it.next().is_none(), Err(err) => { writeln!( &mut stderr(), "Error getting contents of {}: {}", file_info.path().display(), err ) .unwrap(); false } } } else { false } } } #[cfg(test)] mod tests { use tempfile::Builder; use super::*; use crate::find::matchers::tests::get_dir_entry_for; use crate::find::tests::FakeDependencies; #[test] fn empty_files() { let empty_file_info = get_dir_entry_for("test_data/simple", "abbbc"); let nonempty_file_info = get_dir_entry_for("test_data/size", "512bytes"); let matcher = EmptyMatcher::new(); let deps = FakeDependencies::new(); assert!(matcher.matches(&empty_file_info, &mut deps.new_matcher_io())); assert!(!matcher.matches(&nonempty_file_info, &mut deps.new_matcher_io())); } #[test] fn empty_directories() { let temp_dir = Builder::new() .prefix("empty_directories") .tempdir() .unwrap(); let temp_dir_path = temp_dir.path().to_string_lossy(); let subdir_name = "subdir"; std::fs::create_dir(temp_dir.path().join(subdir_name)).unwrap(); let matcher = EmptyMatcher::new(); let deps = FakeDependencies::new(); let file_info = get_dir_entry_for(&temp_dir_path, subdir_name); assert!(matcher.matches(&file_info, &mut deps.new_matcher_io())); 
std::fs::File::create(temp_dir.path().join(subdir_name).join("a")).unwrap(); let file_info = get_dir_entry_for(&temp_dir_path, subdir_name); assert!(!matcher.matches(&file_info, &mut deps.new_matcher_io())); } } findutils-0.7.0/src/find/matchers/entry.rs000064400000000000000000000222331046102023000166570ustar 00000000000000//! Paths encountered during a walk. use std::cell::OnceCell; use std::error::Error; use std::ffi::OsStr; use std::fmt::{self, Display, Formatter}; use std::fs::{self, Metadata}; use std::io::{self, ErrorKind}; #[cfg(unix)] use std::os::unix::fs::FileTypeExt; use std::path::{Path, PathBuf}; use walkdir::DirEntry; use super::Follow; /// Wrapper for a directory entry. #[derive(Debug)] enum Entry { /// Wraps an explicit path and depth. Explicit(PathBuf, usize), /// Wraps a WalkDir entry. WalkDir(DirEntry), } /// File types. #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub enum FileType { Unknown, Fifo, CharDevice, Directory, BlockDevice, Regular, Symlink, Socket, } impl FileType { pub fn is_dir(self) -> bool { self == Self::Directory } pub fn is_file(self) -> bool { self == Self::Regular } pub fn is_symlink(self) -> bool { self == Self::Symlink } } impl From for FileType { fn from(t: fs::FileType) -> FileType { if t.is_dir() { return FileType::Directory; } if t.is_file() { return FileType::Regular; } if t.is_symlink() { return FileType::Symlink; } #[cfg(unix)] { if t.is_fifo() { return FileType::Fifo; } if t.is_char_device() { return FileType::CharDevice; } if t.is_block_device() { return FileType::BlockDevice; } if t.is_socket() { return FileType::Socket; } } FileType::Unknown } } /// An error encountered while walking a file system. #[derive(Clone, Debug)] pub struct WalkError { /// The path that caused the error, if known. path: Option, /// The depth below the root path, if known. depth: Option, /// The io::Error::raw_os_error(), if known. raw: Option, } impl WalkError { /// Get the path this error occurred on, if known. pub fn path(&self) -> Option<&Path> { self.path.as_deref() } /// Get the traversal depth when this error occurred, if known. pub fn depth(&self) -> Option { self.depth } /// Get the kind of I/O error. pub fn kind(&self) -> ErrorKind { io::Error::from(self).kind() } /// Check for ErrorKind::{NotFound,NotADirectory}. pub fn is_not_found(&self) -> bool { if self.kind() == ErrorKind::NotFound { return true; } // NotADirectory is nightly-only #[cfg(unix)] { if self.raw == Some(uucore::libc::ENOTDIR) { return true; } } false } /// Check for ErrorKind::FilesystemLoop. 
pub fn is_loop(&self) -> bool { #[cfg(unix)] return self.raw == Some(uucore::libc::ELOOP); #[cfg(not(unix))] return false; } } impl Display for WalkError { fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> { let ioe = io::Error::from(self); if let Some(path) = &self.path { write!(f, "{}: {}", path.display(), ioe) } else { write!(f, "{}", ioe) } } } impl Error for WalkError {} impl From for WalkError { fn from(e: io::Error) -> WalkError { WalkError::from(&e) } } impl From<&io::Error> for WalkError { fn from(e: &io::Error) -> WalkError { WalkError { path: None, depth: None, raw: e.raw_os_error(), } } } impl From for WalkError { fn from(e: walkdir::Error) -> WalkError { WalkError::from(&e) } } impl From<&walkdir::Error> for WalkError { fn from(e: &walkdir::Error) -> WalkError { WalkError { path: e.path().map(|p| p.to_owned()), depth: Some(e.depth()), raw: e.io_error().and_then(|e| e.raw_os_error()), } } } impl From for io::Error { fn from(e: WalkError) -> io::Error { io::Error::from(&e) } } impl From<&WalkError> for io::Error { fn from(e: &WalkError) -> io::Error { e.raw .map(io::Error::from_raw_os_error) .unwrap_or_else(|| ErrorKind::Other.into()) } } /// A path encountered while walking a file system. #[derive(Debug)] pub struct WalkEntry { /// The wrapped path/dirent. inner: Entry, /// Whether to follow symlinks. follow: Follow, /// Cached metadata. meta: OnceCell>, } impl WalkEntry { /// Create a new WalkEntry for a specific file. pub fn new(path: impl Into, depth: usize, follow: Follow) -> Self { Self { inner: Entry::Explicit(path.into(), depth), follow, meta: OnceCell::new(), } } /// Convert a [walkdir::DirEntry] to a [WalkEntry]. Errors due to broken symbolic links will be /// converted to valid entries, but other errors will be propagated. pub fn from_walkdir( result: walkdir::Result, follow: Follow, ) -> Result { let result = result.map_err(WalkError::from); match result { Ok(entry) => { let ret = if entry.depth() == 0 && follow != Follow::Never { // DirEntry::file_type() is wrong for root symlinks when follow_root_links is set Self::new(entry.path(), 0, follow) } else { Self { inner: Entry::WalkDir(entry), follow, meta: OnceCell::new(), } }; Ok(ret) } Err(e) if e.is_not_found() => { // Detect broken symlinks and replace them with explicit entries if let (Some(path), Some(depth)) = (e.path(), e.depth()) { if let Ok(meta) = path.symlink_metadata() { return Ok(WalkEntry { inner: Entry::Explicit(path.into(), depth), follow: Follow::Never, meta: Ok(meta).into(), }); } } Err(e) } Err(e) => Err(e), } } /// Get the path to this entry. pub fn path(&self) -> &Path { match &self.inner { Entry::Explicit(path, _) => path.as_path(), Entry::WalkDir(ent) => ent.path(), } } /// Get the path to this entry. pub fn into_path(self) -> PathBuf { match self.inner { Entry::Explicit(path, _) => path, Entry::WalkDir(ent) => ent.into_path(), } } /// Get the name of this entry. pub fn file_name(&self) -> &OsStr { match &self.inner { Entry::Explicit(path, _) => { // Path::file_name() only works if the last component is normal path.components() .last() .map(|c| c.as_os_str()) .unwrap_or_else(|| path.as_os_str()) } Entry::WalkDir(ent) => ent.file_name(), } } /// Get the depth of this entry below the root. pub fn depth(&self) -> usize { match &self.inner { Entry::Explicit(_, depth) => *depth, Entry::WalkDir(ent) => ent.depth(), } } /// Get whether symbolic links are followed for this entry. pub fn follow(&self) -> bool { self.follow.follow_at_depth(self.depth()) } /// Get the metadata on a cache miss. 
fn get_metadata(&self) -> Result { self.follow.metadata_at_depth(self.path(), self.depth()) } /// Get the [Metadata] for this entry, following symbolic links if appropriate. /// Multiple calls to this function will cache and re-use the same [Metadata]. pub fn metadata(&self) -> Result<&Metadata, WalkError> { let result = self.meta.get_or_init(|| match &self.inner { Entry::Explicit(_, _) => Ok(self.get_metadata()?), Entry::WalkDir(ent) => Ok(ent.metadata()?), }); result.as_ref().map_err(|e| e.clone()) } /// Get the file type of this entry. pub fn file_type(&self) -> FileType { match &self.inner { Entry::Explicit(_, _) => self .metadata() .map(|m| m.file_type().into()) .unwrap_or(FileType::Unknown), Entry::WalkDir(ent) => ent.file_type().into(), } } /// Check whether this entry is a symbolic link, regardless of whether links /// are being followed. pub fn path_is_symlink(&self) -> bool { match &self.inner { Entry::Explicit(path, _) => { if self.follow() { path.symlink_metadata() .is_ok_and(|m| m.file_type().is_symlink()) } else { self.file_type().is_symlink() } } Entry::WalkDir(ent) => ent.path_is_symlink(), } } } findutils-0.7.0/src/find/matchers/exec.rs000064400000000000000000000057361046102023000164530ustar 00000000000000// Copyright 2017 Google Inc. // // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file or at // https://opensource.org/licenses/MIT. use std::error::Error; use std::ffi::OsString; use std::io::{stderr, Write}; use std::path::Path; use std::process::Command; use super::{Matcher, MatcherIO, WalkEntry}; enum Arg { FileArg(Vec), LiteralArg(OsString), } pub struct SingleExecMatcher { executable: String, args: Vec, exec_in_parent_dir: bool, } impl SingleExecMatcher { pub fn new( executable: &str, args: &[&str], exec_in_parent_dir: bool, ) -> Result> { let transformed_args = args .iter() .map(|&a| { let parts = a.split("{}").collect::>(); if parts.len() == 1 { // No {} present Arg::LiteralArg(OsString::from(a)) } else { Arg::FileArg(parts.iter().map(OsString::from).collect()) } }) .collect(); Ok(Self { executable: executable.to_string(), args: transformed_args, exec_in_parent_dir, }) } } impl Matcher for SingleExecMatcher { fn matches(&self, file_info: &WalkEntry, _: &mut MatcherIO) -> bool { let mut command = Command::new(&self.executable); let path_to_file = if self.exec_in_parent_dir { if let Some(f) = file_info.path().file_name() { Path::new(".").join(f) } else { Path::new(".").join(file_info.path()) } } else { file_info.path().to_path_buf() }; for arg in &self.args { match *arg { Arg::LiteralArg(ref a) => command.arg(a.as_os_str()), Arg::FileArg(ref parts) => command.arg(parts.join(path_to_file.as_os_str())), }; } if self.exec_in_parent_dir { match file_info.path().parent() { None => { // Root paths like "/" have no parent. Run them from the root to match GNU find. command.current_dir(file_info.path()); } Some(parent) if parent == Path::new("") => { // Paths like "foo" have a parent of "". Avoid chdir(""). } Some(parent) => { command.current_dir(parent); } } } match command.status() { Ok(status) => status.success(), Err(e) => { writeln!(&mut stderr(), "Failed to run {}: {}", self.executable, e).unwrap(); false } } } fn has_side_effects(&self) -> bool { true } } #[cfg(test)] /// No tests here, because we need to call out to an external executable. See /// `tests/exec_unit_tests.rs` instead. 
mod tests {} findutils-0.7.0/src/find/matchers/fs.rs000064400000000000000000000131311046102023000161230ustar 00000000000000// This file is part of the uutils findutils package. // // For the full copyright and license information, please view the LICENSE // file that was distributed with this source code. use super::{Matcher, MatcherIO, WalkEntry}; /// The latest mapping from dev_id to fs_type, used for saving mount info reads #[cfg(unix)] pub struct Cache { dev_id: String, fs_type: String, } /// Get the filesystem type of a file. /// 1. get the metadata of the file /// 2. get the device ID of the metadata /// 3. search the cache, then the filesystem list /// /// Returns an empty string when no file system list matches. /// /// # Errors /// Returns an error if the metadata could not be read. /// Returns an error if the filesystem list could not be read. /// /// This is only supported on Unix. #[cfg(unix)] use std::{ cell::RefCell, error::Error, io::{stderr, Write}, path::Path, }; #[cfg(unix)] pub fn get_file_system_type( path: &Path, cache: &RefCell>, ) -> Result> { use std::os::unix::fs::MetadataExt; // use symlink_metadata (lstat under the hood) instead of metadata (stat) to make sure that it // does not return an error when there is a (broken) symlink; this is aligned with GNU find. let metadata = match path.symlink_metadata() { Ok(metadata) => metadata, Err(err) => Err(err)?, }; let dev_id = metadata.dev().to_string(); if let Some(cache) = cache.borrow().as_ref() { if cache.dev_id == dev_id { return Ok(cache.fs_type.clone()); } } let fs_list = match uucore::fsext::read_fs_list() { Ok(fs_list) => fs_list, Err(err) => Err(err)?, }; let result = fs_list .into_iter() .find(|fs| fs.dev_id == dev_id) .map_or_else(String::new, |fs| fs.fs_type); // cache the latest query if not a match before cache.replace(Some(Cache { dev_id, fs_type: result.clone(), })); Ok(result) } /// This matcher handles the -fstype argument. /// It matches the filesystem type of the file. /// /// This is only supported on Unix. pub struct FileSystemMatcher { #[cfg(unix)] fs_text: String, #[cfg(unix)] cache: RefCell>, } impl FileSystemMatcher { #[cfg(unix)] pub fn new(fs_text: String) -> Self { Self { fs_text, cache: RefCell::new(None), } } #[cfg(not(unix))] pub fn new(_fs_text: String) -> Self { Self {} } } impl Matcher for FileSystemMatcher { #[cfg(unix)] fn matches(&self, file_info: &WalkEntry, _: &mut MatcherIO) -> bool { match get_file_system_type(file_info.path(), &self.cache) { Ok(result) => result == self.fs_text, Err(_) => { writeln!( &mut stderr(), "Error getting filesystem type for {}", file_info.path().to_string_lossy() ) .unwrap(); false } } } #[cfg(not(unix))] fn matches(&self, _file_info: &WalkEntry, _: &mut MatcherIO) -> bool { false } } #[cfg(test)] mod tests { #[test] #[cfg(unix)] fn test_fs_matcher() { use crate::find::{ matchers::{ fs::{get_file_system_type, Cache}, tests::get_dir_entry_for, Matcher, }, tests::FakeDependencies, }; use std::cell::RefCell; use std::fs::File; use tempfile::Builder; let deps = FakeDependencies::new(); let mut matcher_io = deps.new_matcher_io(); // create temp file and get its fs type // We pass this file and the corresponding file system type into the Matcher for comparison. 
let temp_dir = Builder::new().prefix("fs_matcher").tempdir().unwrap(); let foo_path = temp_dir.path().join("foo"); let _ = File::create(foo_path).expect("create temp file"); let file_info = get_dir_entry_for(&temp_dir.path().to_string_lossy(), "foo"); // create an empty cache for initial fs type lookup let empty_cache = RefCell::new(None); let target_fs_type = get_file_system_type(file_info.path(), &empty_cache).unwrap(); // should work with unmatched cache, and the cache should be set to the last query result let unmatched_cache = RefCell::new(Some(Cache { dev_id: "foo".to_string(), fs_type: "bar".to_string(), })); let target_fs_type_unmatched_cache = get_file_system_type(file_info.path(), &unmatched_cache).unwrap(); assert_eq!( target_fs_type, target_fs_type_unmatched_cache, "get_file_system_type should return correct result with unmatched cache" ); assert_eq!( unmatched_cache.borrow().as_ref().unwrap().fs_type, target_fs_type, "get_file_system_type should set the cache to the last query result" ); // should match fs type let matcher = super::FileSystemMatcher::new(target_fs_type.clone()); assert!( matcher.matches(&file_info, &mut matcher_io), "{} should match {}", file_info.path().to_string_lossy(), target_fs_type ); // should not match fs type let matcher = super::FileSystemMatcher::new(target_fs_type.clone() + "foo"); assert!( !matcher.matches(&file_info, &mut matcher_io), "{} should not match {}", file_info.path().to_string_lossy(), target_fs_type ); } } findutils-0.7.0/src/find/matchers/glob.rs000064400000000000000000000177341046102023000164530ustar 00000000000000// Copyright 2022 Tavian Barnes // // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file or at // https://opensource.org/licenses/MIT. use onig::{Regex, RegexOptions, Syntax}; /// Parse a string as a POSIX Basic Regular Expression. fn parse_bre(expr: &str, options: RegexOptions) -> Result { let bre = Syntax::posix_basic(); Regex::with_options(expr, bre.options() | options, bre) } /// Push a literal character onto a regex, escaping it if necessary. fn regex_push_literal(regex: &mut String, ch: char) { // https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap09.html#tag_09_03_03 if matches!(ch, '.' | '[' | '\\' | '*' | '^' | '$') { regex.push('\\'); } regex.push(ch); } /// Extracts a bracket expression from a glob. fn extract_bracket_expr(pattern: &str) -> Option<(String, &str)> { // https://pubs.opengroup.org/onlinepubs/9699919799/utilities/V3_chap02.html#tag_18_13_01 // // If an open bracket introduces a bracket expression as in XBD RE Bracket Expression, // except that the character ( '!' ) shall replace the // character ( '^' ) in its role in a non-matching list in the regular expression notation, // it shall introduce a pattern bracket expression. A bracket expression starting with an // unquoted character produces unspecified results. Otherwise, '[' shall match // the character itself. // // To check for valid bracket expressions, we scan for the closing bracket and // attempt to parse that segment as a regex. If that fails, we treat the '[' // literally. let mut expr = "[".to_string(); let mut chars = pattern.chars(); let mut next = chars.next(); // https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap09.html#tag_09_03_05 // // 3. A non-matching list expression begins with a ( '^' ) ... // // (but in a glob, '!' 
is used instead of '^') if next == Some('!') { expr.push('^'); next = chars.next(); } // https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap09.html#tag_09_03_05 // // 1. ... The ( ']' ) shall lose its special meaning and represent // itself in a bracket expression if it occurs first in the list (after an initial // ( '^' ), if any). if next == Some(']') { expr.push(']'); next = chars.next(); } while let Some(ch) = next { expr.push(ch); match ch { '[' => { // https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap09.html#tag_09_03_05 // // 4. A collating symbol is a collating element enclosed within bracket-period // ( "[." and ".]" ) delimiters. ... // // 5. An equivalence class expression shall ... be expressed by enclosing any // one of the collating elements in the equivalence class within bracket- // equal ( "[=" and "=]" ) delimiters. // // 6. ... A character class expression is expressed as a character class name // enclosed within bracket- ( "[:" and ":]" ) delimiters. next = chars.next(); if let Some(delim) = next { expr.push(delim); if matches!(delim, '.' | '=' | ':') { let rest = chars.as_str(); let end = rest.find([delim, ']'])? + 2; expr.push_str(&rest[..end]); chars = rest[end..].chars(); } } } ']' => { // https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap09.html#tag_09_03_05 // // 1. ... The ( ']' ) shall ... terminate the bracket // expression, unless it appears in a collating symbol (such as "[.].]" ) or is // the ending for a collating symbol, equivalence class, // or character class. break; } _ => {} } next = chars.next(); } if parse_bre(&expr, RegexOptions::REGEX_OPTION_NONE).is_ok() { Some((expr, chars.as_str())) } else { None } } /// Converts a POSIX glob into a POSIX Basic Regular Expression fn glob_to_regex(pattern: &str) -> String { let mut regex = String::new(); let mut chars = pattern.chars(); while let Some(ch) = chars.next() { // https://pubs.opengroup.org/onlinepubs/9699919799/utilities/V3_chap02.html#tag_18_13 match ch { '?' => regex.push('.'), '*' => regex.push_str(".*"), '\\' => { if let Some(ch) = chars.next() { regex_push_literal(&mut regex, ch); } else { // https://pubs.opengroup.org/onlinepubs/9699919799/functions/fnmatch.html // // If pattern ends with an unescaped , fnmatch() shall return a // non-zero value (indicating either no match or an error). // // Most implementations return FNM_NOMATCH in this case, so return a regex that // never matches. return "$.".to_string(); } } '[' => { if let Some((expr, rest)) = extract_bracket_expr(chars.as_str()) { regex.push_str(&expr); chars = rest.chars(); } else { regex_push_literal(&mut regex, ch); } } _ => regex_push_literal(&mut regex, ch), } } regex } /// An fnmatch()-style glob matcher. pub struct Pattern { regex: Regex, } impl Pattern { /// Parse an fnmatch()-style glob. pub fn new(pattern: &str, caseless: bool) -> Self { let options = if caseless { RegexOptions::REGEX_OPTION_IGNORECASE } else { RegexOptions::REGEX_OPTION_NONE }; // As long as glob_to_regex() is correct, this should never fail let regex = parse_bre(&glob_to_regex(pattern), options).unwrap(); Self { regex } } /// Test if this pattern matches a string. 
pub fn matches(&self, string: &str) -> bool { self.regex.is_match(string) } } #[cfg(test)] mod tests { use super::*; #[test] fn literals() { assert_eq!(glob_to_regex(r"foo.bar"), r"foo\.bar"); } #[test] fn regex_special() { assert_eq!(glob_to_regex(r"^foo.bar$"), r"\^foo\.bar\$"); } #[test] fn wildcards() { assert_eq!(glob_to_regex(r"foo?bar*baz"), r"foo.bar.*baz"); } #[test] fn escapes() { assert_eq!(glob_to_regex(r"fo\o\?bar\*baz\\"), r"foo?bar\*baz\\"); } #[test] fn incomplete_escape() { assert_eq!(glob_to_regex(r"foo\"), r"$."); } #[test] fn valid_brackets() { assert_eq!(glob_to_regex(r"foo[bar][!baz]"), r"foo[bar][^baz]"); } #[test] fn complex_brackets() { assert_eq!( glob_to_regex(r"[!]!.*[\[.].][=]=][:space:]-]"), r"[^]!.*[\[.].][=]=][:space:]-]" ); } #[test] fn invalid_brackets() { assert_eq!(glob_to_regex(r"foo[bar[!baz"), r"foo\[bar\[!baz"); } #[test] fn pattern_matches() { assert!(Pattern::new(r"foo*bar", false).matches("foo--bar")); assert!(!Pattern::new(r"foo*bar", false).matches("bar--foo")); } #[test] fn caseless_matches() { assert!(Pattern::new(r"foo*BAR", true).matches("FOO--bar")); assert!(!Pattern::new(r"foo*BAR", true).matches("BAR--foo")); } } findutils-0.7.0/src/find/matchers/group.rs000064400000000000000000000112661046102023000166560ustar 00000000000000// This file is part of the uutils findutils package. // // For the full copyright and license information, please view the LICENSE // file that was distributed with this source code. use super::{Matcher, MatcherIO, WalkEntry}; #[cfg(unix)] use nix::unistd::Group; #[cfg(unix)] use std::os::unix::fs::MetadataExt; pub struct GroupMatcher { gid: Option, } impl GroupMatcher { #[cfg(unix)] pub fn from_group_name(group: &str) -> GroupMatcher { // get gid from group name let Ok(group) = Group::from_name(group) else { return GroupMatcher { gid: None }; }; let Some(group) = group else { // This if branch is to determine whether a certain group exists in the system. // If a certain group does not exist in the system, // the result will need to be returned according to // the flag bit of whether to invert the result. return GroupMatcher { gid: None }; }; GroupMatcher { gid: Some(group.gid.as_raw()), } } #[cfg(unix)] pub fn from_gid(gid: u32) -> GroupMatcher { GroupMatcher { gid: Some(gid) } } #[cfg(windows)] pub fn from_group_name(_group: &str) -> GroupMatcher { GroupMatcher { gid: None } } #[cfg(windows)] pub fn from_gid(_gid: u32) -> GroupMatcher { GroupMatcher { gid: None } } pub fn gid(&self) -> &Option { &self.gid } } impl Matcher for GroupMatcher { #[cfg(unix)] fn matches(&self, file_info: &WalkEntry, _: &mut MatcherIO) -> bool { let Ok(metadata) = file_info.metadata() else { return false; }; let file_gid = metadata.gid(); // When matching the -group parameter in find/matcher/mod.rs, // it has been judged that the group does not exist and an error is returned. // So use unwarp() directly here. self.gid.unwrap() == file_gid } #[cfg(windows)] fn matches(&self, _file_info: &WalkEntry, _: &mut MatcherIO) -> bool { // The user group acquisition function for Windows systems is not implemented in MetadataExt, // so it is somewhat difficult to implement it. 
:( false } } pub struct NoGroupMatcher {} impl Matcher for NoGroupMatcher { #[cfg(unix)] fn matches(&self, file_info: &WalkEntry, _: &mut MatcherIO) -> bool { use nix::unistd::Gid; if file_info.path().is_symlink() { return false; } let Ok(metadata) = file_info.metadata() else { return true; }; let Ok(gid) = Group::from_gid(Gid::from_raw(metadata.gid())) else { return true; }; let Some(_group) = gid else { return true; }; false } #[cfg(windows)] fn matches(&self, _file_info: &WalkEntry, _: &mut MatcherIO) -> bool { false } } #[cfg(test)] mod tests { #[test] #[cfg(unix)] fn test_group_matcher() { use crate::find::matchers::{group::GroupMatcher, tests::get_dir_entry_for, Matcher}; use crate::find::tests::FakeDependencies; use chrono::Local; use nix::unistd::{Gid, Group}; use std::fs::File; use std::os::unix::fs::MetadataExt; use tempfile::Builder; let deps = FakeDependencies::new(); let mut matcher_io = deps.new_matcher_io(); let temp_dir = Builder::new().prefix("group_matcher").tempdir().unwrap(); let foo_path = temp_dir.path().join("foo"); let _ = File::create(foo_path).expect("create temp file"); let file_info = get_dir_entry_for(&temp_dir.path().to_string_lossy(), "foo"); let file_gid = file_info.metadata().unwrap().gid(); let file_group = Group::from_gid(Gid::from_raw(file_gid)) .unwrap() .unwrap() .name; let matcher = super::GroupMatcher::from_group_name(file_group.as_str()); assert!( matcher.matches(&file_info, &mut matcher_io), "group should match" ); // Testing a non-existent group name let time_string = Local::now().format("%Y%m%d%H%M%S").to_string(); let matcher = GroupMatcher::from_group_name(time_string.as_str()); assert!( matcher.gid().is_none(), "group name {} should not exist", time_string ); // Testing group id let matcher = GroupMatcher::from_gid(file_gid); assert!( matcher.gid().is_some(), "group id {} should exist", file_gid ); assert!( matcher.matches(&file_info, &mut matcher_io), "group id should match" ); } } findutils-0.7.0/src/find/matchers/lname.rs000064400000000000000000000060351046102023000166140ustar 00000000000000// Copyright 2017 Google Inc. // // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file or at // https://opensource.org/licenses/MIT. use std::io::{stderr, Write}; use std::path::PathBuf; use super::glob::Pattern; use super::{Matcher, MatcherIO, WalkEntry}; fn read_link_target(file_info: &WalkEntry) -> Option { match file_info.path().read_link() { Ok(target) => Some(target), Err(err) => { // If it's not a symlink, then it's not an error that should be // shown. if err.kind() != std::io::ErrorKind::InvalidInput { writeln!( &mut stderr(), "Error reading target of {}: {}", file_info.path().display(), err ) .unwrap(); } None } } } /// This matcher makes a comparison of the link target against a shell wildcard /// pattern. See `glob::Pattern` for details on the exact syntax. 
pub struct LinkNameMatcher { pattern: Pattern, } impl LinkNameMatcher { pub fn new(pattern_string: &str, caseless: bool) -> LinkNameMatcher { let pattern = Pattern::new(pattern_string, caseless); Self { pattern } } } impl Matcher for LinkNameMatcher { fn matches(&self, file_info: &WalkEntry, _: &mut MatcherIO) -> bool { if let Some(target) = read_link_target(file_info) { self.pattern.matches(&target.to_string_lossy()) } else { false } } } #[cfg(test)] mod tests { use super::*; use crate::find::matchers::tests::get_dir_entry_for; use crate::find::tests::FakeDependencies; use std::io::ErrorKind; #[cfg(unix)] use std::os::unix::fs::symlink; #[cfg(windows)] use std::os::windows::fs::symlink_file; fn create_file_link() { #[cfg(unix)] if let Err(e) = symlink("abbbc", "test_data/links/link-f") { assert!( e.kind() == ErrorKind::AlreadyExists, "Failed to create sym link: {e:?}" ); } #[cfg(windows)] if let Err(e) = symlink_file("abbbc", "test_data/links/link-f") { assert!( e.kind() == ErrorKind::AlreadyExists, "Failed to create sym link: {:?}", e ); } } #[test] fn matches_against_link_target() { create_file_link(); let link_f = get_dir_entry_for("test_data/links", "link-f"); let matcher = LinkNameMatcher::new("ab?bc", false); let deps = FakeDependencies::new(); assert!(matcher.matches(&link_f, &mut deps.new_matcher_io())); } #[test] fn caseless_matches_against_link_target() { create_file_link(); let link_f = get_dir_entry_for("test_data/links", "link-f"); let matcher = LinkNameMatcher::new("AbB?c", true); let deps = FakeDependencies::new(); assert!(matcher.matches(&link_f, &mut deps.new_matcher_io())); } } findutils-0.7.0/src/find/matchers/logical_matchers.rs000064400000000000000000000443561046102023000210300ustar 00000000000000// Copyright 2017 Google Inc. // // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file or at // https://opensource.org/licenses/MIT. //! This modules contains the matchers used for combining other matchers and //! performing boolean logic on them (and a couple of trivial always-true and //! always-false matchers). The design is strongly tied to the precedence rules //! when parsing command-line options (e.g. "-foo -o -bar -baz" is equivalent //! to "-foo -o ( -bar -baz )", not "( -foo -o -bar ) -baz"). use std::error::Error; use std::path::Path; use super::{Matcher, MatcherIO, WalkEntry}; /// This matcher contains a collection of other matchers. A file only matches /// if it matches ALL the contained sub-matchers. For sub-matchers that have /// side effects, the side effects occur in the same order as the sub-matchers /// were pushed into the collection. pub struct AndMatcher { submatchers: Vec>, } impl AndMatcher { pub fn new(submatchers: Vec>) -> Self { Self { submatchers } } } impl Matcher for AndMatcher { /// Returns true if all sub-matchers return true. Short-circuiting does take /// place. If the nth sub-matcher returns false, then we immediately return /// and don't make any further calls. 
fn matches(&self, dir_entry: &WalkEntry, matcher_io: &mut MatcherIO) -> bool { for matcher in &self.submatchers { if !matcher.matches(dir_entry, matcher_io) { return false; } if matcher_io.should_quit() { break; } } true } fn has_side_effects(&self) -> bool { self.submatchers .iter() .any(super::Matcher::has_side_effects) } fn finished_dir(&self, dir: &Path) { for m in &self.submatchers { m.finished_dir(dir); } } fn finished(&self) { for m in &self.submatchers { m.finished(); } } } pub struct AndMatcherBuilder { submatchers: Vec>, } impl AndMatcherBuilder { pub fn new() -> Self { Self { submatchers: Vec::new(), } } pub fn new_and_condition(&mut self, matcher: impl Matcher) { self.submatchers.push(matcher.into_box()); } /// Builds a Matcher: consuming the builder in the process. pub fn build(mut self) -> Box { // special case. If there's only one submatcher, just return that directly if self.submatchers.len() == 1 { // safe to unwrap: we've just checked the size return self.submatchers.pop().unwrap(); } AndMatcher::new(self.submatchers).into_box() } } /// This matcher contains a collection of other matchers. A file matches /// if it matches any of the contained sub-matchers. For sub-matchers that have /// side effects, the side effects occur in the same order as the sub-matchers /// were pushed into the collection. pub struct OrMatcher { submatchers: Vec>, } impl OrMatcher { pub fn new(submatchers: Vec>) -> Self { Self { submatchers } } } impl Matcher for OrMatcher { /// Returns true if any sub-matcher returns true. Short-circuiting does take /// place. If the nth sub-matcher returns true, then we immediately return /// and don't make any further calls. fn matches(&self, dir_entry: &WalkEntry, matcher_io: &mut MatcherIO) -> bool { for matcher in &self.submatchers { if matcher.matches(dir_entry, matcher_io) { return true; } if matcher_io.should_quit() { break; } } false } fn has_side_effects(&self) -> bool { self.submatchers .iter() .any(super::Matcher::has_side_effects) } fn finished_dir(&self, dir: &Path) { for m in &self.submatchers { m.finished_dir(dir); } } fn finished(&self) { for m in &self.submatchers { m.finished(); } } } pub struct OrMatcherBuilder { submatchers: Vec, } impl OrMatcherBuilder { pub fn new_and_condition(&mut self, matcher: impl Matcher) { // safe to unwrap. submatchers always has at least one member self.submatchers .last_mut() .unwrap() .new_and_condition(matcher); } pub fn new_or_condition(&mut self, arg: &str) -> Result<(), Box> { if self.submatchers.last().unwrap().submatchers.is_empty() { return Err(From::from(format!( "invalid expression; you have used a binary operator \ '{arg}' with nothing before it." ))); } self.submatchers.push(AndMatcherBuilder::new()); Ok(()) } pub fn new() -> Self { let mut o = Self { submatchers: Vec::new(), }; o.submatchers.push(AndMatcherBuilder::new()); o } /// Builds a Matcher: consuming the builder in the process. pub fn build(mut self) -> Box { // Special case: if there's only one submatcher, just return that directly if self.submatchers.len() == 1 { // safe to unwrap: we've just checked the size return self.submatchers.pop().unwrap().build(); } let mut submatchers = vec![]; for x in self.submatchers { submatchers.push(x.build()); } OrMatcher::new(submatchers).into_box() } } /// This matcher contains a collection of other matchers. In contrast to /// `OrMatcher` and `AndMatcher`, all the submatcher objects are called /// regardless of the results of previous submatchers. 
This is primarily used /// for submatchers with side-effects. For such sub-matchers the side effects /// occur in the same order as the sub-matchers were pushed into the collection. pub struct ListMatcher { submatchers: Vec>, } impl ListMatcher { pub fn new(submatchers: Vec>) -> Self { Self { submatchers } } } impl Matcher for ListMatcher { /// Calls matches on all submatcher objects, with no short-circuiting. /// Returns the result of the call to the final submatcher fn matches(&self, dir_entry: &WalkEntry, matcher_io: &mut MatcherIO) -> bool { let mut rc = false; for matcher in &self.submatchers { rc = matcher.matches(dir_entry, matcher_io); if matcher_io.should_quit() { break; } } rc } fn has_side_effects(&self) -> bool { self.submatchers .iter() .any(super::Matcher::has_side_effects) } fn finished_dir(&self, dir: &Path) { for m in &self.submatchers { m.finished_dir(dir); } } fn finished(&self) { for m in &self.submatchers { m.finished(); } } } pub struct ListMatcherBuilder { submatchers: Vec, } impl ListMatcherBuilder { pub fn new_and_condition(&mut self, matcher: impl Matcher) { // safe to unwrap. submatchers always has at least one member self.submatchers .last_mut() .unwrap() .new_and_condition(matcher); } pub fn new_or_condition(&mut self, arg: &str) -> Result<(), Box> { self.submatchers.last_mut().unwrap().new_or_condition(arg) } pub fn check_new_and_condition(&mut self) -> Result<(), Box> { { let child_or_matcher = &self.submatchers.last().unwrap(); let grandchild_and_matcher = &child_or_matcher.submatchers.last().unwrap(); if grandchild_and_matcher.submatchers.is_empty() { return Err(From::from( "invalid expression; you have used a binary operator '-a' \ with nothing before it.", )); } } Ok(()) } pub fn new_list_condition(&mut self) -> Result<(), Box> { { let child_or_matcher = &self.submatchers.last().unwrap(); let grandchild_and_matcher = &child_or_matcher.submatchers.last().unwrap(); if grandchild_and_matcher.submatchers.is_empty() { return Err(From::from( "invalid expression; you have used a binary operator ',' \ with nothing before it.", )); } } self.submatchers.push(OrMatcherBuilder::new()); Ok(()) } pub fn new() -> Self { let mut o = Self { submatchers: Vec::new(), }; o.submatchers.push(OrMatcherBuilder::new()); o } /// Builds a Matcher: consuming the builder in the process. pub fn build(mut self) -> Box { // Special case: if there's only one submatcher, just return that directly if self.submatchers.len() == 1 { // safe to unwrap: we've just checked the size return self.submatchers.pop().unwrap().build(); } let mut submatchers = vec![]; for x in self.submatchers { submatchers.push(x.build()); } Box::new(ListMatcher::new(submatchers)) } } /// A simple matcher that always matches. pub struct TrueMatcher; impl Matcher for TrueMatcher { fn matches(&self, _dir_entry: &WalkEntry, _: &mut MatcherIO) -> bool { true } } /// A simple matcher that never matches. pub struct FalseMatcher; impl Matcher for FalseMatcher { fn matches(&self, _dir_entry: &WalkEntry, _: &mut MatcherIO) -> bool { false } } /// Matcher that wraps another matcher and inverts matching criteria. 
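// Usage sketch: `NotMatcher::new(TrueMatcher)` matches nothing and
// `NotMatcher::new(FalseMatcher)` matches everything (see `not_matches_works`
// below); this provides the negation used for find's `!` / `-not` operator.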
pub struct NotMatcher { submatcher: Box, } impl NotMatcher { pub fn new(submatcher: impl Matcher) -> Self { Self { submatcher: submatcher.into_box(), } } } impl Matcher for NotMatcher { fn matches(&self, dir_entry: &WalkEntry, matcher_io: &mut MatcherIO) -> bool { !self.submatcher.matches(dir_entry, matcher_io) } fn has_side_effects(&self) -> bool { self.submatcher.has_side_effects() } fn finished_dir(&self, dir: &Path) { self.submatcher.finished_dir(dir); } fn finished(&self) { self.submatcher.finished(); } } #[cfg(test)] mod tests { use super::*; use crate::find::matchers::quit::QuitMatcher; use crate::find::matchers::tests::get_dir_entry_for; use crate::find::tests::FakeDependencies; use std::cell::RefCell; use std::rc::Rc; /// Simple Matcher impl that has side effects pub struct HasSideEffects; impl Matcher for HasSideEffects { fn matches(&self, _: &WalkEntry, _: &mut MatcherIO) -> bool { false } fn has_side_effects(&self) -> bool { true } } /// Matcher that counts its invocations struct Counter(Rc>); impl Matcher for Counter { fn matches(&self, _: &WalkEntry, _: &mut MatcherIO) -> bool { *self.0.borrow_mut() += 1; true } } #[test] fn and_matches_works() { let abbbc = get_dir_entry_for("test_data/simple", "abbbc"); let mut builder = AndMatcherBuilder::new(); let deps = FakeDependencies::new(); // start with one matcher returning true builder.new_and_condition(TrueMatcher); assert!(builder.build().matches(&abbbc, &mut deps.new_matcher_io())); builder = AndMatcherBuilder::new(); builder.new_and_condition(TrueMatcher); builder.new_and_condition(FalseMatcher); assert!(!builder.build().matches(&abbbc, &mut deps.new_matcher_io())); } #[test] fn or_matches_works() { let abbbc = get_dir_entry_for("test_data/simple", "abbbc"); let mut builder = OrMatcherBuilder::new(); let deps = FakeDependencies::new(); // start with one matcher returning false builder.new_and_condition(FalseMatcher); assert!(!builder.build().matches(&abbbc, &mut deps.new_matcher_io())); let mut builder = OrMatcherBuilder::new(); builder.new_and_condition(FalseMatcher); builder.new_or_condition("-o").unwrap(); builder.new_and_condition(TrueMatcher); assert!(builder.build().matches(&abbbc, &mut deps.new_matcher_io())); } #[test] fn list_matches_works() { let abbbc = get_dir_entry_for("test_data/simple", "abbbc"); let mut builder = ListMatcherBuilder::new(); let deps = FakeDependencies::new(); // result should always match that of the last pushed submatcher builder.new_and_condition(FalseMatcher); assert!(!builder.build().matches(&abbbc, &mut deps.new_matcher_io())); builder = ListMatcherBuilder::new(); builder.new_and_condition(FalseMatcher); builder.new_list_condition().unwrap(); builder.new_and_condition(TrueMatcher); assert!(builder.build().matches(&abbbc, &mut deps.new_matcher_io())); builder = ListMatcherBuilder::new(); builder.new_and_condition(FalseMatcher); builder.new_list_condition().unwrap(); builder.new_and_condition(TrueMatcher); builder.new_list_condition().unwrap(); builder.new_and_condition(FalseMatcher); assert!(!builder.build().matches(&abbbc, &mut deps.new_matcher_io())); } #[test] fn true_matches_works() { let abbbc = get_dir_entry_for("test_data/simple", "abbbc"); let matcher = TrueMatcher {}; let deps = FakeDependencies::new(); assert!(matcher.matches(&abbbc, &mut deps.new_matcher_io())); } #[test] fn false_matches_works() { let abbbc = get_dir_entry_for("test_data/simple", "abbbc"); let matcher = FalseMatcher {}; let deps = FakeDependencies::new(); assert!(!matcher.matches(&abbbc, &mut 
deps.new_matcher_io())); } #[test] fn and_has_side_effects_works() { let mut builder = AndMatcherBuilder::new(); // start with one matcher with no side effects false builder.new_and_condition(TrueMatcher); assert!(!builder.build().has_side_effects()); builder = AndMatcherBuilder::new(); builder.new_and_condition(TrueMatcher); builder.new_and_condition(HasSideEffects); assert!(builder.build().has_side_effects()); } #[test] fn or_has_side_effects_works() { let mut builder = OrMatcherBuilder::new(); // start with one matcher with no side effects false builder.new_and_condition(TrueMatcher); assert!(!builder.build().has_side_effects()); builder = OrMatcherBuilder::new(); builder.new_and_condition(TrueMatcher); builder.new_and_condition(HasSideEffects); assert!(builder.build().has_side_effects()); } #[test] fn list_has_side_effects_works() { let mut builder = ListMatcherBuilder::new(); // start with one matcher with no side effects false builder.new_and_condition(TrueMatcher); assert!(!builder.build().has_side_effects()); builder = ListMatcherBuilder::new(); builder.new_and_condition(TrueMatcher); builder.new_and_condition(HasSideEffects); assert!(builder.build().has_side_effects()); } #[test] fn true_has_side_effects_works() { let matcher = TrueMatcher {}; assert!(!matcher.has_side_effects()); } #[test] fn false_has_side_effects_works() { let matcher = FalseMatcher {}; assert!(!matcher.has_side_effects()); } #[test] fn not_matches_works() { let abbbc = get_dir_entry_for("test_data/simple", "abbbc"); let not_true = NotMatcher::new(TrueMatcher); let not_false = NotMatcher::new(FalseMatcher); let deps = FakeDependencies::new(); assert!(!not_true.matches(&abbbc, &mut deps.new_matcher_io())); assert!(not_false.matches(&abbbc, &mut deps.new_matcher_io())); } #[test] fn not_has_side_effects_works() { let has_fx = NotMatcher::new(HasSideEffects); let has_no_fx = NotMatcher::new(FalseMatcher); assert!(has_fx.has_side_effects()); assert!(!has_no_fx.has_side_effects()); } #[test] fn and_quit_works() { let abbbc = get_dir_entry_for("test_data/simple", "abbbc"); let mut builder = AndMatcherBuilder::new(); let deps = FakeDependencies::new(); let before = Rc::new(RefCell::new(0)); let after = Rc::new(RefCell::new(0)); builder.new_and_condition(Counter(before.clone())); builder.new_and_condition(QuitMatcher); builder.new_and_condition(Counter(after.clone())); builder.build().matches(&abbbc, &mut deps.new_matcher_io()); assert_eq!(*before.borrow(), 1); assert_eq!(*after.borrow(), 0); } #[test] fn or_quit_works() { let abbbc = get_dir_entry_for("test_data/simple", "abbbc"); let mut builder = OrMatcherBuilder::new(); let deps = FakeDependencies::new(); let before = Rc::new(RefCell::new(0)); let after = Rc::new(RefCell::new(0)); builder.new_and_condition(Counter(before.clone())); builder.new_or_condition("-o").unwrap(); builder.new_and_condition(QuitMatcher); builder.new_or_condition("-o").unwrap(); builder.new_and_condition(Counter(after.clone())); builder.build().matches(&abbbc, &mut deps.new_matcher_io()); assert_eq!(*before.borrow(), 1); assert_eq!(*after.borrow(), 0); } #[test] fn list_quit_works() { let abbbc = get_dir_entry_for("test_data/simple", "abbbc"); let mut builder = ListMatcherBuilder::new(); let deps = FakeDependencies::new(); let before = Rc::new(RefCell::new(0)); let after = Rc::new(RefCell::new(0)); builder.new_and_condition(Counter(before.clone())); builder.new_list_condition().unwrap(); builder.new_and_condition(QuitMatcher); builder.new_list_condition().unwrap(); 
builder.new_and_condition(Counter(after.clone())); builder.build().matches(&abbbc, &mut deps.new_matcher_io()); assert_eq!(*before.borrow(), 1); assert_eq!(*after.borrow(), 0); } } findutils-0.7.0/src/find/matchers/ls.rs000064400000000000000000000175161046102023000161440ustar 00000000000000// This file is part of the uutils findutils package. // // For the full copyright and license information, please view the LICENSE // file that was distributed with this source code. use chrono::DateTime; use std::{ fs::File, io::{stderr, Write}, }; use super::{Matcher, MatcherIO, WalkEntry}; #[cfg(unix)] fn format_permissions(mode: uucore::libc::mode_t) -> String { let file_type = match mode & (uucore::libc::S_IFMT as uucore::libc::mode_t) { uucore::libc::S_IFDIR => "d", uucore::libc::S_IFREG => "-", _ => "?", }; // S_$$USR means "user permissions" let user_perms = format!( "{}{}{}", if mode & uucore::libc::S_IRUSR != 0 { "r" } else { "-" }, if mode & uucore::libc::S_IWUSR != 0 { "w" } else { "-" }, if mode & uucore::libc::S_IXUSR != 0 { "x" } else { "-" } ); // S_$$GRP means "group permissions" let group_perms = format!( "{}{}{}", if mode & uucore::libc::S_IRGRP != 0 { "r" } else { "-" }, if mode & uucore::libc::S_IWGRP != 0 { "w" } else { "-" }, if mode & uucore::libc::S_IXGRP != 0 { "x" } else { "-" } ); // S_$$OTH means "other permissions" let other_perms = format!( "{}{}{}", if mode & uucore::libc::S_IROTH != 0 { "r" } else { "-" }, if mode & uucore::libc::S_IWOTH != 0 { "w" } else { "-" }, if mode & uucore::libc::S_IXOTH != 0 { "x" } else { "-" } ); format!("{}{}{}{}", file_type, user_perms, group_perms, other_perms) } #[cfg(windows)] fn format_permissions(file_attributes: u32) -> String { let mut attributes = Vec::new(); // https://learn.microsoft.com/en-us/windows/win32/fileio/file-attribute-constants if file_attributes & 0x0001 != 0 { attributes.push("read-only"); } if file_attributes & 0x0002 != 0 { attributes.push("hidden"); } if file_attributes & 0x0004 != 0 { attributes.push("system"); } if file_attributes & 0x0020 != 0 { attributes.push("archive"); } if file_attributes & 0x0040 != 0 { attributes.push("compressed"); } if file_attributes & 0x0080 != 0 { attributes.push("offline"); } attributes.join(", ") } pub struct Ls { output_file: Option, } impl Ls { pub fn new(output_file: Option) -> Self { Self { output_file } } #[cfg(unix)] fn print(&self, file_info: &WalkEntry, mut out: impl Write, print_error_message: bool) { use nix::unistd::{Gid, Group, Uid, User}; use std::os::unix::fs::{MetadataExt, PermissionsExt}; let metadata = file_info.metadata().unwrap(); let inode_number = metadata.ino(); let number_of_blocks = { let size = metadata.size(); let number_of_blocks = size / 1024; let remainder = number_of_blocks % 4; if remainder == 0 { if number_of_blocks == 0 { 4 } else { number_of_blocks } } else { number_of_blocks + (4 - (remainder)) } }; let permission = { format_permissions(metadata.permissions().mode() as uucore::libc::mode_t) }; let hard_links = metadata.nlink(); let user = { let uid = metadata.uid(); User::from_uid(Uid::from_raw(uid)).unwrap().unwrap().name }; let group = { let gid = metadata.gid(); Group::from_gid(Gid::from_raw(gid)).unwrap().unwrap().name }; let size = metadata.size(); let last_modified = { let system_time = metadata.modified().unwrap(); let now_utc: DateTime = system_time.into(); now_utc.format("%b %e %H:%M") }; let path = file_info.path().to_string_lossy(); match writeln!( out, " {:<4} {:>6} {:<10} {:>3} {:<8} {:<8} {:>8} {} {}", inode_number, number_of_blocks, 
permission, hard_links, user, group, size, last_modified, path, ) { Ok(_) => {} Err(e) => { if print_error_message { writeln!( &mut stderr(), "Error writing {:?} for {}", file_info.path().to_string_lossy(), e ) .unwrap(); uucore::error::set_exit_code(1); } } } } #[cfg(windows)] fn print(&self, file_info: &WalkEntry, mut out: impl Write, print_error_message: bool) { use std::os::windows::fs::MetadataExt; let metadata = file_info.metadata().unwrap(); let inode_number = 0; let number_of_blocks = { let size = metadata.file_size(); let number_of_blocks = size / 1024; let remainder = number_of_blocks % 4; if remainder == 0 { if number_of_blocks == 0 { 4 } else { number_of_blocks } } else { number_of_blocks + (4 - (remainder)) } }; let permission = { format_permissions(metadata.file_attributes()) }; let hard_links = 0; let user = 0; let group = 0; let size = metadata.file_size(); let last_modified = { let system_time = metadata.modified().unwrap(); let now_utc: DateTime = system_time.into(); now_utc.format("%b %e %H:%M") }; let path = file_info.path().to_string_lossy(); match write!( out, " {:<4} {:>6} {:<10} {:>3} {:<8} {:<8} {:>8} {} {}\n", inode_number, number_of_blocks, permission, hard_links, user, group, size, last_modified, path, ) { Ok(_) => {} Err(e) => { if print_error_message { writeln!( &mut stderr(), "Error writing {:?} for {}", file_info.path().to_string_lossy(), e ) .unwrap(); uucore::error::set_exit_code(1); } } } } } impl Matcher for Ls { fn matches(&self, file_info: &WalkEntry, matcher_io: &mut MatcherIO) -> bool { if let Some(file) = &self.output_file { self.print(file_info, file, true); } else { self.print( file_info, &mut *matcher_io.deps.get_output().borrow_mut(), false, ); } true } fn has_side_effects(&self) -> bool { true } } #[cfg(test)] mod tests { #[test] #[cfg(unix)] fn test_format_permissions() { use super::format_permissions; let mode: uucore::libc::mode_t = 0o100644; let expected = "-rw-r--r--"; assert_eq!(format_permissions(mode), expected); let mode: uucore::libc::mode_t = 0o040755; let expected = "drwxr-xr-x"; assert_eq!(format_permissions(mode), expected); let mode: uucore::libc::mode_t = 0o100777; let expected = "-rwxrwxrwx"; assert_eq!(format_permissions(mode), expected); } } findutils-0.7.0/src/find/matchers/mod.rs000064400000000000000000001700611046102023000163000ustar 00000000000000// Copyright 2017 Google Inc. // // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file or at // https://opensource.org/licenses/MIT. 
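// Illustrative sketch: two small unit tests exercising helpers defined further
// down in this module -- `ComparableValue`, which backs numeric arguments of
// the form `+N` / `-N` / `N`, and `Follow`, which models the -P / -H / -L
// symlink-handling flags. The module name `overview_examples` is arbitrary.
#[cfg(test)]
mod overview_examples {
    use super::{ComparableValue, Follow};

    #[test]
    fn comparable_value_prefixes() {
        // `+2` means "more than 2", `-2` means "less than 2", plain `2` means "exactly 2".
        assert!(ComparableValue::MoreThan(2).matches(3));
        assert!(!ComparableValue::MoreThan(2).matches(2));
        assert!(ComparableValue::LessThan(2).matches(1));
        assert!(ComparableValue::EqualTo(2).matches(2));
        // The signed variant treats any negative value as "less than" the limit.
        assert!(ComparableValue::LessThan(0).imatches(-5));
    }

    #[test]
    fn follow_matches_cli_flags() {
        // -P never follows, -H follows only root paths (depth 0), -L always follows.
        assert!(!Follow::Never.follow_at_depth(0));
        assert!(Follow::Roots.follow_at_depth(0));
        assert!(!Follow::Roots.follow_at_depth(1));
        assert!(Follow::Always.follow_at_depth(7));
    }
}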
mod access; mod delete; mod empty; mod entry; pub mod exec; pub mod fs; mod glob; mod group; mod lname; mod logical_matchers; mod ls; mod name; mod path; mod perm; mod printer; mod printf; mod prune; mod quit; mod regex; mod samefile; mod size; #[cfg(unix)] mod stat; pub mod time; mod type_matcher; mod user; use ::regex::Regex; use chrono::{DateTime, Datelike, NaiveDateTime, Utc}; use fs::FileSystemMatcher; use ls::Ls; use std::fs::{File, Metadata}; use std::path::Path; use std::time::SystemTime; use std::{error::Error, str::FromStr}; use self::access::AccessMatcher; use self::delete::DeleteMatcher; use self::empty::EmptyMatcher; use self::exec::SingleExecMatcher; use self::group::{GroupMatcher, NoGroupMatcher}; use self::lname::LinkNameMatcher; use self::logical_matchers::{ AndMatcherBuilder, FalseMatcher, ListMatcherBuilder, NotMatcher, TrueMatcher, }; use self::name::NameMatcher; use self::path::PathMatcher; use self::perm::PermMatcher; use self::printer::{PrintDelimiter, Printer}; use self::printf::Printf; use self::prune::PruneMatcher; use self::quit::QuitMatcher; use self::regex::RegexMatcher; use self::samefile::SameFileMatcher; use self::size::SizeMatcher; #[cfg(unix)] use self::stat::{InodeMatcher, LinksMatcher}; use self::time::{ FileAgeRangeMatcher, FileTimeMatcher, FileTimeType, NewerMatcher, NewerOptionMatcher, NewerOptionType, NewerTimeMatcher, }; use self::type_matcher::{TypeMatcher, XtypeMatcher}; use self::user::{NoUserMatcher, UserMatcher}; use super::{Config, Dependencies}; pub use entry::{FileType, WalkEntry, WalkError}; /// Symlink following mode. #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub enum Follow { /// Never follow symlinks (-P; default). Never, /// Follow symlinks on root paths only (-H). Roots, /// Always follow symlinks (-L). Always, } impl Follow { /// Check whether to follow a path of the given depth. pub fn follow_at_depth(self, depth: usize) -> bool { match self { Follow::Never => false, Follow::Roots => depth == 0, Follow::Always => true, } } /// Get metadata for a [WalkEntry]. pub fn metadata(self, entry: &WalkEntry) -> Result { if self.follow_at_depth(entry.depth()) == entry.follow() { // Same follow flag, re-use cached metadata entry.metadata().cloned() } else if !entry.follow() && !entry.file_type().is_symlink() { // Not a symlink, re-use cached metadata entry.metadata().cloned() } else if entry.follow() && entry.file_type().is_symlink() { // Broken symlink, re-use cached metadata entry.metadata().cloned() } else { self.metadata_at_depth(entry.path(), entry.depth()) } } /// Get metadata for a path from the command line. pub fn root_metadata(self, path: impl AsRef) -> Result { self.metadata_at_depth(path, 0) } /// Get metadata for a path, following symlinks as necessary. pub fn metadata_at_depth( self, path: impl AsRef, depth: usize, ) -> Result { let path = path.as_ref(); if self.follow_at_depth(depth) { match path.metadata().map_err(WalkError::from) { Ok(meta) => return Ok(meta), Err(e) if !e.is_not_found() => return Err(e), _ => {} } } Ok(path.symlink_metadata()?) } } /// Struct holding references to outputs and any inputs that can't be derived /// from the file/directory info. 
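// A `MatcherIO` (constructed via `MatcherIO::new(&deps)` or the test helper
// `deps.new_matcher_io()`) is threaded through the matcher chain as entries
// are visited; matchers use it to request that the current directory be
// skipped (prune), to stop the walk entirely (quit), and to record a
// non-zero exit code for the overall run.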
pub struct MatcherIO<'a> { should_skip_dir: bool, exit_code: i32, quit: bool, deps: &'a dyn Dependencies, } impl<'a> MatcherIO<'a> { pub fn new(deps: &dyn Dependencies) -> MatcherIO<'_> { MatcherIO { should_skip_dir: false, exit_code: 0, quit: false, deps, } } pub fn mark_current_dir_to_be_skipped(&mut self) { self.should_skip_dir = true; } #[must_use] pub fn should_skip_current_dir(&self) -> bool { self.should_skip_dir } pub fn set_exit_code(&mut self, code: i32) { self.exit_code = code; } #[must_use] pub fn exit_code(&self) -> i32 { self.exit_code } pub fn quit(&mut self) { self.quit = true; } #[must_use] pub fn should_quit(&self) -> bool { self.quit } #[must_use] pub fn now(&self) -> SystemTime { self.deps.now() } } /// A basic interface that can be used to determine whether a directory entry /// is what's being searched for. To a first order approximation, find consists /// of building a chain of Matcher objects, and then walking a directory tree, /// passing each entry to the chain of Matchers. pub trait Matcher: 'static { /// Boxes this matcher as a trait object. fn into_box(self) -> Box where Self: Sized, { Box::new(self) } /// Returns whether the given file matches the object's predicate. fn matches(&self, entry: &WalkEntry, matcher_io: &mut MatcherIO) -> bool; /// Returns whether the matcher has any side-effects (e.g. executing a /// command, deleting a file). Iff no such matcher exists in the chain, then /// the filename will be printed to stdout. While this is a compile-time /// fact for most matchers, it's run-time for matchers that contain a /// collection of sub-Matchers. fn has_side_effects(&self) -> bool { // most matchers don't have side-effects, so supply a default implementation. false } /// Notification that find has finished processing a given directory. fn finished_dir(&self, _finished_directory: &Path) {} /// Notification that find has finished processing all directories - /// allowing for any cleanup that isn't suitable for destructors (e.g. /// blocking calls, I/O etc.) fn finished(&self) {} } impl Matcher for Box { fn into_box(self) -> Box { self } fn matches(&self, entry: &WalkEntry, matcher_io: &mut MatcherIO) -> bool { (**self).matches(entry, matcher_io) } fn has_side_effects(&self) -> bool { (**self).has_side_effects() } fn finished_dir(&self, finished_directory: &Path) { (**self).finished_dir(finished_directory); } fn finished(&self) { (**self).finished(); } } pub enum ComparableValue { MoreThan(u64), EqualTo(u64), LessThan(u64), } impl ComparableValue { fn matches(&self, value: u64) -> bool { match *self { ComparableValue::MoreThan(limit) => value > limit, ComparableValue::EqualTo(limit) => value == limit, ComparableValue::LessThan(limit) => value < limit, } } /// same as matches, but takes a signed value fn imatches(&self, value: i64) -> bool { match *self { ComparableValue::MoreThan(limit) => value >= 0 && (value as u64) > limit, ComparableValue::EqualTo(limit) => value >= 0 && (value as u64) == limit, ComparableValue::LessThan(limit) => value < 0 || (value as u64) < limit, } } } /// Builds a single `AndMatcher` containing the Matcher objects corresponding /// to the passed in predicate arguments. 
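// For example, an invocation such as `find . -name foo` contains no
// side-effecting action, so the function below wraps the parsed chain in a
// fresh `AndMatcher` ending in a `Printer`, making it equivalent to
// `find . -name foo -print`.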
pub fn build_top_level_matcher( args: &[&str], config: &mut Config, ) -> Result, Box> { let (_, top_level_matcher) = (build_matcher_tree(args, config, 0, false))?; // if the matcher doesn't have any side-effects, then we default to printing if !top_level_matcher.has_side_effects() { let mut new_and_matcher = AndMatcherBuilder::new(); new_and_matcher.new_and_condition(top_level_matcher); new_and_matcher.new_and_condition(Printer::new(PrintDelimiter::Newline, None)); return Ok(new_and_matcher.build()); } Ok(top_level_matcher) } /// Helper function for `build_matcher_tree`. fn are_more_expressions(args: &[&str], index: usize) -> bool { (index < args.len() - 1) && args[index + 1] != ")" } fn convert_arg_to_number( option_name: &str, value_as_string: &str, ) -> Result> { match value_as_string.parse::() { Ok(val) => Ok(val), _ => Err(From::from(format!( "Expected a positive decimal integer argument to {option_name}, but got \ `{value_as_string}'" ))), } } fn convert_arg_to_comparable_value( option_name: &str, value_as_string: &str, ) -> Result> { let re = Regex::new(r"^([+-]?)(\d+)$")?; if let Some(groups) = re.captures(value_as_string) { if let Ok(val) = groups[2].parse::() { return Ok(match &groups[1] { "+" => ComparableValue::MoreThan(val), "-" => ComparableValue::LessThan(val), _ => ComparableValue::EqualTo(val), }); } } Err(From::from(format!( "Expected a decimal integer (with optional + or - prefix) argument \ to {option_name}, but got `{value_as_string}'" ))) } fn convert_arg_to_comparable_value_and_suffix( option_name: &str, value_as_string: &str, ) -> Result<(ComparableValue, String), Box> { let re = Regex::new(r"([+-]?)(\d+)(.*)$")?; if let Some(groups) = re.captures(value_as_string) { if let Ok(val) = groups[2].parse::() { return Ok(( match &groups[1] { "+" => ComparableValue::MoreThan(val), "-" => ComparableValue::LessThan(val), _ => ComparableValue::EqualTo(val), }, groups[3].to_string(), )); } } Err(From::from(format!( "Expected a decimal integer (with optional + or - prefix) and \ (optional suffix) argument to {option_name}, but got `{value_as_string}'" ))) } /// This is a function that converts a specific string format into a timestamp. /// It allows converting a time string of /// "(week abbreviation) (date), (year) (time)" to a Unix timestamp. /// such as: "jan 01, 2025 00:00:01" -> 1735689601000 /// When (time) is not provided, it will be automatically filled in as 00:00:00 /// such as: "jan 01, 2025" = "jan 01, 2025 00:00:00" -> 1735689600000 fn parse_date_str_to_timestamps(date_str: &str) -> Option { let regex_pattern = r"^(?P\w{3} \d{2})?(?:, (?P\d{4}))?(?: (?P