pax_global_header00006660000000000000000000000064142732077060014522gustar00rootroot0000000000000052 comment=f319b91b56ad8a22d6c4aaf6f266fffaaee65ce6 xor_name-5.0.0/000077500000000000000000000000001427320770600133345ustar00rootroot00000000000000xor_name-5.0.0/.github/000077500000000000000000000000001427320770600146745ustar00rootroot00000000000000xor_name-5.0.0/.github/ISSUE_TEMPLATE/000077500000000000000000000000001427320770600170575ustar00rootroot00000000000000xor_name-5.0.0/.github/ISSUE_TEMPLATE/bug_report.md000066400000000000000000000020301427320770600215440ustar00rootroot00000000000000--- name: Bug report about: Create a report to help us improve title: '' labels: bug assignees: '' --- Thank you for contributing to the project! We recommend you check out our ["Contributing to the SAFE Network"](https://github.com/maidsafe/QA/blob/master/CONTRIBUTING.md) guide if you haven't already. **Describe the bug** A clear and concise description of what the bug is. **To Reproduce** Steps to reproduce the behavior: 1. Go to '...' 2. Click on '....' 3. Scroll down to '....' 4. See error **Expected behavior** A clear and concise description of what you expected to happen. **Screenshots** If applicable, add screenshots to help explain your problem. **Desktop (please complete the following information):** - OS: [e.g. Ubuntu 18.04.4] - Browser [e.g. chrome, safari] - Version [e.g. 22] **Smartphone (please complete the following information):** - Device: [e.g. iPhone6] - OS: [e.g. iOS8.1] - Browser [e.g. stock browser, safari] - Version [e.g. 22] **Additional context** Add any other context about the problem here. xor_name-5.0.0/.github/ISSUE_TEMPLATE/feature_request.md000066400000000000000000000014461427320770600226110ustar00rootroot00000000000000--- name: Feature request about: Suggest an idea for this project title: '' labels: enhancement assignees: '' --- Thank you for contributing to the project! We recommend you check out our ["Contributing to the SAFE Network"](https://github.com/maidsafe/QA/blob/master/CONTRIBUTING.md) guide if you haven't already. **Is your feature request related to a problem? Please describe.** A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] **Describe the solution you'd like** A clear and concise description of what you want to happen. **Describe alternatives you've considered** A clear and concise description of any alternative solutions or features you've considered. **Additional context** Add any other context or screenshots about the feature request here. xor_name-5.0.0/.github/workflows/000077500000000000000000000000001427320770600167315ustar00rootroot00000000000000xor_name-5.0.0/.github/workflows/auto_merge_prs.yml000066400000000000000000000021101427320770600224610ustar00rootroot00000000000000# auto merge workflow. # # Auto merge PR if commit msg begins with `chore(release):`, # or if it has been raised by Dependabot. # Uses https://github.com/ridedott/merge-me-action. 
name: Merge Version Change and Dependabot PRs automatically on: pull_request jobs: merge: runs-on: ubuntu-20.04 steps: - uses: actions/checkout@v2 with: fetch-depth: '0' - name: get commit message run: | commitmsg=$(git log --format=%s -n 1 ${{ github.event.pull_request.head.sha }}) echo "commitmsg=${commitmsg}" >> $GITHUB_ENV - name: show commit message run : echo $commitmsg - name: Merge Version change PR if: startsWith( env.commitmsg, 'chore(release):') uses: ridedott/merge-me-action@81667e6ae186ddbe6d3c3186d27d91afa7475e2c with: GITHUB_LOGIN: dirvine GITHUB_TOKEN: ${{ secrets.MERGE_BUMP_BRANCH_TOKEN }} MERGE_METHOD: REBASE - name: Dependabot Merge uses: ridedott/merge-me-action@master with: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} MERGE_METHOD: REBASE xor_name-5.0.0/.github/workflows/bump_version.yml000066400000000000000000000011161427320770600221630ustar00rootroot00000000000000name: Version bump and create PR for changes on: # Trigger the workflow on push only for the master branch push: branches: - master env: NODE_ENV: 'development' jobs: update_changelog: runs-on: ubuntu-20.04 # Dont run if we're on a release commit if: "!startsWith(github.event.head_commit.message, 'chore(release):')" steps: - uses: actions/checkout@v2 with: fetch-depth: '0' - name: Bump Version uses: maidsafe/rust-version-bump-branch-creator@v2 with: token: ${{ secrets.BRANCH_CREATOR_TOKEN }} xor_name-5.0.0/.github/workflows/commitlint.yml000066400000000000000000000004621427320770600216350ustar00rootroot00000000000000name: Commitlint on: [pull_request] jobs: lint: runs-on: ubuntu-latest env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} steps: - uses: actions/checkout@v2 with: fetch-depth: 0 - uses: wagoid/commitlint-github-action@f114310111fdbd07e99f47f9ca13d62b3ec98372 xor_name-5.0.0/.github/workflows/github_release.yml000066400000000000000000000017111427320770600224360ustar00rootroot00000000000000name: Create GitHub Release on: push: tags: - 'v*' jobs: release: # only if we have a tag name: Release runs-on: ubuntu-20.04 if: "startsWith(github.event.head_commit.message, 'chore(release):')" steps: - uses: actions/checkout@v2 with: fetch-depth: '0' - name: Set tag as env shell: bash run: echo "RELEASE_VERSION=$(echo ${GITHUB_REF:10})" >> $GITHUB_ENV - name: lets check tag shell: bash run: echo ${{ env.RELEASE_VERSION }} - name: Generate Changelog shell: bash run: awk '/# \[/{c++;p=1}{if(c==2){exit}}p;' CHANGELOG.md > RELEASE-CHANGELOG.txt - run: cat RELEASE-CHANGELOG.txt - name: Release generation uses: softprops/action-gh-release@91409e712cf565ce9eff10c87a8d1b11b81757ae env: GITHUB_TOKEN: ${{ secrets.MERGE_BUMP_BRANCH_TOKEN }} with: body_path: RELEASE-CHANGELOG.txt xor_name-5.0.0/.github/workflows/master.yml000066400000000000000000000034051427320770600207510ustar00rootroot00000000000000# Push to master workflow. # # Runs when a PR has been merged to the master branch. # # 1. Generates a release build. # 2. If the last commit is a version change, publish. name: Master on: push: branches: - master env: # Run all cargo commands with --verbose. CARGO_TERM_VERBOSE: true RUST_BACKTRACE: 1 jobs: build: name: Build runs-on: ${{ matrix.os }} strategy: matrix: os: [ubuntu-latest, windows-latest, macOS-latest] steps: - uses: actions/checkout@v2 - name: Install Rust uses: actions-rs/toolchain@v1 with: profile: minimal toolchain: stable override: true # Cache. 
- name: Cargo cache registry, index and build uses: actions/cache@v2.1.4 with: path: | ~/.cargo/registry ~/.cargo/git target key: ${{ runner.os }}-cargo-cache-${{ hashFiles('**/Cargo.lock') }} # Make sure the code builds. - name: Run cargo build run: cargo build --release --workspace # Publish if we're on a release commit publish: name: Publish runs-on: ubuntu-latest needs: build if: "startsWith(github.event.head_commit.message, 'chore(release):')" steps: - uses: actions/checkout@v2 # checkout with fetch-depth: '0' to be sure to retrieve all commits to look for the semver commit message with: fetch-depth: '0' # Install Rust - uses: actions-rs/toolchain@v1 with: profile: minimal toolchain: stable override: true # Publish to crates.io. - name: Cargo Login run: cargo login ${{ secrets.CRATES_IO_TOKEN }} - name: Cargo Publish run: cargo publish xor_name-5.0.0/.github/workflows/pr.yml000066400000000000000000000107011427320770600200740ustar00rootroot00000000000000name: PR on: [pull_request] env: # Run all cargo commands with --verbose. CARGO_TERM_VERBOSE: true RUST_BACKTRACE: 1 # Deny all compiler warnings. RUSTFLAGS: "-D warnings" jobs: clippy: if: "!startsWith(github.event.pull_request.title, 'Automated version bump')" name: clippy & fmt checks runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - uses: actions-rs/toolchain@v1 with: profile: minimal toolchain: stable override: true components: rustfmt, clippy # Cache. - name: Cargo cache registry, index and build uses: actions/cache@v2.1.4 with: path: | ~/.cargo/registry ~/.cargo/git target key: ${{ runner.os }}-cargo-cache-${{ hashFiles('**/Cargo.lock') }} # Check if the code is formatted correctly. - name: Check formatting run: cargo fmt --all -- --check # Run Clippy. - name: Clippy checks run: cargo clippy --all-targets --all-features coverage: if: "!startsWith(github.event.pull_request.title, 'Automated version bump')" name: Code coverage check runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 # Install Rust and required components - uses: actions-rs/toolchain@v1 with: profile: minimal toolchain: stable override: true # Cache. 
- name: Cargo cache registry, index and build uses: actions/cache@v2.1.4 with: path: | ~/.cargo/registry ~/.cargo/git target key: ${{ runner.os }}-cargo-cache-${{ hashFiles('**/Cargo.lock') }} # Run cargo tarpaulin & push result to coveralls.io - name: rust-tarpaulin code coverage check uses: actions-rs/tarpaulin@v0.1 with: args: '-v --release --out Lcov' - name: Push code coverage results to coveralls.io uses: coverallsapp/github-action@master with: github-token: ${{ secrets.GITHUB_TOKEN }} parallel: true path-to-lcov: ./lcov.info - name: Coveralls Finished uses: coverallsapp/github-action@master with: github-token: ${{ secrets.GITHUB_TOKEN }} parallel-finished: true cargo-udeps: if: "!startsWith(github.event.pull_request.title, 'Automated version bump')" name: Unused dependency check runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 # Install Rust and required components - uses: actions-rs/toolchain@v1 with: toolchain: nightly override: true - name: Run cargo-udeps uses: aig787/cargo-udeps-action@v1 with: version: 'latest' args: '--all-targets' cargo-deny: if: "!startsWith(github.event.pull_request.title, 'Automated version bump')" runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 # wget the shared deny.toml file from the QA repo - shell: bash run: wget https://raw.githubusercontent.com/maidsafe/QA/master/misc-scripts/deny.toml - uses: EmbarkStudios/cargo-deny-action@v1 test: if: "!startsWith(github.event.pull_request.title, 'Automated version bump')" name: Test runs-on: ${{ matrix.os }} strategy: matrix: os: [ubuntu-latest, windows-latest, macOS-latest] steps: - uses: actions/checkout@v2 - name: Install Rust uses: actions-rs/toolchain@v1 with: profile: minimal toolchain: stable override: true # Cache. - name: Cargo cache registry, index and build uses: actions/cache@v2.1.4 with: path: | ~/.cargo/registry ~/.cargo/git target key: ${{ runner.os }}-cargo-cache-${{ hashFiles('**/Cargo.lock') }} # Run the tests. - name: Cargo test run: cargo test --all-features --release # Test publish using --dry-run. 
test-publish: if: "!startsWith(github.event.pull_request.title, 'Automated version bump')" name: Test Publish runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 # Install Rust - uses: actions-rs/toolchain@v1 with: profile: minimal toolchain: stable override: true # Cargo publish dry run - name: Cargo Publish Dry Run run: cargo publish --dry-run xor_name-5.0.0/.github/workflows/rustdoc.yml000066400000000000000000000013701427320770600211400ustar00rootroot00000000000000name: rustdoc on: push: branches: - master env: CARGO_INCREMENTAL: 0 CARGO_NET_RETRY: 10 RUSTFLAGS: -D warnings RUSTUP_MAX_RETRIES: 10 jobs: rustdoc: runs-on: ubuntu-latest steps: - name: Checkout repository uses: actions/checkout@v2 - name: Install Rust toolchain uses: actions-rs/toolchain@v1 with: toolchain: stable profile: minimal override: true components: rustfmt, rust-src - name: Build Documentation run: cargo doc --all --no-deps - name: Deploy Docs uses: peaceiris/actions-gh-pages@v3 with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_branch: gh-pages publish_dir: ./target/doc keep_files: true xor_name-5.0.0/.github/workflows/security_audit.yml000066400000000000000000000004451427320770600225140ustar00rootroot00000000000000name: Security audit on: schedule: - cron: '0 0 * * *' jobs: audit: if: github.repository_owner == 'maidsafe' runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - uses: actions-rs/audit-check@v1 with: token: ${{ secrets.GITHUB_TOKEN }} xor_name-5.0.0/.github/workflows/tag_release.yml000066400000000000000000000025231427320770600217310ustar00rootroot00000000000000name: Tag release commit on: # Trigger the workflow on push only for the master branch push: branches: - master env: NODE_ENV: 'development' GITHUB_TOKEN: ${{ secrets.BRANCH_CREATOR_TOKEN }} jobs: tag: runs-on: ubuntu-latest # Only run on a release commit if: "startsWith(github.event.head_commit.message, 'chore(release):')" steps: - uses: actions/checkout@v2 with: fetch-depth: '0' token: ${{ secrets.BRANCH_CREATOR_TOKEN }} - run: echo "RELEASE_VERSION=$(git log -1 --pretty=%s)" >> $GITHUB_ENV # parse out non-tag text - run: echo "RELEASE_VERSION=$( echo $RELEASE_VERSION | sed 's/chore(release)://' )" >> $GITHUB_ENV # remove spaces, but add back in `v` to tag, which is needed for standard-version - run: echo "RELEASE_VERSION=v$(echo $RELEASE_VERSION | tr -d '[:space:]')" >> $GITHUB_ENV - run: echo $RELEASE_VERSION - run: git tag $RELEASE_VERSION - name: Setup git for push run: | git remote add github "$REPO" git config --local user.email "action@github.com" git config --local user.name "GitHub Action" - name: Push tags to master run: git push "https://$GITHUB_ACTOR:$GITHUB_TOKEN@github.com/$GITHUB_REPOSITORY" HEAD:master --tags xor_name-5.0.0/.gitignore000066400000000000000000000001611427320770600153220ustar00rootroot00000000000000*.exe *.lock *.swp *.rsproj tags* *bootstrap.cache build/ build-tests/ target/ /.idea /.project /bin *.sublime-* xor_name-5.0.0/CHANGELOG.md000066400000000000000000000132531427320770600151510ustar00rootroot00000000000000# Changelog All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
## [5.0.0](https://github.com/maidsafe/xor_name/compare/v4.1.0...v5.0.0) (2022-08-05) ### ⚠ BREAKING CHANGES * error instead of panic on too long string ### Features * different Prefix serialization format ([6ff6d39](https://github.com/maidsafe/xor_name/commit/6ff6d39400179f28580530fa3ee4bdb59db26876)) * impl Display for Prefix ([d42ba53](https://github.com/maidsafe/xor_name/commit/d42ba5309329be2780fa4efab15532275f6b1bd9)) ### Bug Fixes * allow prefix strings of more than one byte ([1f05f48](https://github.com/maidsafe/xor_name/commit/1f05f4841a848667cca24fe08c22bde6a3a36d59)) * error instead of panic on too long string ([73123cb](https://github.com/maidsafe/xor_name/commit/73123cbb0e2a23d30fc6298ad35812209dca27b5)) ## [4.1.0](https://github.com/maidsafe/xor_name/compare/v4.0.1...v4.1.0) (2022-08-04) ### Features * serialize to string ([9d54992](https://github.com/maidsafe/xor_name/commit/9d54992cdf519d66524fd9fdbedb53780133c183)) ### [4.0.1](https://github.com/maidsafe/xor_name/compare/v4.0.0...v4.0.1) (2022-03-18) ## [4.0.0](https://github.com/maidsafe/xor_name/compare/v3.1.0...v4.0.0) (2022-03-16) ### ⚠ BREAKING CHANGES * public api changed * remove OsRng ([c4d64e9](https://github.com/maidsafe/xor_name/commit/c4d64e98556e5c9caff902182c9e840dad869580)) ## [3.1.0](https://github.com/maidsafe/xor_name/compare/v3.0.0...v3.1.0) (2021-08-26) ### Features * **api:** simplify from content api ([441acc7](https://github.com/maidsafe/xor_name/commit/441acc7269747cff6868adf425cd0be6c12b39e5)) ## [3.0.0](https://github.com/maidsafe/xor_name/compare/v2.0.0...v3.0.0) (2021-08-24) ### ⚠ BREAKING CHANGES * remove prefix_map ### Features * remove prefix_map make with_bit public ([efa63e2](https://github.com/maidsafe/xor_name/commit/efa63e26dc3820c6ba1cdeaf270f41030684fa09)) * use DashMaps for better concurrency ([2ef45f3](https://github.com/maidsafe/xor_name/commit/2ef45f328699ccb8a750b8f0e5788b792414f3c1)) ## [2.0.0](https://github.com/maidsafe/xor_name/compare/v1.3.0...v2.0.0) (2021-08-10) ### ⚠ BREAKING CHANGES * **prefix-map:** - Expose PrefixMap as public from lib and remove pub prefix_map mod. - Adapting PrefixMap APIs to the removal of requirement of Borrow Trait for T. 
* **prefix-map:** remove the requirement of Borrow trait for T from PrefixMap ([1e32830](https://github.com/maidsafe/xor_name/commit/1e32830af72ae37f58a9961b8a0c8dde0981b0e0)) ## [1.3.0](https://github.com/maidsafe/xor_name/compare/v1.2.1...v1.3.0) (2021-08-06) ### Features * insert returns bool ([7e36f9d](https://github.com/maidsafe/xor_name/commit/7e36f9dfeb49765b281625f07ec64fd320c666d2)) * prefix map ([83be995](https://github.com/maidsafe/xor_name/commit/83be99545a3dda1fdb9d0c13a9d18a757bec8538)) * remove get_equal_or_ancestor ([4c2c7ed](https://github.com/maidsafe/xor_name/commit/4c2c7ed40db22f14a8548d8bb6e36589a0111165)) * use BTreeMap add get_matching_prefix make prune pub ([069767c](https://github.com/maidsafe/xor_name/commit/069767ce0e98a86e9b04f8efa2c91225968e022d)) ### [1.2.1](https://github.com/maidsafe/xor_name/compare/v1.2.0...v1.2.1) (2021-06-08) ## [1.2.0](https://github.com/maidsafe/xor_name/compare/v1.1.12...v1.2.0) (2021-04-19) ### Features * Debug output with binary fmt as well ([1382403](https://github.com/maidsafe/xor_name/commit/1382403befe73de1961fcde8ec6cfa042dd36fb0)) ### [1.1.12](https://github.com/maidsafe/xor_name/compare/v1.1.11...v1.1.12) (2021-03-03) ### [1.1.11](https://github.com/maidsafe/xor_name/compare/v1.1.10...v1.1.11) (2021-02-25) ### [1.1.10](https://github.com/maidsafe/xor_name/compare/v1.1.9...v1.1.10) (2021-02-09) ### [1.1.9](https://github.com/maidsafe/xor_name/compare/v1.1.8...v1.1.9) (2021-02-03) ### [1.1.8](https://github.com/maidsafe/xor_name/compare/v1.1.7...v1.1.8) (2021-02-03) ### [1.1.7](https://github.com/maidsafe/xor_name/compare/v1.1.6...v1.1.7) (2021-01-20) ### [1.1.6](https://github.com/maidsafe/xor_name/compare/v1.1.5...v1.1.6) (2021-01-14) ### [1.1.5](https://github.com/maidsafe/xor_name/compare/v1.1.4...v1.1.5) (2021-01-06) ### [1.1.4](https://github.com/maidsafe/xor_name/compare/v1.1.3...v1.1.4) (2020-11-23) ### [1.1.3](https://github.com/maidsafe/xor_name/compare/v1.1.2...v1.1.3) (2020-10-09) ### [1.1.2](https://github.com/maidsafe/xor_name/compare/v1.1.1...v1.1.2) (2020-10-09) ### [1.1.1](https://github.com/maidsafe/xor_name/compare/v1.1.0...v1.1.1) (2020-09-21) ### [1.1.0](https://github.com/maidsafe/xor_name/compare/v1.0.0...v1.1.0) (2020-08-18) * Add in `XorName::random()` functionality * Use OSRng ### [1.0.0](https://github.com/maidsafe/xor_name/compare/0.9.2...v1.0.0) (2020-07-02) * Make the crate no_std * Add impl Deref for XorName, remove slice indexing * Minimise the API surface * Remove generics ### [0.9.2] * Remove test barrier from the FromStr trait impl for Prefix ### [0.9.1] * Added borrow trait for prefix ### [0.9.0] * Extracted from the routing crate to become standalone (again) * License details updated to MIT or modified BSD license. * CI set up on GitHub Actions ### [0.1.0] * Replace CBOR usage with maidsafe_utilites::serialisation. * Updated dependencies. ### [0.0.5] * Migrate to maidsafe_utilities 0.2.1. * Make debug hex output lowercase. ### [0.0.4] * Reduce debug output to improve readability. ### [0.0.3] * Add the `with_flipped_bit` and `count_differing_bits` methods. * Rename `cmp_closeness` to `cmp_distance`. ### [0.0.2] * Rename `bucket_distance` to `bucket_index`. * Expand documentation. * Add `XorName::cmp_closeness` ordering method. 
### [0.0.1] * Initial implementation xor_name-5.0.0/Cargo.toml000066400000000000000000000017741427320770600152750ustar00rootroot00000000000000[package] name = "xor_name" version = "5.0.0" authors = [ "MaidSafe Developers " ] description = "Xor Type" homepage = "http://maidsafe.net" edition = "2018" license = "MIT OR BSD-3-Clause" readme = "README.md" repository = "https://github.com/maidsafe/xor_name" [features] default = [ "serialize-hex" ] serialize-hex = [ "hex", "serde_test" ] [dependencies] rand_core = "0.6.3" [dependencies.tiny-keccak] version = "~2.0" features = [ "sha3" ] [dependencies.rand] version = "~0.8.5" default-features = false features = [ "std" ] [dependencies.serde] version = "1.0.113" default-features = false features = [ "derive" ] [dependencies.serde_test] version = "1" optional = true [dependencies.hex] version = "0.4" optional = true [dev-dependencies] bincode = "1.2.1" [dev-dependencies.arrayvec] version = "~0.5.1" default-features = false [dev-dependencies.rand] version = "~0.8.5" default-features = false features = [ "getrandom", "small_rng" ] xor_name-5.0.0/LICENSE-BSD000066400000000000000000000026721427320770600147560ustar00rootroot00000000000000Copyright 2020 MaidSafe.net limited. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. xor_name-5.0.0/LICENSE-MIT000066400000000000000000000020441427320770600147700ustar00rootroot00000000000000Copyright 2020 Maidsafe.net limited Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. xor_name-5.0.0/README.md000066400000000000000000000033631427320770600146200ustar00rootroot00000000000000# xor_name XorName is an array that is useful for calculations in DHT | [MaidSafe website](http://maidsafe.net) | [SAFE Network Forum](https://safenetforum.org/) | |:-------:|:-------:| ## Serialization `XorName` and `Prefix` are serialized into a human-readable hex string, instead of as a `u8` array. This is enabled by default, with the `serialize-hex` feature. This also allows for these structures to be serialised when used as a key in a map like `HashMap`, because most formats only allow keys to be strings, instead of more complex types. A struct like this: ```rust #[derive(Serialize, Deserialize)] struct MyStruct { prefix: Prefix, xor_name: XorName, } ``` Will yield this JSON ```json { "prefix": "10001101110001111100101000111001101101111101111010011001", "xor_name": "8dc7ca39b7de990eb943fd64854776dd85aa82c33a4269693c57b36e0749ed8f" } ``` instead of ```json { "prefix": { "bit_count": 56, "name": [141,199,202,57,183,222,153,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0] }, "xor_name": [141,199,202,57,183,222,153,14,185,67,253,100,133,71,118,221,133,170,130,195,58,66,105,105,60,87,179,110,7,73,237,143] } ``` ## License This SAFE Network library is dual-licensed under the Modified BSD ([LICENSE-BSD](LICENSE-BSD) https://opensource.org/licenses/BSD-3-Clause) or the MIT license ([LICENSE-MIT](LICENSE-MIT) https://opensource.org/licenses/MIT) at your option. ## Contributing Want to contribute? Great :tada: There are many ways to give back to the project, whether it be writing new code, fixing bugs, or just reporting errors. All forms of contributions are encouraged! For instructions on how to contribute, see our [Guide to contributing](https://github.com/maidsafe/QA/blob/master/CONTRIBUTING.md). xor_name-5.0.0/codeowners000066400000000000000000000000371427320770600154270ustar00rootroot00000000000000* @maidsafe/backend_codeowners xor_name-5.0.0/src/000077500000000000000000000000001427320770600141235ustar00rootroot00000000000000xor_name-5.0.0/src/lib.rs000066400000000000000000000514301427320770600152420ustar00rootroot00000000000000// Copyright 2021 MaidSafe.net limited. // // This SAFE Network Software is licensed to you under the MIT license or the Modified BSD license , at your option. This file may not be copied, // modified, or distributed except according to those terms. Please review the Licences for the // specific language governing permissions and limitations relating to use of the SAFE Network // Software. //! # xor_name //! //! TODO requires further documentation. 
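//!
//! A minimal usage sketch of the public API (the content bytes below are purely illustrative):
//!
//! ```
//! use xor_name::{Prefix, XorName};
//!
//! // Content-addressed naming: equal content always hashes to the same name.
//! let name = XorName::from_content(b"some data");
//! assert_eq!(name, XorName::from_content(b"some data"));
//!
//! // The empty (zero-bit) prefix matches every name in the XOR space.
//! let root = Prefix::default();
//! assert!(root.matches(&name));
//! ```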
#![doc( html_logo_url = "https://raw.githubusercontent.com/maidsafe/QA/master/Images/maidsafe_logo.png", html_favicon_url = "http://maidsafe.net/img/favicon.ico", html_root_url = "http://maidsafe.github.io/xor_name" )] // For explanation of lint checks, run `rustc -W help` or see // https://github.com/maidsafe/QA/blob/master/Documentation/Rust%20Lint%20Checks.md #![forbid(mutable_transmutes, no_mangle_const_items, unknown_crate_types)] #![deny( deprecated, improper_ctypes, missing_docs, non_shorthand_field_patterns, overflowing_literals, stable_features, unconditional_recursion, unknown_lints, unsafe_code, unused, unused_allocation, unused_attributes, unused_comparisons, unused_features, unused_parens, while_true, warnings )] #![warn( trivial_casts, trivial_numeric_casts, unused_extern_crates, unused_import_braces, unused_qualifications, unused_results )] #![allow( box_pointers, missing_copy_implementations, missing_debug_implementations, variant_size_differences )] use core::{cmp::Ordering, fmt, ops}; pub use prefix::Prefix; pub use rand; use rand::distributions::{Distribution, Standard}; use tiny_keccak::{Hasher, Sha3}; /// Creates XorName with the given leading bytes and the rest filled with zeroes. #[macro_export] macro_rules! xor_name { ($($byte:expr),* $(,)?) => {{ let mut name = $crate::XorName::default(); let mut index = 0; #[allow(unused_assignments)] { $( name.0[index] = $byte; index += 1; )* } name }} } // No-std replacement for std::format! macro which returns `ArrayString` instead of `String`. The // capacity of the returned `ArrayString` needs to explicitly given as the first argument. #[cfg(test)] macro_rules! format { ($capacity:expr, $($arg:tt)*) => {{ let mut output = arrayvec::ArrayString::<[_; $capacity]>::new(); core::fmt::write(&mut output, core::format_args!($($arg)*)).expect("insufficient ArrayString capacity"); output }} } mod prefix; #[cfg(feature = "serialize-hex")] mod serialize; /// Constant byte length of `XorName`. pub const XOR_NAME_LEN: usize = 32; /// A 256-bit number, viewed as a point in XOR space. /// /// This wraps an array of 32 bytes, i. e. a number between 0 and 2256 - 1. /// /// XOR space is the space of these numbers, with the [XOR metric][1] as a notion of distance, /// i. e. the points with IDs `x` and `y` are considered to have distance `x xor y`. /// /// [1]: https://en.wikipedia.org/wiki/Kademlia#System_details #[derive(Eq, Copy, Clone, Default, Hash, Ord, PartialEq, PartialOrd)] #[cfg_attr( not(feature = "serialize-hex"), derive(serde::Serialize, serde::Deserialize) )] pub struct XorName(pub [u8; XOR_NAME_LEN]); impl XorName { /// Generate a XorName for the given content. pub fn from_content(content: &[u8]) -> Self { Self::from_content_parts(&[content]) } /// Generate a XorName for the given content (for content-addressable-storage) pub fn from_content_parts(content_parts: &[&[u8]]) -> Self { let mut sha3 = Sha3::v256(); for part in content_parts { sha3.update(part); } let mut hash = [0u8; XOR_NAME_LEN]; sha3.finalize(&mut hash); Self(hash) } /// Generate a random XorName pub fn random(rng: &mut T) -> Self { let mut xor = [0u8; XOR_NAME_LEN]; rng.fill(&mut xor); Self(xor) } /// Returns `true` if the `i`-th bit is `1`. pub fn bit(&self, i: u8) -> bool { let index = i / 8; let pow_i = 1 << (7 - (i % 8)); self[index as usize] & pow_i != 0 } /// Compares the distance of the arguments to `self`. Returns `Less` if `lhs` is closer, /// `Greater` if `rhs` is closer, and `Equal` if `lhs == rhs`. 
(The XOR distance can only be /// equal if the arguments are equal.) pub fn cmp_distance(&self, lhs: &Self, rhs: &Self) -> Ordering { for i in 0..XOR_NAME_LEN { if lhs[i] != rhs[i] { return Ord::cmp(&(lhs[i] ^ self[i]), &(rhs[i] ^ self[i])); } } Ordering::Equal } /// Returns a copy of `self`, with the `i`-th bit set to `bit`. /// /// If `i` exceeds the number of bits in `self`, an unmodified copy of `self` is returned. pub fn with_bit(mut self, i: u8, bit: bool) -> Self { if i as usize >= XOR_NAME_LEN * 8 { return self; } let pow_i = 1 << (7 - i % 8); if bit { self.0[i as usize / 8] |= pow_i; } else { self.0[i as usize / 8] &= !pow_i; } self } /// Returns a copy of `self`, with the `i`-th bit flipped. /// /// If `i` exceeds the number of bits in `self`, an unmodified copy of `self` is returned. fn with_flipped_bit(mut self, i: u8) -> Self { if i as usize >= XOR_NAME_LEN * 8 { return self; } self.0[i as usize / 8] ^= 1 << (7 - i % 8); self } /// Returns a copy of self with first `n` bits preserved, and remaining bits /// set to 0 (val == false) or 1 (val == true). fn set_remaining(mut self, n: u8, val: bool) -> Self { for (i, x) in self.0.iter_mut().enumerate() { let i = i as u8; if n <= i * 8 { *x = if val { !0 } else { 0 }; } else if n < (i + 1) * 8 { let mask = !0 >> (n - i * 8); if val { *x |= mask } else { *x &= !mask } } // else n >= (i+1) * bits: nothing to do } self } /// Returns the length of the common prefix with the `other` name; e. g. /// the when `other = 11110000` and `self = 11111111` this is 4. fn common_prefix(&self, other: &Self) -> usize { for byte_index in 0..XOR_NAME_LEN { if self[byte_index] != other[byte_index] { return (byte_index * 8) + (self[byte_index] ^ other[byte_index]).leading_zeros() as usize; } } 8 * XOR_NAME_LEN } } impl fmt::Debug for XorName { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { write!( formatter, "{:02x}{:02x}{:02x}({:08b})..", self[0], self[1], self[2], self ) } } impl fmt::Display for XorName { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { write!(formatter, "{:02x}{:02x}{:02x}..", self[0], self[1], self[2]) } } impl fmt::Binary for XorName { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { if let Some(width) = formatter.width() { let whole_bytes = width / 8; let remaining_bits = width % 8; for byte in &self[..whole_bytes] { write!(formatter, "{:08b}", byte)? } for bit in 0..remaining_bits { write!(formatter, "{}", (self[whole_bytes] >> (7 - bit)) & 1)?; } if formatter.alternate() && whole_bytes < XOR_NAME_LEN - 1 { write!(formatter, "..")?; } } else { for byte in &self[..] { write!(formatter, "{:08b}", byte)? 
} } Ok(()) } } impl fmt::LowerHex for XorName { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let bytes = formatter.width().unwrap_or(2 * XOR_NAME_LEN) / 2; for byte in &self[..bytes] { write!(formatter, "{:02x}", byte)?; } if formatter.alternate() && bytes < XOR_NAME_LEN { write!(formatter, "..")?; } Ok(()) } } impl fmt::UpperHex for XorName { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { let bytes = formatter.width().unwrap_or(2 * XOR_NAME_LEN) / 2; for byte in &self[..bytes] { write!(formatter, "{:02X}", byte)?; } if formatter.alternate() && bytes < XOR_NAME_LEN { write!(formatter, "..")?; } Ok(()) } } impl Distribution for Standard { fn sample(&self, rng: &mut R) -> XorName { let mut name = XorName::default(); rng.fill(&mut name.0[..]); name } } impl ops::Not for XorName { type Output = Self; fn not(mut self) -> Self { for byte in &mut self.0 { *byte = !*byte; } self } } impl AsRef for XorName { fn as_ref(&self) -> &Self { self } } impl AsRef<[u8]> for XorName { fn as_ref(&self) -> &[u8] { &self.0[..] } } impl ops::Deref for XorName { type Target = [u8]; fn deref(&self) -> &Self::Target { &self.0[..] } } #[cfg(test)] mod tests { use super::*; use bincode::{deserialize, serialize}; use rand::{rngs::SmallRng, Rng, SeedableRng}; #[test] fn create_random_xorname() { let mut rng = SmallRng::from_entropy(); let xorname: XorName = XorName::random(&mut rng); let xorname2: XorName = XorName::random(&mut rng); assert_ne!(xorname, xorname2); } #[test] fn serialisation_xor_name() { let mut rng = SmallRng::from_entropy(); let obj_before: XorName = XorName::random(&mut rng); let data = serialize(&obj_before).unwrap(); assert_eq!(data.len(), XOR_NAME_LEN); let obj_after: XorName = deserialize(&data).unwrap(); assert_eq!(obj_before, obj_after); } #[test] #[allow(clippy::eq_op, clippy::nonminimal_bool)] fn xor_name_ord() { let type1: XorName = XorName([1u8; XOR_NAME_LEN]); let type2: XorName = XorName([2u8; XOR_NAME_LEN]); assert_eq!(Ord::cmp(&type1, &type1), Ordering::Equal); assert_eq!(Ord::cmp(&type1, &type2), Ordering::Less); assert_eq!(Ord::cmp(&type2, &type1), Ordering::Greater); assert!(type1 < type2); assert!(type1 <= type2); assert!(type1 <= type1); assert!(type2 > type1); assert!(type2 >= type1); assert!(type1 >= type1); assert!(!(type2 < type1)); assert!(!(type2 <= type1)); assert!(!(type1 > type2)); assert!(!(type1 >= type2)); } #[test] #[allow(clippy::nonminimal_bool)] fn xor_name_equal_assertion() { let mut rng = SmallRng::from_entropy(); let type1: XorName = rng.gen(); let type1_clone = type1; let type2: XorName = rng.gen(); assert_eq!(type1, type1_clone); assert!(!(type1 != type1_clone)); assert_ne!(type1, type2); } #[test] fn format_debug() { assert_eq!( &format!(18, "{:?}", xor_name!(0x01, 0x23, 0x45, 0x67)), "012345(00000001).." ); assert_eq!( &format!(18, "{:?}", xor_name!(0x89, 0xab, 0xcd, 0xdf)), "89abcd(10001001).." 
); } #[test] fn format_hex() { assert_eq!( &format!(64, "{:x}", xor_name!(0x01, 0x23, 0xab)), "0123ab0000000000000000000000000000000000000000000000000000000000" ); assert_eq!(&format!(2, "{:2x}", xor_name!(0x01, 0x23, 0xab)), "01"); assert_eq!(&format!(4, "{:4x}", xor_name!(0x01, 0x23, 0xab)), "0123"); assert_eq!(&format!(6, "{:6x}", xor_name!(0x01, 0x23, 0xab)), "0123ab"); assert_eq!( &format!(8, "{:8x}", xor_name!(0x01, 0x23, 0xab)), "0123ab00" ); assert_eq!( &format!(10, "{:10x}", xor_name!(0x01, 0x23, 0xab)), "0123ab0000" ); assert_eq!( &format!(8, "{:8X}", xor_name!(0x01, 0x23, 0xab)), "0123AB00" ); assert_eq!( &format!(8, "{:#6x}", xor_name!(0x01, 0x23, 0xab)), "0123ab.." ); // odd widths are truncated to nearest even assert_eq!(&format!(2, "{:3x}", xor_name!(0x01, 0x23, 0xab)), "01"); assert_eq!(&format!(4, "{:5x}", xor_name!(0x01, 0x23, 0xab)), "0123"); } #[test] fn format_binary() { assert_eq!( &format!(256, "{:b}", xor_name!(0b00001111, 0b01010101)), "00001111010101010000000000000000000000000000000000000000000000000000000000000000000000\ 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000\ 000000000000000000000000000000000000000000000000000000000000000000000000000000000000" ); assert_eq!(&format!(1, "{:1b}", xor_name!(0b00001111, 0b01010101)), "0"); assert_eq!( &format!(2, "{:2b}", xor_name!(0b00001111, 0b01010101)), "00" ); assert_eq!( &format!(3, "{:3b}", xor_name!(0b00001111, 0b01010101)), "000" ); assert_eq!( &format!(4, "{:4b}", xor_name!(0b00001111, 0b01010101)), "0000" ); assert_eq!( &format!(5, "{:5b}", xor_name!(0b00001111, 0b01010101)), "00001" ); assert_eq!( &format!(6, "{:6b}", xor_name!(0b00001111, 0b01010101)), "000011" ); assert_eq!( &format!(7, "{:7b}", xor_name!(0b00001111, 0b01010101)), "0000111" ); assert_eq!( &format!(8, "{:8b}", xor_name!(0b00001111, 0b01010101)), "00001111" ); assert_eq!( &format!(9, "{:9b}", xor_name!(0b00001111, 0b01010101)), "000011110" ); assert_eq!( &format!(10, "{:10b}", xor_name!(0b00001111, 0b01010101)), "0000111101" ); assert_eq!( &format!(16, "{:16b}", xor_name!(0b00001111, 0b01010101)), "0000111101010101" ); assert_eq!( &format!(10, "{:#8b}", xor_name!(0b00001111, 0b01010101)), "00001111.." 
); } #[test] fn with_flipped_bit() { let mut rng = SmallRng::from_entropy(); let name: XorName = rng.gen(); for i in 0..18 { assert_eq!(i, name.common_prefix(&name.with_flipped_bit(i as u8))); } for i in 0..10 { assert_eq!( 19 * i, name.common_prefix(&name.with_flipped_bit(19 * i as u8)) ); } } #[test] fn common_prefix() { assert_eq!( 0, xor_name!(0b00000000).common_prefix(&xor_name!(0b10000000)) ); assert_eq!( 3, xor_name!(0b11100000).common_prefix(&xor_name!(0b11111111)) ); assert_eq!( 5, xor_name!(0b10101010).common_prefix(&xor_name!(0b10101111)) ); assert_eq!( 0, xor_name!(0, 0, 0, 0).common_prefix(&xor_name!(128, 0, 0, 0)) ); assert_eq!( 11, xor_name!(0, 10, 0, 0).common_prefix(&xor_name!(0, 16, 0, 0)) ); assert_eq!( 31, xor_name!(1, 2, 3, 4).common_prefix(&xor_name!(1, 2, 3, 5)) ); assert_eq!( 256, xor_name!(1, 2, 3, 4).common_prefix(&xor_name!(1, 2, 3, 4)) ); } #[test] fn cmp_distance() { assert_eq!( xor_name!(42).cmp_distance(&xor_name!(13), &xor_name!(13)), Ordering::Equal, ); assert_eq!( xor_name!(42).cmp_distance(&xor_name!(44), &xor_name!(45)), Ordering::Less, ); assert_eq!( xor_name!(42).cmp_distance(&xor_name!(45), &xor_name!(44)), Ordering::Greater, ); assert_eq!( xor_name!(1, 2, 3, 4).cmp_distance(&xor_name!(2, 3, 4, 5), &xor_name!(2, 3, 4, 5)), Ordering::Equal, ); assert_eq!( xor_name!(1, 2, 3, 4).cmp_distance(&xor_name!(2, 2, 4, 5), &xor_name!(2, 3, 6, 5)), Ordering::Less, ); assert_eq!( xor_name!(1, 2, 3, 4).cmp_distance(&xor_name!(2, 3, 6, 5), &xor_name!(2, 2, 4, 5)), Ordering::Greater, ); assert_eq!( xor_name!(1, 2, 3, 4).cmp_distance(&xor_name!(1, 2, 3, 8), &xor_name!(1, 2, 8, 4)), Ordering::Less, ); assert_eq!( xor_name!(1, 2, 3, 4).cmp_distance(&xor_name!(1, 2, 8, 4), &xor_name!(1, 2, 3, 8)), Ordering::Greater, ); assert_eq!( xor_name!(1, 2, 3, 4).cmp_distance(&xor_name!(1, 2, 7, 4), &xor_name!(1, 2, 6, 4)), Ordering::Less, ); assert_eq!( xor_name!(1, 2, 3, 4).cmp_distance(&xor_name!(1, 2, 6, 4), &xor_name!(1, 2, 7, 4)), Ordering::Greater, ); } #[test] fn bit() { assert!(!xor_name!(0b00101000).bit(0)); assert!(xor_name!(0b00101000).bit(2)); assert!(!xor_name!(0b00101000).bit(3)); assert!(xor_name!(2, 128, 1, 0).bit(6)); assert!(xor_name!(2, 128, 1, 0).bit(8)); assert!(xor_name!(2, 128, 1, 0).bit(23)); assert!(!xor_name!(2, 128, 1, 0).bit(7)); assert!(!xor_name!(2, 128, 1, 0).bit(9)); assert!(!xor_name!(2, 128, 1, 0).bit(5)); assert!(!xor_name!(2, 128, 1, 0).bit(22)); assert!(!xor_name!(2, 128, 1, 0).bit(24)); } #[test] fn set_remaining() { assert_eq!( xor_name!(0b10011011).set_remaining(5, false), xor_name!(0b10011000) ); assert_eq!( xor_name!(0b11111111).set_remaining(2, false), xor_name!(0b11000000) ); assert_eq!( xor_name!(0b00000000).set_remaining(4, true), xor_name!( 0b00001111, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255 ) ); assert_eq!( xor_name!(13, 112, 9, 1).set_remaining(0, false), xor_name!(0, 0, 0, 0) ); assert_eq!( xor_name!(13, 112, 9, 1).set_remaining(100, false), xor_name!(13, 112, 9, 1) ); assert_eq!( xor_name!(13, 112, 9, 1).set_remaining(10, false), xor_name!(13, 64, 0, 0) ); assert_eq!( xor_name!(13, 112, 9, 1).set_remaining(10, true), xor_name!( 13, 127, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255 ) ); } #[test] fn xor_name_macro() { let mut rng = SmallRng::from_entropy(); for _ in 0..100 { let byte = rng.gen(); 
assert_eq!(&xor_name!(byte)[..1], &[byte]); } for _ in 0..100 { let byte0 = rng.gen(); let byte1 = rng.gen(); assert_eq!(&xor_name!(byte0, byte1)[..2], &[byte0, byte1]); } for _ in 0..100 { let byte0 = rng.gen(); let byte1 = rng.gen(); let byte2 = rng.gen(); assert_eq!(&xor_name!(byte0, byte1, byte2)[..3], &[byte0, byte1, byte2]); } } #[test] fn conversion_from_u64() { assert_eq!( &from_u64(0x0123456789abcdef)[XOR_NAME_LEN - 8..], &[0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef] ); } #[test] fn xor_name_from_content() { let alpha_1 = XorName::from_content_parts(&[b"abcdefg", b"hijk"]); let alpha_2 = XorName::from_content_parts(&[b"abcdefg", b"hijk"]); let alpha_3 = XorName::from_content(b"abcdefg"); assert_eq!(alpha_1, alpha_2); assert_ne!(alpha_1, alpha_3); } #[test] fn xor_name_from_content_is_agnostic_to_where_content_parts_splits() { let alpha_1 = XorName::from_content_parts(&[b"abcdefg", b"hijk"]); let alpha_2 = XorName::from_content(b"abcdefghijk"); assert_eq!(alpha_1, alpha_2); } // Create a `XorName` with the 8 trailing bytes equal to `x` (in big endian order) and the rest // filled with zeroes. fn from_u64(x: u64) -> XorName { let mut name = XorName::default(); name.0[XOR_NAME_LEN - 8..].copy_from_slice(&x.to_be_bytes()); name } } xor_name-5.0.0/src/prefix.rs000066400000000000000000000365551427320770600160040ustar00rootroot00000000000000// Copyright 2020 MaidSafe.net limited. // // This SAFE Network Software is licensed to you under the MIT license or the Modified BSD license , at your option. This file may not be copied, // modified, or distributed except according to those terms. Please review the Licences for the // specific language governing permissions and limitations relating to use of the SAFE Network // Software. use crate::{XorName, XOR_NAME_LEN}; use core::{ cmp::{self, Ordering}, fmt::{Binary, Debug, Display, Formatter, Result as FmtResult}, hash::{Hash, Hasher}, ops::RangeInclusive, str::FromStr, }; /// A section prefix, i.e. a sequence of bits specifying the part of the network's name space /// consisting of all names that start with this sequence. #[derive(Clone, Copy, Default, Eq)] #[cfg_attr( not(feature = "serialize-hex"), derive(serde::Serialize, serde::Deserialize) )] pub struct Prefix { pub(crate) bit_count: u16, pub(crate) name: XorName, } impl Prefix { /// Creates a new `Prefix` with the first `bit_count` bits of `name`. Insignificant bits are all /// set to 0. pub fn new(bit_count: usize, name: XorName) -> Self { Prefix { bit_count: bit_count.min(8 * XOR_NAME_LEN) as u16, name: name.set_remaining(bit_count as u8, false), } } /// Returns the name of this prefix. pub fn name(&self) -> XorName { self.name } /// Returns `self` with an appended bit: `0` if `bit` is `false`, and `1` if `bit` is `true`. If /// `self.bit_count` is already at the maximum for this type, then an unmodified copy of `self` /// is returned. pub fn pushed(mut self, bit: bool) -> Self { if self.bit_count < 8 * XOR_NAME_LEN as u16 { self.name = self.name.with_bit(self.bit_count() as u8, bit); self.bit_count += 1; } self } /// Returns a prefix copying the first `bitcount() - 1` bits from `self`, /// or `self` if it is already empty. pub fn popped(mut self) -> Self { if self.bit_count > 0 { self.bit_count -= 1; // unused bits should be zero: self.name = self.name.with_bit(self.bit_count() as u8, false); } self } /// Returns the number of bits in the prefix. pub fn bit_count(&self) -> usize { self.bit_count as usize } /// Returns `true` if this is the empty prefix, with no bits. 
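    ///
    /// For example, only the default (zero-bit) prefix is empty:
    ///
    /// ```
    /// # use xor_name::Prefix;
    /// assert!(Prefix::default().is_empty());
    /// assert!(!"0".parse::<Prefix>().unwrap().is_empty());
    /// ```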
pub fn is_empty(&self) -> bool { self.bit_count == 0 } /// Returns `true` if `self` is a prefix of `other` or vice versa. pub fn is_compatible(&self, other: &Self) -> bool { let i = self.name.common_prefix(&other.name); i >= self.bit_count() || i >= other.bit_count() } /// Returns `true` if `other` is compatible but strictly shorter than `self`. pub fn is_extension_of(&self, other: &Self) -> bool { let i = self.name.common_prefix(&other.name); i >= other.bit_count() && self.bit_count() > other.bit_count() } /// Returns `true` if the `other` prefix differs in exactly one bit from this one. pub fn is_neighbour(&self, other: &Self) -> bool { let i = self.name.common_prefix(&other.name); if i >= self.bit_count() || i >= other.bit_count() { false } else { let j = self .name .with_flipped_bit(i as u8) .common_prefix(&other.name); j >= self.bit_count() || j >= other.bit_count() } } /// Returns the number of common leading bits with the input name, capped with prefix length. pub fn common_prefix(&self, name: &XorName) -> usize { cmp::min(self.bit_count(), self.name.common_prefix(name)) } /// Returns `true` if this is a prefix of the given `name`. pub fn matches(&self, name: &XorName) -> bool { self.name.common_prefix(name) >= self.bit_count() } /// Compares the distance of `self` and `other` to `target`. Returns `Less` if `self` is closer, /// `Greater` if `other` is closer, and compares the prefix directly if of equal distance /// (this is to make sorting deterministic). pub fn cmp_distance(&self, other: &Self, target: &XorName) -> Ordering { if self.is_compatible(other) { // Note that if bit_counts are equal, prefixes are also equal since // one is a prefix of the other (is_compatible). Ord::cmp(&self.bit_count, &other.bit_count) } else { Ord::cmp( &other.name.common_prefix(target), &self.name.common_prefix(target), ) } } /// Compares the prefixes using breadth-first order. That is, shorter prefixes are ordered /// before longer. This is in contrast with the default `Ord` impl of `Prefix` which uses /// depth-first order. 
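    ///
    /// For example (prefixes written as bit strings via their `FromStr` impl):
    ///
    /// ```
    /// # use core::cmp::Ordering;
    /// # use xor_name::Prefix;
    /// let short: Prefix = "1".parse().unwrap();
    /// let long: Prefix = "01".parse().unwrap();
    /// // Breadth-first: the shorter prefix sorts first.
    /// assert_eq!(short.cmp_breadth_first(&long), Ordering::Less);
    /// // Depth-first (the `Ord` impl): "01" sorts before "1".
    /// assert_eq!(short.cmp(&long), Ordering::Greater);
    /// ```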
pub fn cmp_breadth_first(&self, other: &Self) -> Ordering { self.bit_count .cmp(&other.bit_count) .then_with(|| self.name.cmp(&other.name)) } /// Returns the smallest name matching the prefix pub fn lower_bound(&self) -> XorName { if self.bit_count() < 8 * XOR_NAME_LEN { self.name.set_remaining(self.bit_count() as u8, false) } else { self.name } } /// Returns the largest name matching the prefix pub fn upper_bound(&self) -> XorName { if self.bit_count() < 8 * XOR_NAME_LEN { self.name.set_remaining(self.bit_count() as u8, true) } else { self.name } } /// Inclusive range from lower_bound to upper_bound pub fn range_inclusive(&self) -> RangeInclusive { RangeInclusive::new(self.lower_bound(), self.upper_bound()) } /// Returns whether the namespace defined by `self` is covered by prefixes in the `prefixes` /// set pub fn is_covered_by<'a, I>(&self, prefixes: I) -> bool where I: IntoIterator + Clone, { let max_prefix_len = prefixes .clone() .into_iter() .map(Self::bit_count) .max() .unwrap_or(0); self.is_covered_by_impl(prefixes, max_prefix_len) } fn is_covered_by_impl<'a, I>(&self, prefixes: I, max_prefix_len: usize) -> bool where I: IntoIterator + Clone, { prefixes .clone() .into_iter() .any(|x| x.is_compatible(self) && x.bit_count() <= self.bit_count()) || (self.bit_count() <= max_prefix_len && self .pushed(false) .is_covered_by_impl(prefixes.clone(), max_prefix_len) && self .pushed(true) .is_covered_by_impl(prefixes, max_prefix_len)) } /// Returns the neighbouring prefix differing in the `i`-th bit /// If `i` is larger than our bit count, `self` is returned pub fn with_flipped_bit(&self, i: u8) -> Self { if i as usize >= self.bit_count() { *self } else { Self::new(self.bit_count(), self.name.with_flipped_bit(i)) } } /// Returns the given `name` with first bits replaced by `self` pub fn substituted_in(&self, mut name: XorName) -> XorName { // TODO: is there a more efficient way of doing that? for i in 0..self.bit_count() { name = name.with_bit(i as u8, self.name.bit(i as u8)); } name } /// Returns the same prefix, with the last bit flipped, or unchanged, if empty. pub fn sibling(&self) -> Self { if self.bit_count() > 0 && self.bit_count() < 8 * XOR_NAME_LEN { self.with_flipped_bit(self.bit_count() as u8 - 1) } else { *self } } /// Returns the ancestors of this prefix that has the given bit count. /// /// # Panics /// /// Panics if `bit_count` is not less than the bit count of this prefix. pub fn ancestor(&self, bit_count: u8) -> Self { assert!((bit_count as usize) < self.bit_count()); Self::new(bit_count as usize, self.name) } /// Returns an iterator that yields all ancestors of this prefix. 
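    ///
    /// The ancestors are yielded starting at the root (empty) prefix and exclude `self`, e.g.:
    ///
    /// ```
    /// # use xor_name::Prefix;
    /// let prefix: Prefix = "10".parse().unwrap();
    /// let root: Prefix = "".parse().unwrap();
    /// let one: Prefix = "1".parse().unwrap();
    /// assert_eq!(prefix.ancestors().collect::<Vec<_>>(), vec![root, one]);
    /// ```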
pub fn ancestors(&self) -> Ancestors { Ancestors { target: *self, current_len: 0, } } } impl PartialEq for Prefix { fn eq(&self, other: &Self) -> bool { self.is_compatible(other) && self.bit_count == other.bit_count } } impl PartialOrd for Prefix { fn partial_cmp(&self, other: &Self) -> Option { Some(self.cmp(other)) } } impl Ord for Prefix { fn cmp(&self, other: &Self) -> Ordering { if self == other { Ordering::Equal } else if self.is_compatible(other) { self.bit_count().cmp(&other.bit_count()) } else { self.name.cmp(&other.name) } } } impl Hash for Prefix { fn hash(&self, state: &mut H) { for i in 0..self.bit_count() { self.name.bit(i as u8).hash(state); } } } impl Binary for Prefix { fn fmt(&self, formatter: &mut Formatter) -> FmtResult { write!(formatter, "{0:1$b}", self.name, self.bit_count()) } } impl Debug for Prefix { fn fmt(&self, formatter: &mut Formatter) -> FmtResult { write!(formatter, "Prefix({:b})", self) } } /// Format `Prefix` as bit string, e.g. `"010"` with a [`Prefix::bit_count`] of `3`. impl Display for Prefix { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { // Use `Binary` impl from `XorName` with restricted width write!(f, "{:width$b}", self.name, width = self.bit_count as usize) } } #[derive(Debug)] pub enum FromStrError { InvalidChar(char), TooLong(usize), } impl Display for FromStrError { fn fmt(&self, f: &mut Formatter) -> FmtResult { match self { FromStrError::InvalidChar(c) => { write!(f, "expected `0` or `1`, but encountered `{}`", c) } FromStrError::TooLong(l) => { write!( f, "max length exceeded {} with length of {l}", XOR_NAME_LEN * 8 ) } } } } impl FromStr for Prefix { type Err = FromStrError; fn from_str(bits: &str) -> Result { if bits.len() > XOR_NAME_LEN * 8 { return Err(FromStrError::TooLong(bits.len())); } let mut name = [0; XOR_NAME_LEN]; for (i, bit) in bits.chars().enumerate() { if bit == '1' { let byte = i / 8; name[byte] |= 1 << (7 - (i % 8)); } else if bit != '0' { return Err(FromStrError::InvalidChar(bit)); } } Ok(Self::new(bits.len(), XorName(name))) } } /// Iterator that yields the ancestors of the given prefix starting at the root prefix. /// Does not include the prefix itself. 
pub struct Ancestors { target: Prefix, current_len: usize, } impl Iterator for Ancestors { type Item = Prefix; fn next(&mut self) -> Option { if self.current_len < self.target.bit_count() { let output = self.target.ancestor(self.current_len as u8); self.current_len += 1; Some(output) } else { None } } } #[cfg(test)] mod tests { use super::*; use rand::{rngs::SmallRng, seq::SliceRandom, SeedableRng}; #[test] fn prefix() { assert_eq!(parse("101").pushed(true), parse("1011")); assert_eq!(parse("101").pushed(false), parse("1010")); assert_eq!(parse("1011").popped(), parse("101")); assert!(parse("101").is_compatible(&parse("1010"))); assert!(parse("1010").is_compatible(&parse("101"))); assert!(!parse("1010").is_compatible(&parse("1011"))); assert!(parse("101").is_neighbour(&parse("1111"))); assert!(!parse("1010").is_neighbour(&parse("1111"))); assert!(parse("1010").is_neighbour(&parse("10111"))); assert!(!parse("101").is_neighbour(&parse("10111"))); assert!(parse("101").matches(&xor_name!(0b10101100))); assert!(!parse("1011").matches(&xor_name!(0b10101100))); assert_eq!(parse("0101").lower_bound(), xor_name!(0b01010000)); assert_eq!( parse("0101").upper_bound(), xor_name!( 0b01011111, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255 ) ); // Check we handle passing an excessive `bit_count` to `new()`. assert_eq!(Prefix::new(256, xor_name!(0)).bit_count(), 256); assert_eq!(Prefix::new(257, xor_name!(0)).bit_count(), 256); } #[test] fn breadth_first_order() { let expected = [ parse(""), parse("0"), parse("1"), parse("00"), parse("01"), parse("10"), parse("11"), parse("000"), parse("001"), parse("010"), parse("011"), parse("100"), parse("101"), parse("110"), parse("111"), ]; let mut rng = SmallRng::from_entropy(); for _ in 0..100 { let mut actual = expected; actual.shuffle(&mut rng); actual.sort_by(|lhs, rhs| lhs.cmp_breadth_first(rhs)); assert_eq!(actual, expected); } } #[test] fn ancestors() { let mut ancestors = parse("").ancestors(); assert_eq!(ancestors.next(), None); let mut ancestors = parse("0").ancestors(); assert_eq!(ancestors.next(), Some(parse(""))); assert_eq!(ancestors.next(), None); let mut ancestors = parse("01").ancestors(); assert_eq!(ancestors.next(), Some(parse(""))); assert_eq!(ancestors.next(), Some(parse("0"))); assert_eq!(ancestors.next(), None); let mut ancestors = parse("011").ancestors(); assert_eq!(ancestors.next(), Some(parse(""))); assert_eq!(ancestors.next(), Some(parse("0"))); assert_eq!(ancestors.next(), Some(parse("01"))); assert_eq!(ancestors.next(), None); } #[test] fn format_binary() { assert_eq!(&format!(0, "{:b}", parse("")), ""); assert_eq!(&format!(1, "{:b}", parse("0")), "0"); assert_eq!(&format!(2, "{:b}", parse("00")), "00"); assert_eq!(&format!(2, "{:b}", parse("01")), "01"); assert_eq!(&format!(2, "{:b}", parse("10")), "10"); assert_eq!(&format!(2, "{:b}", parse("11")), "11"); assert_eq!(&format!(7, "{:b}", parse("1100101")), "1100101"); // Bit string with 257 width assert!(Prefix::from_str(&"1".repeat(XOR_NAME_LEN * 8 + 1)).is_err()); } #[test] fn format_parse_roundtrip() { let format_parse_eq = |p| p == parse(&std::format!("{}", p)); assert!(format_parse_eq(Prefix::new(0, XorName([0xBB; 32])))); assert!(format_parse_eq(Prefix::new(256, XorName([0x33; 32])))); assert!(format_parse_eq(Prefix::new(5, XorName([0xAA; 32])))); assert!(format_parse_eq(Prefix::new(76, XorName([0xAA; 32])))); } fn parse(input: &str) -> Prefix { 
Prefix::from_str(input).unwrap() } } xor_name-5.0.0/src/serialize.rs000066400000000000000000000141241427320770600164620ustar00rootroot00000000000000use crate::{Prefix, XorName}; use serde::{ de::{self, Visitor}, ser::SerializeStruct, Deserialize, Deserializer, Serialize, Serializer, }; use std::{fmt, str::FromStr}; impl Serialize for XorName { fn serialize(&self, serializer: S) -> Result where S: Serializer, { // Return string with hexadecimal representation if serializer.is_human_readable() { return serializer.serialize_str(&hex::encode(self.0)); } // Default serialization. serializer.serialize_newtype_struct("XorName", &self.0) } } impl<'de> Deserialize<'de> for XorName { fn deserialize(deserializer: D) -> Result where D: Deserializer<'de>, { if deserializer.is_human_readable() { struct XorNameHexStrVisitor; impl<'de> Visitor<'de> for XorNameHexStrVisitor { type Value = XorName; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { write!(formatter, "32 byte hex string") } fn visit_str(self, s: &str) -> Result where E: de::Error, { let buffer = <[u8; 32] as hex::FromHex>::from_hex(s) .map_err(|e| E::custom(std::format!("hex decoding ({})", e)))?; Ok(XorName(buffer)) } } return deserializer.deserialize_str(XorNameHexStrVisitor); } #[derive(Deserialize)] #[serde(rename = "XorName")] struct XorNameDerived([u8; 32]); let x = ::deserialize(deserializer)?; Ok(XorName(x.0)) } } impl Serialize for Prefix { fn serialize(&self, serializer: S) -> Result where S: Serializer, { if serializer.is_human_readable() { // Use `Display` impl from `Prefix` return serializer.serialize_str(&std::format!("{}", self)); } let mut s = serializer.serialize_struct("Prefix", 2)?; s.serialize_field("bit_count", &self.bit_count)?; s.serialize_field("name", &self.name)?; s.end() } } impl<'de> Deserialize<'de> for Prefix { fn deserialize(deserializer: D) -> Result where D: Deserializer<'de>, { if deserializer.is_human_readable() { struct PrefixVisitor; impl<'de> Visitor<'de> for PrefixVisitor { type Value = Prefix; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { write!(formatter, "binary formatted string") } fn visit_str(self, s: &str) -> Result where E: de::Error, { Prefix::from_str(s).map_err(|e| { E::custom(std::format!("could not convert string to `Prefix`: {e}")) }) } } return deserializer.deserialize_str(PrefixVisitor); } #[derive(Deserialize)] #[serde(rename = "Prefix")] struct PrefixDerived { bit_count: u16, name: XorName, } let p = ::deserialize(deserializer)?; Ok(Prefix { bit_count: p.bit_count, name: p.name, }) } } #[cfg(test)] mod test { use super::*; use serde_test::*; /// `XorName` with derived `Serialize` impl. Used to compare against. #[derive(PartialEq, Debug, serde::Serialize, Deserialize)] struct XorNameDerived([u8; 32]); /// `Prefix` with derived `Serialize` impl. Used to compare against. 
#[derive(PartialEq, Debug, serde::Serialize, Deserialize)] struct PrefixDerived { bit_count: u16, name: XorNameDerived, } #[test] fn xorname_ser_de() { let xor = XorName([0xAA; 32]); let xor_derived = XorNameDerived([0xAA; 32]); let xor_hex_str = static_str("aa".repeat(32)); assert_tokens(&xor.readable(), &[Token::Str(xor_hex_str)]); assert_tokens(&xor.compact(), &xor_tokens("XorName")); // Verify our `Serialize` impl is same as when it would be derived assert_tokens(&xor_derived.compact(), &xor_tokens("XorNameDerived")); } #[test] fn prefix_ser_de() { let bit_count = 15; let prefix = Prefix { bit_count, name: XorName([0xAA; 32]), }; let prefix_derived = PrefixDerived { bit_count, name: XorNameDerived([0xAA; 32]), }; assert_tokens(&prefix.readable(), &[Token::Str("101010101010101")]); assert_tokens( &prefix.compact(), &prefix_tokens(bit_count, "Prefix", "XorName"), ); // Verify our `Serialize` impl is same as when it would be derived assert_tokens( &prefix_derived.compact(), &prefix_tokens(bit_count, "PrefixDerived", "XorNameDerived"), ); } // Little helper to leak a &str to obtain a static str (`Token::Str` requires &'static str) fn static_str(s: String) -> &'static str { Box::leak(s.into_boxed_str()) } // Compact/derived representation of `XorName` fn xor_tokens(name: &'static str) -> Vec { let mut a = vec![]; a.extend_from_slice(&[Token::NewtypeStruct { name }, Token::Tuple { len: 32 }]); a.extend_from_slice(&[Token::U8(0xAA); 32]); // Repeat a U8 Token 32 times a.extend_from_slice(&[Token::TupleEnd]); a } // Compact/derived representation of `Prefix` fn prefix_tokens(bit_count: u16, name: &'static str, name2: &'static str) -> Vec { let mut v = vec![ Token::Struct { name, len: 2 }, Token::Str("bit_count"), Token::U16(bit_count), Token::Str("name"), ]; v.extend_from_slice(&xor_tokens(name2)); v.extend_from_slice(&[Token::StructEnd]); v } }