hashlink-0.8.0/.cargo_vcs_info.json0000644000000001120000000000100126340ustar { "git": { "sha1": "f5846ec3ff06865a204114bd710253e7e67e4498" } } hashlink-0.8.0/.circleci/config.yml000064400000000000000000000035330072674642500153110ustar 00000000000000version: 2 jobs: build: docker: - image: circleci/rust:latest steps: - checkout - run: name: Setup Rust command: | rustup toolchain uninstall nightly rustup toolchain install nightly -c miri rust-src rustfmt - run: name: Version information command: | rustc --version cargo --version rustc +nightly --version cargo +nightly --version rustup --version - run: name: Calculate dependencies command: cargo generate-lockfile - restore_cache: keys: - cargo-cache-{{ arch }}-{{ checksum "Cargo.lock" }} - run: name: Check Formatting command: | rustfmt --version cargo fmt --all -- --check --color=auto - run: name: Build all targets command: cargo build --all --all-targets - run: name: Run all tests command: cargo test --all --all-features - run: name: Run all tests under miri command: | cargo +nightly miri test --all-features - run: name: Run all tests under sanitizers command: | RUSTFLAGS="-Z sanitizer=address" cargo +nightly -Z build-std test --target x86_64-unknown-linux-gnu --all-features RUSTFLAGS="-Z sanitizer=leak" cargo +nightly test -Z build-std --target x86_64-unknown-linux-gnu --all-features RUSTFLAGS="-Z sanitizer=memory" cargo +nightly test -Z build-std --target x86_64-unknown-linux-gnu --all-features - save_cache: paths: - /usr/local/cargo/registry - target/debug/.fingerprint - target/debug/build - target/debug/deps key: cargo-cache-{{ arch }}-{{ checksum "Cargo.lock" }} hashlink-0.8.0/.gitignore000064400000000000000000000001170072674642500134510ustar 00000000000000/target **/*.rs.bk Cargo.lock .DS_Store .envrc .direnv shell.nix .dir-locals.elhashlink-0.8.0/CHANGELOG.md000064400000000000000000000054340072674642500133010ustar 00000000000000## [0.8.0] - API incompatible change: No longer re-export hashbrown types so that bumping hashbrown is no longer an API compatible change. - bump hashbrown to 0.12 - Fix implementation of `shrink_to_fit` to not panic when called on non-empty containers. ## [0.7.0] - API incompatible change: depend on hashbrown 0.11, changes re-exported types. - Fix `LinkedHashSet::back` to take `&self` not `&mut self`. - API incompatible change: equality tests on `LinkedHashSet` are now *ordered*, similar to `LinkedHashMap`. - Make the serde `Deserialize` implementations on `LinkedHashMap` and `LinkedHashSet` generic on the `BuildHasher` type. - Add `to_back` and `to_front` methods for `LinkedHashMap` to control entry order. ## [0.6.0] - API incompatible change: depend on hashbrown 0.9, re-export renamed hashbrown::TryReserveError type. - Add a `Debug` impl to `LruCache` (thanks @thomcc!) - Adjust trait bounds for `LinkedHashMap::retain`, `LinkedHashSet::default` to be less strict (to match hashbrown) - Adjust trait bounds for all `Debug` impls to be less strict (to match hashbrown). - Adjust trait bounds for all `IntoIterator` impls to be less strict (to match hashbrown). - Adjust trait bounds for `LruCache::with_hasher`, `LruCache::capacity`, `LruCache::len`, `LruCache::is_empty`, `LruCache::clear`, `LruCache::iter`, `LruCache::iter_mut`, and `LruCache::drain` to be less strict - Add optional serde support for `LinkedHashMap` and `LinkedHashSet`. - Add `to_back` and `to_front` methods for LinkedHashSet to control entry order. 
## [0.5.1] - Add `LinkedHashMap::remove_entry` and `LruCache::remove_entry` - Add `LruCache::new_unbounded` constructor that sets capacity to usize::MAX - Add `LruCache::get` method to go with `LruCache::get_mut` - Add `LruCache::peek` and `LruCache::peek_mut` to access the cache without moving the entry in the LRU list ## [0.5.0] - API incompatible change: depend on hashbrown 0.7 ## [0.4.0] - API incompatible change: depend on hashbrown 0.6 - Passes miri ## [0.3.0] - Add some *minimal* documentation for methods that change the internal ordering. - Decide on a pattern for methods that change the internal ordering: the word "insert" means that it will move an existing entry to the back. - Some methods have been renamed to conform to the above system. ## [0.2.1] - Fix variance for LinkedHashMap (now covariant where appropriate) - Add Debug impls to many more associated types - Add LinkedHashSet - Add `LinkedHashMap::retain` ## [0.2.0] - Move `linked_hash_map` into its own module - Add `LruCache` type ported from `lru-cache` crate into its own module - Add `LruCache` entry and raw-entry API - Add `linked_hash_map` `IntoIter` iterator that is different from `Drain` iterator - Make `Drain` iterator recycle freed linked list nodes ## [0.1.0] - Initial release hashlink-0.8.0/Cargo.toml0000644000000021620000000000100106410ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. [package] edition = "2018" name = "hashlink" version = "0.8.0" authors = ["kyren "] description = "HashMap-like containers that hold their key-value pairs in a user controllable order" documentation = "https://docs.rs/hashlink" readme = "README.md" keywords = ["data-structures"] license = "MIT OR Apache-2.0" repository = "https://github.com/kyren/hashlink" [dependencies.hashbrown] version = "0.12.0" [dependencies.serde] version = "1.0" optional = true [dev-dependencies.fxhash] version = "0.2.1" [dev-dependencies.serde_test] version = "1.0" [features] serde_impl = ["serde"] [badges.circle-ci] branch = "master" repository = "kyren/hashlink" hashlink-0.8.0/Cargo.toml.orig000064400000000000000000000011640072674642500143530ustar 00000000000000[package] name = "hashlink" version = "0.8.0" authors = ["kyren "] edition = "2018" description = "HashMap-like containers that hold their key-value pairs in a user controllable order" repository = "https://github.com/kyren/hashlink" documentation = "https://docs.rs/hashlink" readme = "README.md" keywords = ["data-structures"] license = "MIT OR Apache-2.0" [badges] circle-ci = { repository = "kyren/hashlink", branch = "master" } [features] serde_impl = ["serde"] [dependencies] hashbrown = "0.12.0" serde = { version = "1.0", optional = true } [dev-dependencies] serde_test = "1.0" fxhash = "0.2.1" hashlink-0.8.0/LICENSE-APACHE000064400000000000000000000251410072674642500134110ustar 00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. 
"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. 
This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the specific language governing permissions and limitations under the License.hashlink-0.8.0/LICENSE-MIT000064400000000000000000000021550072674642500131210ustar 00000000000000This work is derived in part from the `linked-hash-map` crate, Copyright (c) 2015 The Rust Project Developers Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.hashlink-0.8.0/README.md000064400000000000000000000051330072674642500127430ustar 00000000000000# hashlink -- HashMap-like containers that hold their key-value pairs in a user controllable order [![Build Status](https://img.shields.io/circleci/project/github/kyren/hashlink.svg)](https://circleci.com/gh/kyren/hashlink) [![Latest Version](https://img.shields.io/crates/v/hashlink.svg)](https://crates.io/crates/hashlink) [![API Documentation](https://docs.rs/hashlink/badge.svg)](https://docs.rs/hashlink) This crate is a fork of [linked-hash-map](https://github.com/contain-rs/linked-hash-map) that builds on top of [hashbrown](https://github.com/rust-lang/hashbrown) to implement more up to date versions of `LinkedHashMap` `LinkedHashSet`, and `LruCache`. One important API change is that when a `LinkedHashMap` is used as a LRU cache, it allows you to easily retrieve an entry and move it to the back OR produce a new entry at the back without needlessly repeating key hashing and lookups: ``` rust let mut lru_cache = LinkedHashMap::new(); let key = "key".to_owned(); // Try to find my expensive to construct and hash key let _cached_val = match lru_cache.raw_entry_mut().from_key(&key) { RawEntryMut::Occupied(mut occupied) => { // Cache hit, move entry to the back. occupied.to_back(); occupied.into_mut() } RawEntryMut::Vacant(vacant) => { // Insert expensive to construct key and expensive to compute value, // automatically inserted at the back. vacant.insert(key.clone(), 42).1 } }; ``` Or, a simpler way to do the same thing: ``` rust let mut lru_cache = LinkedHashMap::new(); let key = "key".to_owned(); let _cached_val = lru_cache .raw_entry_mut() .from_key(&key) .or_insert_with(|| (key.clone(), 42)); ``` This crate contains a decent amount of unsafe code from handling its internal linked list, and the unsafe code has diverged quite a lot from the original `linked-hash-map` implementation. It currently passes tests under miri and sanitizers, but it should probably still receive more review and testing, and check for test code coverage. 
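This crate also ships a ready-made `LruCache` type for cases where you simply want a bounded cache and don't need the raw-entry API shown above. Below is a minimal sketch of the intended usage (the method names used here — `new`, `insert`, `get`, and `len` — are assumed from the crate's changelog and docs rather than spelled out elsewhere in this README):

``` rust
use hashlink::LruCache;

// A cache holding at most two entries; inserting beyond capacity evicts the
// least recently used entry.
let mut cache = LruCache::new(2);
cache.insert("a", 1);
cache.insert("b", 2);

// Reading "a" marks it as most recently used...
assert_eq!(cache.get("a"), Some(&1));

// ...so inserting "c" pushes out "b", the least recently used entry.
cache.insert("c", 3);
assert_eq!(cache.get("b"), None);
assert_eq!(cache.len(), 2);
```

As with the raw-entry examples above, `get` counts as a use and moves the entry to the most-recently-used position; the changelog's `peek` and `peek_mut` methods access an entry without touching its position in the LRU list.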
## Credit There is a huge amount of code in this crate that is copied verbatim from `linked-hash-map` and `hashbrown`, especially tests, associated types like iterators, and things like `Debug` impls. ## License This library is licensed the same as [linked-hash-map](https://github.com/contain-rs/linked-hash-map) and [hashbrown](https://github.com/rust-lang/hashbrown), it is licensed under either of: * MIT license [LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT * Apache License 2.0 [LICENSE-APACHE](LICENSE-APACHE) or https://opensource.org/licenses/Apache-2.0 at your option. hashlink-0.8.0/src/lib.rs000064400000000000000000000003410072674642500133630ustar 00000000000000pub mod linked_hash_map; pub mod linked_hash_set; pub mod lru_cache; #[cfg(feature = "serde_impl")] pub mod serde; pub use linked_hash_map::LinkedHashMap; pub use linked_hash_set::LinkedHashSet; pub use lru_cache::LruCache; hashlink-0.8.0/src/linked_hash_map.rs000064400000000000000000001547520072674642500157430ustar 00000000000000use std::{ alloc::Layout, borrow::Borrow, cmp::Ordering, fmt, hash::{BuildHasher, Hash, Hasher}, iter::FromIterator, marker::PhantomData, mem::{self, MaybeUninit}, ops::{Index, IndexMut}, ptr::{self, NonNull}, }; use hashbrown::{hash_map, HashMap}; pub enum TryReserveError { CapacityOverflow, AllocError { layout: Layout }, } /// A version of `HashMap` that has a user controllable order for its entries. /// /// It achieves this by keeping its entries in an internal linked list and using a `HashMap` to /// point at nodes in this linked list. /// /// The order of entries defaults to "insertion order", but the user can also modify the order of /// existing entries by manually moving them to the front or back. /// /// There are two kinds of methods that modify the order of the internal list: /// /// * Methods that have names like `to_front` and `to_back` will unsurprisingly move an existing /// entry to the front or back /// * Methods that have the word `insert` will insert a new entry ot the back of the list, and if /// that method might replace an entry, that method will *also move that existing entry to the /// back*. pub struct LinkedHashMap { map: HashMap>, (), NullHasher>, // We need to keep any custom hash builder outside of the HashMap so we can access it alongside // the entry API without mutable aliasing. hash_builder: S, // Circular linked list of nodes. If `values` is non-null, it will point to a "guard node" // which will never have an initialized key or value, `values.prev` will contain the last key / // value in the list, `values.next` will contain the first key / value in the list. values: Option>>, // *Singly* linked list of free nodes. The `prev` pointers in the free list should be assumed // invalid. 
free: Option>>, } impl LinkedHashMap { #[inline] pub fn new() -> Self { Self { hash_builder: hash_map::DefaultHashBuilder::default(), map: HashMap::with_hasher(NullHasher), values: None, free: None, } } #[inline] pub fn with_capacity(capacity: usize) -> Self { Self { hash_builder: hash_map::DefaultHashBuilder::default(), map: HashMap::with_capacity_and_hasher(capacity, NullHasher), values: None, free: None, } } } impl LinkedHashMap { #[inline] pub fn with_hasher(hash_builder: S) -> Self { Self { hash_builder, map: HashMap::with_hasher(NullHasher), values: None, free: None, } } #[inline] pub fn with_capacity_and_hasher(capacity: usize, hash_builder: S) -> Self { Self { hash_builder, map: HashMap::with_capacity_and_hasher(capacity, NullHasher), values: None, free: None, } } #[inline] pub fn reserve(&mut self, additional: usize) { self.map.reserve(additional); } #[inline] pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> { self.map.try_reserve(additional).map_err(|e| match e { hashbrown::TryReserveError::CapacityOverflow => TryReserveError::CapacityOverflow, hashbrown::TryReserveError::AllocError { layout } => { TryReserveError::AllocError { layout } } }) } #[inline] pub fn len(&self) -> usize { self.map.len() } #[inline] pub fn is_empty(&self) -> bool { self.len() == 0 } #[inline] pub fn clear(&mut self) { self.map.clear(); if let Some(mut values) = self.values { unsafe { drop_value_nodes(values); values.as_mut().links.value = ValueLinks { prev: values, next: values, }; } } } #[inline] pub fn iter(&self) -> Iter { let (head, tail) = if let Some(values) = self.values { unsafe { let ValueLinks { next, prev } = values.as_ref().links.value; (next.as_ptr(), prev.as_ptr()) } } else { (ptr::null_mut(), ptr::null_mut()) }; Iter { head, tail, remaining: self.len(), marker: PhantomData, } } #[inline] pub fn iter_mut(&mut self) -> IterMut { let (head, tail) = if let Some(values) = self.values { unsafe { let ValueLinks { next, prev } = values.as_ref().links.value; (Some(next), Some(prev)) } } else { (None, None) }; IterMut { head, tail, remaining: self.len(), marker: PhantomData, } } #[inline] pub fn drain(&mut self) -> Drain<'_, K, V> { unsafe { let (head, tail) = if let Some(mut values) = self.values { let ValueLinks { next, prev } = values.as_ref().links.value; values.as_mut().links.value = ValueLinks { next: values, prev: values, }; (Some(next), Some(prev)) } else { (None, None) }; let len = self.len(); self.map.clear(); Drain { free: (&mut self.free).into(), head, tail, remaining: len, marker: PhantomData, } } } #[inline] pub fn keys(&self) -> Keys { Keys { inner: self.iter() } } #[inline] pub fn values(&self) -> Values { Values { inner: self.iter() } } #[inline] pub fn values_mut(&mut self) -> ValuesMut { ValuesMut { inner: self.iter_mut(), } } #[inline] pub fn front(&self) -> Option<(&K, &V)> { if self.is_empty() { return None; } unsafe { let front = (*self.values.as_ptr()).links.value.next.as_ptr(); let (key, value) = (*front).entry_ref(); Some((key, value)) } } #[inline] pub fn back(&self) -> Option<(&K, &V)> { if self.is_empty() { return None; } unsafe { let back = &*(*self.values.as_ptr()).links.value.prev.as_ptr(); let (key, value) = (*back).entry_ref(); Some((key, value)) } } #[inline] pub fn retain(&mut self, mut f: F) where F: FnMut(&K, &mut V) -> bool, { // We do not drop the key and value when a value is filtered from the map during the call to // `retain`. 
We need to be very careful not to have a live `HashMap` entry pointing to // either a dangling `Node` or a `Node` with dropped keys / values. Since the key and value // types may panic on drop, they may short-circuit the entry in the map actually being // removed. Instead, we push the removed nodes onto the free list eagerly, then try and // drop the keys and values for any newly freed nodes *after* `HashMap::retain` has // completely finished. struct DropFilteredValues<'a, K, V> { free: &'a mut Option>>, cur_free: Option>>, } impl<'a, K, V> DropFilteredValues<'a, K, V> { #[inline] fn drop_later(&mut self, node: NonNull>) { unsafe { detach_node(node); push_free(&mut self.cur_free, node); } } } impl<'a, K, V> Drop for DropFilteredValues<'a, K, V> { fn drop(&mut self) { unsafe { let end_free = self.cur_free; while self.cur_free != *self.free { let cur_free = self.cur_free.as_ptr(); (*cur_free).take_entry(); self.cur_free = (*cur_free).links.free.next; } *self.free = end_free; } } } let free = self.free; let mut drop_filtered_values = DropFilteredValues { free: &mut self.free, cur_free: free, }; self.map.retain(|&node, _| unsafe { let (k, v) = (*node.as_ptr()).entry_mut(); if f(k, v) { true } else { drop_filtered_values.drop_later(node); false } }); } #[inline] pub fn hasher(&self) -> &S { &self.hash_builder } #[inline] pub fn capacity(&self) -> usize { self.map.capacity() } } impl LinkedHashMap where K: Eq + Hash, S: BuildHasher, { #[inline] pub fn entry(&mut self, key: K) -> Entry<'_, K, V, S> { match self.raw_entry_mut().from_key(&key) { RawEntryMut::Occupied(occupied) => Entry::Occupied(OccupiedEntry { key, raw_entry: occupied, }), RawEntryMut::Vacant(vacant) => Entry::Vacant(VacantEntry { key, raw_entry: vacant, }), } } #[inline] pub fn get(&self, k: &Q) -> Option<&V> where K: Borrow, Q: Hash + Eq + ?Sized, { self.raw_entry().from_key(k).map(|(_, v)| v) } #[inline] pub fn get_key_value(&self, k: &Q) -> Option<(&K, &V)> where K: Borrow, Q: Hash + Eq + ?Sized, { self.raw_entry().from_key(k) } #[inline] pub fn contains_key(&self, k: &Q) -> bool where K: Borrow, Q: Hash + Eq + ?Sized, { self.get(k).is_some() } #[inline] pub fn get_mut(&mut self, k: &Q) -> Option<&mut V> where K: Borrow, Q: Hash + Eq + ?Sized, { match self.raw_entry_mut().from_key(k) { RawEntryMut::Occupied(occupied) => Some(occupied.into_mut()), RawEntryMut::Vacant(_) => None, } } /// Inserts the given key / value pair at the *back* of the internal linked list. /// /// Returns the previously set value, if one existed prior to this call. After this call, /// calling `LinkedHashMap::back` will return a reference to this key / value pair. #[inline] pub fn insert(&mut self, k: K, v: V) -> Option { match self.raw_entry_mut().from_key(&k) { RawEntryMut::Occupied(mut occupied) => { occupied.to_back(); Some(occupied.replace_value(v)) } RawEntryMut::Vacant(vacant) => { vacant.insert(k, v); None } } } /// If the given key is not in this map, inserts the key / value pair at the *back* of the /// internal linked list and returns `None`, otherwise, replaces the existing value with the /// given value *without* moving the entry in the internal linked list and returns the previous /// value. 
#[inline] pub fn replace(&mut self, k: K, v: V) -> Option { match self.raw_entry_mut().from_key(&k) { RawEntryMut::Occupied(mut occupied) => Some(occupied.replace_value(v)), RawEntryMut::Vacant(vacant) => { vacant.insert(k, v); None } } } #[inline] pub fn remove(&mut self, k: &Q) -> Option where K: Borrow, Q: Hash + Eq + ?Sized, { match self.raw_entry_mut().from_key(&k) { RawEntryMut::Occupied(occupied) => Some(occupied.remove()), RawEntryMut::Vacant(_) => None, } } #[inline] pub fn remove_entry(&mut self, k: &Q) -> Option<(K, V)> where K: Borrow, Q: Hash + Eq + ?Sized, { match self.raw_entry_mut().from_key(&k) { RawEntryMut::Occupied(occupied) => Some(occupied.remove_entry()), RawEntryMut::Vacant(_) => None, } } #[inline] pub fn pop_front(&mut self) -> Option<(K, V)> { if self.is_empty() { return None; } unsafe { let front = (*self.values.as_ptr()).links.value.next; match self.map.raw_entry_mut().from_hash( hash_key(&self.hash_builder, front.as_ref().key_ref()), |k| (*k).as_ref().key_ref().eq(front.as_ref().key_ref()), ) { hash_map::RawEntryMut::Occupied(occupied) => { Some(remove_node(&mut self.free, occupied.remove_entry().0)) } hash_map::RawEntryMut::Vacant(_) => None, } } } #[inline] pub fn pop_back(&mut self) -> Option<(K, V)> { if self.is_empty() { return None; } unsafe { let back = (*self.values.as_ptr()).links.value.prev; match self .map .raw_entry_mut() .from_hash(hash_key(&self.hash_builder, back.as_ref().key_ref()), |k| { (*k).as_ref().key_ref().eq(back.as_ref().key_ref()) }) { hash_map::RawEntryMut::Occupied(occupied) => { Some(remove_node(&mut self.free, occupied.remove_entry().0)) } hash_map::RawEntryMut::Vacant(_) => None, } } } /// If an entry with this key exists, move it to the front of the list and return a reference to /// the value. #[inline] pub fn to_front(&mut self, k: &Q) -> Option<&mut V> where K: Borrow, Q: Hash + Eq + ?Sized, { match self.raw_entry_mut().from_key(k) { RawEntryMut::Occupied(mut occupied) => { occupied.to_front(); Some(occupied.into_mut()) } RawEntryMut::Vacant(_) => None, } } /// If an entry with this key exists, move it to the back of the list and return a reference to /// the value. 
#[inline] pub fn to_back(&mut self, k: &Q) -> Option<&mut V> where K: Borrow, Q: Hash + Eq + ?Sized, { match self.raw_entry_mut().from_key(k) { RawEntryMut::Occupied(mut occupied) => { occupied.to_back(); Some(occupied.into_mut()) } RawEntryMut::Vacant(_) => None, } } #[inline] pub fn shrink_to_fit(&mut self) { unsafe { let len = self.map.len(); if len != self.map.capacity() { self.map = HashMap::with_hasher(NullHasher); self.map.reserve(len); if let Some(guard) = self.values { let mut cur = guard.as_ref().links.value.next; while cur != guard { let hash = hash_key(&self.hash_builder, cur.as_ref().key_ref()); match self .map .raw_entry_mut() .from_hash(hash, |k| (*k).as_ref().key_ref().eq(cur.as_ref().key_ref())) { hash_map::RawEntryMut::Occupied(_) => unreachable!(), hash_map::RawEntryMut::Vacant(vacant) => { let hash_builder = &self.hash_builder; vacant.insert_with_hasher(hash, cur, (), |k| { hash_key(hash_builder, (*k).as_ref().key_ref()) }); } } cur = cur.as_ref().links.value.next; } } } drop_free_nodes(self.free); self.free = None; } } } impl LinkedHashMap where S: BuildHasher, { #[inline] pub fn raw_entry(&self) -> RawEntryBuilder<'_, K, V, S> { RawEntryBuilder { hash_builder: &self.hash_builder, entry: self.map.raw_entry(), } } #[inline] pub fn raw_entry_mut(&mut self) -> RawEntryBuilderMut<'_, K, V, S> { RawEntryBuilderMut { hash_builder: &self.hash_builder, values: &mut self.values, free: &mut self.free, entry: self.map.raw_entry_mut(), } } } impl Default for LinkedHashMap where S: Default, { #[inline] fn default() -> Self { Self::with_hasher(S::default()) } } impl FromIterator<(K, V)> for LinkedHashMap { #[inline] fn from_iter>(iter: I) -> Self { let iter = iter.into_iter(); let mut map = Self::with_capacity_and_hasher(iter.size_hint().0, S::default()); map.extend(iter); map } } impl fmt::Debug for LinkedHashMap where K: fmt::Debug, V: fmt::Debug, { #[inline] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_map().entries(self).finish() } } impl PartialEq for LinkedHashMap { #[inline] fn eq(&self, other: &Self) -> bool { self.len() == other.len() && self.iter().eq(other) } } impl Eq for LinkedHashMap {} impl PartialOrd for LinkedHashMap { #[inline] fn partial_cmp(&self, other: &Self) -> Option { self.iter().partial_cmp(other) } #[inline] fn lt(&self, other: &Self) -> bool { self.iter().lt(other) } #[inline] fn le(&self, other: &Self) -> bool { self.iter().le(other) } #[inline] fn ge(&self, other: &Self) -> bool { self.iter().ge(other) } #[inline] fn gt(&self, other: &Self) -> bool { self.iter().gt(other) } } impl Ord for LinkedHashMap { #[inline] fn cmp(&self, other: &Self) -> Ordering { self.iter().cmp(other) } } impl Hash for LinkedHashMap { #[inline] fn hash(&self, h: &mut H) { for e in self.iter() { e.hash(h); } } } impl Drop for LinkedHashMap { #[inline] fn drop(&mut self) { unsafe { if let Some(values) = self.values { drop_value_nodes(values); Box::from_raw(values.as_ptr()); } drop_free_nodes(self.free); } } } unsafe impl Send for LinkedHashMap {} unsafe impl Sync for LinkedHashMap {} impl<'a, K, V, S, Q> Index<&'a Q> for LinkedHashMap where K: Hash + Eq + Borrow, S: BuildHasher, Q: Eq + Hash + ?Sized, { type Output = V; #[inline] fn index(&self, index: &'a Q) -> &V { self.get(index).expect("no entry found for key") } } impl<'a, K, V, S, Q> IndexMut<&'a Q> for LinkedHashMap where K: Hash + Eq + Borrow, S: BuildHasher, Q: Eq + Hash + ?Sized, { #[inline] fn index_mut(&mut self, index: &'a Q) -> &mut V { self.get_mut(index).expect("no entry found for key") } } impl 
Clone for LinkedHashMap { #[inline] fn clone(&self) -> Self { let mut map = Self::with_hasher(self.hash_builder.clone()); map.extend(self.iter().map(|(k, v)| (k.clone(), v.clone()))); map } } impl Extend<(K, V)> for LinkedHashMap { #[inline] fn extend>(&mut self, iter: I) { for (k, v) in iter { self.insert(k, v); } } } impl<'a, K, V, S> Extend<(&'a K, &'a V)> for LinkedHashMap where K: 'a + Hash + Eq + Copy, V: 'a + Copy, S: BuildHasher, { #[inline] fn extend>(&mut self, iter: I) { for (&k, &v) in iter { self.insert(k, v); } } } pub enum Entry<'a, K, V, S> { Occupied(OccupiedEntry<'a, K, V>), Vacant(VacantEntry<'a, K, V, S>), } impl fmt::Debug for Entry<'_, K, V, S> { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { Entry::Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(), Entry::Occupied(ref o) => f.debug_tuple("Entry").field(o).finish(), } } } impl<'a, K, V, S> Entry<'a, K, V, S> { /// If this entry is vacant, inserts a new entry with the given value and returns a reference to /// it. /// /// If this entry is occupied, this method *moves the occupied entry to the back of the internal /// linked list* and returns a reference to the existing value. #[inline] pub fn or_insert(self, default: V) -> &'a mut V where K: Hash, S: BuildHasher, { match self { Entry::Occupied(mut entry) => { entry.to_back(); entry.into_mut() } Entry::Vacant(entry) => entry.insert(default), } } /// Similar to `Entry::or_insert`, but accepts a function to construct a new value if this entry /// is vacant. #[inline] pub fn or_insert_with V>(self, default: F) -> &'a mut V where K: Hash, S: BuildHasher, { match self { Entry::Occupied(mut entry) => { entry.to_back(); entry.into_mut() } Entry::Vacant(entry) => entry.insert(default()), } } #[inline] pub fn key(&self) -> &K { match *self { Entry::Occupied(ref entry) => entry.key(), Entry::Vacant(ref entry) => entry.key(), } } #[inline] pub fn and_modify(self, f: F) -> Self where F: FnOnce(&mut V), { match self { Entry::Occupied(mut entry) => { f(entry.get_mut()); Entry::Occupied(entry) } Entry::Vacant(entry) => Entry::Vacant(entry), } } } pub struct OccupiedEntry<'a, K, V> { key: K, raw_entry: RawOccupiedEntryMut<'a, K, V>, } impl fmt::Debug for OccupiedEntry<'_, K, V> { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("OccupiedEntry") .field("key", self.key()) .field("value", self.get()) .finish() } } impl<'a, K, V> OccupiedEntry<'a, K, V> { #[inline] pub fn key(&self) -> &K { self.raw_entry.key() } #[inline] pub fn remove_entry(self) -> (K, V) { self.raw_entry.remove_entry() } #[inline] pub fn get(&self) -> &V { self.raw_entry.get() } #[inline] pub fn get_mut(&mut self) -> &mut V { self.raw_entry.get_mut() } #[inline] pub fn into_mut(self) -> &'a mut V { self.raw_entry.into_mut() } #[inline] pub fn to_back(&mut self) { self.raw_entry.to_back() } #[inline] pub fn to_front(&mut self) { self.raw_entry.to_front() } /// Replaces this entry's value with the provided value. /// /// Similarly to `LinkedHashMap::insert`, this moves the existing entry to the back of the /// internal linked list. #[inline] pub fn insert(&mut self, value: V) -> V { self.raw_entry.to_back(); self.raw_entry.replace_value(value) } #[inline] pub fn remove(self) -> V { self.raw_entry.remove() } /// Similar to `OccupiedEntry::replace_entry`, but *does* move the entry to the back of the /// internal linked list. 
#[inline] pub fn insert_entry(mut self, value: V) -> (K, V) { self.raw_entry.to_back(); self.replace_entry(value) } /// Replaces the entry's key with the key provided to `LinkedHashMap::entry`, and replaces the /// entry's value with the given `value` parameter. /// /// Does *not* move the entry to the back of the internal linked list. pub fn replace_entry(mut self, value: V) -> (K, V) { let old_key = mem::replace(self.raw_entry.key_mut(), self.key); let old_value = mem::replace(self.raw_entry.get_mut(), value); (old_key, old_value) } /// Replaces this entry's key with the key provided to `LinkedHashMap::entry`. /// /// Does *not* move the entry to the back of the internal linked list. #[inline] pub fn replace_key(mut self) -> K { mem::replace(self.raw_entry.key_mut(), self.key) } } pub struct VacantEntry<'a, K, V, S> { key: K, raw_entry: RawVacantEntryMut<'a, K, V, S>, } impl fmt::Debug for VacantEntry<'_, K, V, S> { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("VacantEntry").field(self.key()).finish() } } impl<'a, K, V, S> VacantEntry<'a, K, V, S> { #[inline] pub fn key(&self) -> &K { &self.key } #[inline] pub fn into_key(self) -> K { self.key } /// Insert's the key for this vacant entry paired with the given value as a new entry at the /// *back* of the internal linked list. #[inline] pub fn insert(self, value: V) -> &'a mut V where K: Hash, S: BuildHasher, { self.raw_entry.insert(self.key, value).1 } } pub struct RawEntryBuilder<'a, K, V, S> { hash_builder: &'a S, entry: hash_map::RawEntryBuilder<'a, NonNull>, (), NullHasher>, } impl<'a, K, V, S> RawEntryBuilder<'a, K, V, S> where S: BuildHasher, { #[inline] pub fn from_key(self, k: &Q) -> Option<(&'a K, &'a V)> where K: Borrow, Q: Hash + Eq + ?Sized, { let hash = hash_key(self.hash_builder, k); self.from_key_hashed_nocheck(hash, k) } #[inline] pub fn from_key_hashed_nocheck(self, hash: u64, k: &Q) -> Option<(&'a K, &'a V)> where K: Borrow, Q: Hash + Eq + ?Sized, { self.from_hash(hash, move |o| k.eq(o.borrow())) } #[inline] pub fn from_hash( self, hash: u64, mut is_match: impl FnMut(&K) -> bool, ) -> Option<(&'a K, &'a V)> { unsafe { let node = *self .entry .from_hash(hash, move |k| is_match((*k).as_ref().key_ref()))? 
.0; let (key, value) = (*node.as_ptr()).entry_ref(); Some((key, value)) } } } unsafe impl<'a, K, V, S> Send for RawEntryBuilder<'a, K, V, S> where K: Send, V: Send, S: Send, { } unsafe impl<'a, K, V, S> Sync for RawEntryBuilder<'a, K, V, S> where K: Sync, V: Sync, S: Sync, { } pub struct RawEntryBuilderMut<'a, K, V, S> { hash_builder: &'a S, values: &'a mut Option>>, free: &'a mut Option>>, entry: hash_map::RawEntryBuilderMut<'a, NonNull>, (), NullHasher>, } impl<'a, K, V, S> RawEntryBuilderMut<'a, K, V, S> where S: BuildHasher, { #[inline] pub fn from_key(self, k: &Q) -> RawEntryMut<'a, K, V, S> where K: Borrow, Q: Hash + Eq + ?Sized, { let hash = hash_key(self.hash_builder, k); self.from_key_hashed_nocheck(hash, k) } #[inline] pub fn from_key_hashed_nocheck(self, hash: u64, k: &Q) -> RawEntryMut<'a, K, V, S> where K: Borrow, Q: Hash + Eq + ?Sized, { self.from_hash(hash, move |o| k.eq(o.borrow())) } #[inline] pub fn from_hash( self, hash: u64, mut is_match: impl FnMut(&K) -> bool, ) -> RawEntryMut<'a, K, V, S> { let entry = self .entry .from_hash(hash, move |k| is_match(unsafe { (*k).as_ref().key_ref() })); match entry { hash_map::RawEntryMut::Occupied(occupied) => { RawEntryMut::Occupied(RawOccupiedEntryMut { free: self.free, values: self.values, entry: occupied, }) } hash_map::RawEntryMut::Vacant(vacant) => RawEntryMut::Vacant(RawVacantEntryMut { hash_builder: self.hash_builder, values: self.values, free: self.free, entry: vacant, }), } } } unsafe impl<'a, K, V, S> Send for RawEntryBuilderMut<'a, K, V, S> where K: Send, V: Send, S: Send, { } unsafe impl<'a, K, V, S> Sync for RawEntryBuilderMut<'a, K, V, S> where K: Sync, V: Sync, S: Sync, { } pub enum RawEntryMut<'a, K, V, S> { Occupied(RawOccupiedEntryMut<'a, K, V>), Vacant(RawVacantEntryMut<'a, K, V, S>), } impl<'a, K, V, S> RawEntryMut<'a, K, V, S> { /// Similarly to `Entry::or_insert`, if this entry is occupied, it will move the existing entry /// to the back of the internal linked list. #[inline] pub fn or_insert(self, default_key: K, default_val: V) -> (&'a mut K, &'a mut V) where K: Hash, S: BuildHasher, { match self { RawEntryMut::Occupied(mut entry) => { entry.to_back(); entry.into_key_value() } RawEntryMut::Vacant(entry) => entry.insert(default_key, default_val), } } /// Similarly to `Entry::or_insert_with`, if this entry is occupied, it will move the existing /// entry to the back of the internal linked list. 
#[inline] pub fn or_insert_with(self, default: F) -> (&'a mut K, &'a mut V) where F: FnOnce() -> (K, V), K: Hash, S: BuildHasher, { match self { RawEntryMut::Occupied(mut entry) => { entry.to_back(); entry.into_key_value() } RawEntryMut::Vacant(entry) => { let (k, v) = default(); entry.insert(k, v) } } } #[inline] pub fn and_modify(self, f: F) -> Self where F: FnOnce(&mut K, &mut V), { match self { RawEntryMut::Occupied(mut entry) => { { let (k, v) = entry.get_key_value_mut(); f(k, v); } RawEntryMut::Occupied(entry) } RawEntryMut::Vacant(entry) => RawEntryMut::Vacant(entry), } } } pub struct RawOccupiedEntryMut<'a, K, V> { free: &'a mut Option>>, values: &'a mut Option>>, entry: hash_map::RawOccupiedEntryMut<'a, NonNull>, (), NullHasher>, } impl<'a, K, V> RawOccupiedEntryMut<'a, K, V> { #[inline] pub fn key(&self) -> &K { self.get_key_value().0 } #[inline] pub fn key_mut(&mut self) -> &mut K { self.get_key_value_mut().0 } #[inline] pub fn into_key(self) -> &'a mut K { self.into_key_value().0 } #[inline] pub fn get(&self) -> &V { self.get_key_value().1 } #[inline] pub fn get_mut(&mut self) -> &mut V { self.get_key_value_mut().1 } #[inline] pub fn into_mut(self) -> &'a mut V { self.into_key_value().1 } #[inline] pub fn get_key_value(&self) -> (&K, &V) { unsafe { let node = *self.entry.key(); let (key, value) = (*node.as_ptr()).entry_ref(); (key, value) } } #[inline] pub fn get_key_value_mut(&mut self) -> (&mut K, &mut V) { unsafe { let node = *self.entry.key_mut(); let (key, value) = (*node.as_ptr()).entry_mut(); (key, value) } } #[inline] pub fn into_key_value(self) -> (&'a mut K, &'a mut V) { unsafe { let node = *self.entry.into_key(); let (key, value) = (*node.as_ptr()).entry_mut(); (key, value) } } #[inline] pub fn to_back(&mut self) { unsafe { let node = *self.entry.key_mut(); detach_node(node); attach_before(node, NonNull::new_unchecked(self.values.as_ptr())); } } #[inline] pub fn to_front(&mut self) { unsafe { let node = *self.entry.key_mut(); detach_node(node); attach_before(node, (*self.values.as_ptr()).links.value.next); } } #[inline] pub fn replace_value(&mut self, value: V) -> V { unsafe { let mut node = *self.entry.key_mut(); mem::replace(&mut node.as_mut().entry_mut().1, value) } } #[inline] pub fn replace_key(&mut self, key: K) -> K { unsafe { let mut node = *self.entry.key_mut(); mem::replace(&mut node.as_mut().entry_mut().0, key) } } #[inline] pub fn remove(self) -> V { self.remove_entry().1 } #[inline] pub fn remove_entry(self) -> (K, V) { let node = self.entry.remove_entry().0; unsafe { remove_node(self.free, node) } } } pub struct RawVacantEntryMut<'a, K, V, S> { hash_builder: &'a S, values: &'a mut Option>>, free: &'a mut Option>>, entry: hash_map::RawVacantEntryMut<'a, NonNull>, (), NullHasher>, } impl<'a, K, V, S> RawVacantEntryMut<'a, K, V, S> { #[inline] pub fn insert(self, key: K, value: V) -> (&'a mut K, &'a mut V) where K: Hash, S: BuildHasher, { let hash = hash_key(self.hash_builder, &key); self.insert_hashed_nocheck(hash, key, value) } #[inline] pub fn insert_hashed_nocheck(self, hash: u64, key: K, value: V) -> (&'a mut K, &'a mut V) where K: Hash, S: BuildHasher, { let hash_builder = self.hash_builder; self.insert_with_hasher(hash, key, value, |k| hash_key(hash_builder, k)) } #[inline] pub fn insert_with_hasher( self, hash: u64, key: K, value: V, hasher: impl Fn(&K) -> u64, ) -> (&'a mut K, &'a mut V) where S: BuildHasher, { unsafe { ensure_guard_node(self.values); let mut new_node = allocate_node(self.free); new_node.as_mut().put_entry((key, value)); 
attach_before(new_node, NonNull::new_unchecked(self.values.as_ptr())); let node = *self .entry .insert_with_hasher(hash, new_node, (), move |k| hasher((*k).as_ref().key_ref())) .0; let (key, value) = (*node.as_ptr()).entry_mut(); (key, value) } } } impl fmt::Debug for RawEntryBuilderMut<'_, K, V, S> { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("RawEntryBuilder").finish() } } impl fmt::Debug for RawEntryMut<'_, K, V, S> { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { RawEntryMut::Vacant(ref v) => f.debug_tuple("RawEntry").field(v).finish(), RawEntryMut::Occupied(ref o) => f.debug_tuple("RawEntry").field(o).finish(), } } } impl fmt::Debug for RawOccupiedEntryMut<'_, K, V> { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("RawOccupiedEntryMut") .field("key", self.key()) .field("value", self.get()) .finish() } } impl fmt::Debug for RawVacantEntryMut<'_, K, V, S> { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("RawVacantEntryMut").finish() } } impl fmt::Debug for RawEntryBuilder<'_, K, V, S> { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("RawEntryBuilder").finish() } } unsafe impl<'a, K, V> Send for RawOccupiedEntryMut<'a, K, V> where K: Send, V: Send, { } unsafe impl<'a, K, V> Sync for RawOccupiedEntryMut<'a, K, V> where K: Sync, V: Sync, { } unsafe impl<'a, K, V, S> Send for RawVacantEntryMut<'a, K, V, S> where K: Send, V: Send, S: Send, { } unsafe impl<'a, K, V, S> Sync for RawVacantEntryMut<'a, K, V, S> where K: Sync, V: Sync, S: Sync, { } pub struct Iter<'a, K, V> { head: *const Node, tail: *const Node, remaining: usize, marker: PhantomData<(&'a K, &'a V)>, } pub struct IterMut<'a, K, V> { head: Option>>, tail: Option>>, remaining: usize, marker: PhantomData<(&'a K, &'a mut V)>, } pub struct IntoIter { head: Option>>, tail: Option>>, remaining: usize, marker: PhantomData<(K, V)>, } pub struct Drain<'a, K, V> { free: NonNull>>>, head: Option>>, tail: Option>>, remaining: usize, // We want `Drain` to be covariant marker: PhantomData<(K, V, &'a LinkedHashMap)>, } impl IterMut<'_, K, V> { #[inline] pub(crate) fn iter(&self) -> Iter<'_, K, V> { Iter { head: self.head.as_ptr(), tail: self.tail.as_ptr(), remaining: self.remaining, marker: PhantomData, } } } impl IntoIter { #[inline] pub(crate) fn iter(&self) -> Iter<'_, K, V> { Iter { head: self.head.as_ptr(), tail: self.tail.as_ptr(), remaining: self.remaining, marker: PhantomData, } } } impl Drain<'_, K, V> { #[inline] pub(crate) fn iter(&self) -> Iter<'_, K, V> { Iter { head: self.head.as_ptr(), tail: self.tail.as_ptr(), remaining: self.remaining, marker: PhantomData, } } } unsafe impl<'a, K, V> Send for Iter<'a, K, V> where K: Send, V: Send, { } unsafe impl<'a, K, V> Send for IterMut<'a, K, V> where K: Send, V: Send, { } unsafe impl Send for IntoIter where K: Send, V: Send, { } unsafe impl<'a, K, V> Send for Drain<'a, K, V> where K: Send, V: Send, { } unsafe impl<'a, K, V> Sync for Iter<'a, K, V> where K: Sync, V: Sync, { } unsafe impl<'a, K, V> Sync for IterMut<'a, K, V> where K: Sync, V: Sync, { } unsafe impl Sync for IntoIter where K: Sync, V: Sync, { } unsafe impl<'a, K, V> Sync for Drain<'a, K, V> where K: Sync, V: Sync, { } impl<'a, K, V> Clone for Iter<'a, K, V> { #[inline] fn clone(&self) -> Self { Iter { ..*self } } } impl fmt::Debug for Iter<'_, K, V> { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 
f.debug_list().entries(self.clone()).finish() } } impl fmt::Debug for IterMut<'_, K, V> where K: fmt::Debug, V: fmt::Debug, { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.iter()).finish() } } impl fmt::Debug for IntoIter where K: fmt::Debug, V: fmt::Debug, { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.iter()).finish() } } impl fmt::Debug for Drain<'_, K, V> where K: fmt::Debug, V: fmt::Debug, { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.iter()).finish() } } impl<'a, K, V> Iterator for Iter<'a, K, V> { type Item = (&'a K, &'a V); #[inline] fn next(&mut self) -> Option<(&'a K, &'a V)> { if self.remaining == 0 { None } else { self.remaining -= 1; unsafe { let (key, value) = (*self.head).entry_ref(); self.head = (*self.head).links.value.next.as_ptr(); Some((key, value)) } } } #[inline] fn size_hint(&self) -> (usize, Option) { (self.remaining, Some(self.remaining)) } } impl<'a, K, V> Iterator for IterMut<'a, K, V> { type Item = (&'a K, &'a mut V); #[inline] fn next(&mut self) -> Option<(&'a K, &'a mut V)> { if self.remaining == 0 { None } else { self.remaining -= 1; unsafe { let head = self.head.as_ptr(); let (key, value) = (*head).entry_mut(); self.head = Some((*head).links.value.next); Some((key, value)) } } } #[inline] fn size_hint(&self) -> (usize, Option) { (self.remaining, Some(self.remaining)) } } impl Iterator for IntoIter { type Item = (K, V); #[inline] fn next(&mut self) -> Option<(K, V)> { if self.remaining == 0 { return None; } self.remaining -= 1; unsafe { let head = self.head.as_ptr(); self.head = Some((*head).links.value.next); let mut e = Box::from_raw(head); Some(e.take_entry()) } } #[inline] fn size_hint(&self) -> (usize, Option) { (self.remaining, Some(self.remaining)) } } impl<'a, K, V> Iterator for Drain<'a, K, V> { type Item = (K, V); #[inline] fn next(&mut self) -> Option<(K, V)> { if self.remaining == 0 { return None; } self.remaining -= 1; unsafe { let mut head = NonNull::new_unchecked(self.head.as_ptr()); self.head = Some(head.as_ref().links.value.next); let entry = head.as_mut().take_entry(); push_free(self.free.as_mut(), head); Some(entry) } } #[inline] fn size_hint(&self) -> (usize, Option) { (self.remaining, Some(self.remaining)) } } impl<'a, K, V> DoubleEndedIterator for Iter<'a, K, V> { #[inline] fn next_back(&mut self) -> Option<(&'a K, &'a V)> { if self.remaining == 0 { None } else { self.remaining -= 1; unsafe { let tail = self.tail; self.tail = (*tail).links.value.prev.as_ptr(); let (key, value) = (*tail).entry_ref(); Some((key, value)) } } } } impl<'a, K, V> DoubleEndedIterator for IterMut<'a, K, V> { #[inline] fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> { if self.remaining == 0 { None } else { self.remaining -= 1; unsafe { let tail = self.tail.as_ptr(); self.tail = Some((*tail).links.value.prev); let (key, value) = (*tail).entry_mut(); Some((key, value)) } } } } impl DoubleEndedIterator for IntoIter { #[inline] fn next_back(&mut self) -> Option<(K, V)> { if self.remaining == 0 { return None; } self.remaining -= 1; unsafe { let mut e = *Box::from_raw(self.tail.as_ptr()); self.tail = Some(e.links.value.prev); Some(e.take_entry()) } } } impl<'a, K, V> DoubleEndedIterator for Drain<'a, K, V> { #[inline] fn next_back(&mut self) -> Option<(K, V)> { if self.remaining == 0 { return None; } self.remaining -= 1; unsafe { let mut tail = NonNull::new_unchecked(self.tail.as_ptr()); self.tail = 
Some(tail.as_ref().links.value.prev); let entry = tail.as_mut().take_entry(); push_free(&mut *self.free.as_ptr(), tail); Some(entry) } } } impl<'a, K, V> ExactSizeIterator for Iter<'a, K, V> {} impl<'a, K, V> ExactSizeIterator for IterMut<'a, K, V> {} impl ExactSizeIterator for IntoIter {} impl Drop for IntoIter { #[inline] fn drop(&mut self) { for _ in 0..self.remaining { unsafe { let tail = self.tail.as_ptr(); self.tail = Some((*tail).links.value.prev); (*tail).take_entry(); Box::from_raw(tail); } } } } impl<'a, K, V> Drop for Drain<'a, K, V> { #[inline] fn drop(&mut self) { for _ in 0..self.remaining { unsafe { let mut tail = NonNull::new_unchecked(self.tail.as_ptr()); self.tail = Some(tail.as_ref().links.value.prev); tail.as_mut().take_entry(); push_free(&mut *self.free.as_ptr(), tail); } } } } pub struct Keys<'a, K, V> { inner: Iter<'a, K, V>, } impl fmt::Debug for Keys<'_, K, V> { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.clone()).finish() } } impl<'a, K, V> Clone for Keys<'a, K, V> { #[inline] fn clone(&self) -> Keys<'a, K, V> { Keys { inner: self.inner.clone(), } } } impl<'a, K, V> Iterator for Keys<'a, K, V> { type Item = &'a K; #[inline] fn next(&mut self) -> Option<&'a K> { self.inner.next().map(|e| e.0) } #[inline] fn size_hint(&self) -> (usize, Option) { self.inner.size_hint() } } impl<'a, K, V> DoubleEndedIterator for Keys<'a, K, V> { #[inline] fn next_back(&mut self) -> Option<&'a K> { self.inner.next_back().map(|e| e.0) } } impl<'a, K, V> ExactSizeIterator for Keys<'a, K, V> { #[inline] fn len(&self) -> usize { self.inner.len() } } pub struct Values<'a, K, V> { inner: Iter<'a, K, V>, } impl Clone for Values<'_, K, V> { #[inline] fn clone(&self) -> Self { Values { inner: self.inner.clone(), } } } impl fmt::Debug for Values<'_, K, V> { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.clone()).finish() } } impl<'a, K, V> Iterator for Values<'a, K, V> { type Item = &'a V; #[inline] fn next(&mut self) -> Option<&'a V> { self.inner.next().map(|e| e.1) } #[inline] fn size_hint(&self) -> (usize, Option) { self.inner.size_hint() } } impl<'a, K, V> DoubleEndedIterator for Values<'a, K, V> { #[inline] fn next_back(&mut self) -> Option<&'a V> { self.inner.next_back().map(|e| e.1) } } impl<'a, K, V> ExactSizeIterator for Values<'a, K, V> { #[inline] fn len(&self) -> usize { self.inner.len() } } pub struct ValuesMut<'a, K, V> { inner: IterMut<'a, K, V>, } impl fmt::Debug for ValuesMut<'_, K, V> where K: fmt::Debug, V: fmt::Debug, { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.inner.iter()).finish() } } impl<'a, K, V> Iterator for ValuesMut<'a, K, V> { type Item = &'a mut V; #[inline] fn next(&mut self) -> Option<&'a mut V> { self.inner.next().map(|e| e.1) } #[inline] fn size_hint(&self) -> (usize, Option) { self.inner.size_hint() } } impl<'a, K, V> DoubleEndedIterator for ValuesMut<'a, K, V> { #[inline] fn next_back(&mut self) -> Option<&'a mut V> { self.inner.next_back().map(|e| e.1) } } impl<'a, K, V> ExactSizeIterator for ValuesMut<'a, K, V> { #[inline] fn len(&self) -> usize { self.inner.len() } } impl<'a, K, V, S> IntoIterator for &'a LinkedHashMap { type Item = (&'a K, &'a V); type IntoIter = Iter<'a, K, V>; #[inline] fn into_iter(self) -> Iter<'a, K, V> { self.iter() } } impl<'a, K, V, S> IntoIterator for &'a mut LinkedHashMap { type Item = (&'a K, &'a mut V); type IntoIter = IterMut<'a, K, V>; #[inline] fn into_iter(self) -> 
IterMut<'a, K, V> { self.iter_mut() } } impl IntoIterator for LinkedHashMap { type Item = (K, V); type IntoIter = IntoIter; #[inline] fn into_iter(mut self) -> IntoIter { unsafe { let (head, tail) = if let Some(values) = self.values { let ValueLinks { next: head, prev: tail, } = values.as_ref().links.value; Box::from_raw(self.values.as_ptr()); self.values = None; (Some(head), Some(tail)) } else { (None, None) }; let len = self.len(); drop_free_nodes(self.free); self.free = None; self.map.clear(); IntoIter { head, tail, remaining: len, marker: PhantomData, } } } } // A ZST that asserts that the inner HashMap will not do its own key hashing struct NullHasher; impl BuildHasher for NullHasher { type Hasher = Self; #[inline] fn build_hasher(&self) -> Self { Self } } impl Hasher for NullHasher { #[inline] fn write(&mut self, _bytes: &[u8]) { unreachable!("inner map should not be using its built-in hasher") } #[inline] fn finish(&self) -> u64 { unreachable!("inner map should not be using its built-in hasher") } } struct ValueLinks { next: NonNull>, prev: NonNull>, } impl Clone for ValueLinks { #[inline] fn clone(&self) -> Self { ValueLinks { next: self.next, prev: self.prev, } } } impl Copy for ValueLinks {} struct FreeLink { next: Option>>, } impl Clone for FreeLink { #[inline] fn clone(&self) -> Self { FreeLink { next: self.next } } } impl Copy for FreeLink {} union Links { value: ValueLinks, free: FreeLink, } struct Node { entry: MaybeUninit<(K, V)>, links: Links, } impl Node { #[inline] unsafe fn put_entry(&mut self, entry: (K, V)) { self.entry.as_mut_ptr().write(entry) } #[inline] unsafe fn entry_ref(&self) -> &(K, V) { &*self.entry.as_ptr() } #[inline] unsafe fn key_ref(&self) -> &K { &(*self.entry.as_ptr()).0 } #[inline] unsafe fn entry_mut(&mut self) -> &mut (K, V) { &mut *self.entry.as_mut_ptr() } #[inline] unsafe fn take_entry(&mut self) -> (K, V) { self.entry.as_ptr().read() } } trait OptNonNullExt { fn as_ptr(self) -> *mut T; } impl OptNonNullExt for Option> { #[inline] fn as_ptr(self) -> *mut T { match self { Some(ptr) => ptr.as_ptr(), None => ptr::null_mut(), } } } // Allocate a circular list guard node if not present. #[inline] unsafe fn ensure_guard_node(head: &mut Option>>) { if head.is_none() { let mut p = NonNull::new_unchecked(Box::into_raw(Box::new(Node { entry: MaybeUninit::uninit(), links: Links { value: ValueLinks { next: NonNull::dangling(), prev: NonNull::dangling(), }, }, }))); p.as_mut().links.value = ValueLinks { next: p, prev: p }; *head = Some(p); } } // Attach the `to_attach` node to the existing circular list *before* `node`. 
#[inline] unsafe fn attach_before(mut to_attach: NonNull>, mut node: NonNull>) { to_attach.as_mut().links.value = ValueLinks { prev: node.as_ref().links.value.prev, next: node, }; node.as_mut().links.value.prev = to_attach; (*to_attach.as_mut().links.value.prev.as_ptr()) .links .value .next = to_attach; } #[inline] unsafe fn detach_node(mut node: NonNull>) { node.as_mut().links.value.prev.as_mut().links.value.next = node.as_ref().links.value.next; node.as_mut().links.value.next.as_mut().links.value.prev = node.as_ref().links.value.prev; } #[inline] unsafe fn push_free( free_list: &mut Option>>, mut node: NonNull>, ) { node.as_mut().links.free.next = *free_list; *free_list = Some(node); } #[inline] unsafe fn pop_free( free_list: &mut Option>>, ) -> Option>> { if let Some(free) = *free_list { *free_list = free.as_ref().links.free.next; Some(free) } else { None } } #[inline] unsafe fn allocate_node(free_list: &mut Option>>) -> NonNull> { if let Some(mut free) = pop_free(free_list) { free.as_mut().links.value = ValueLinks { next: NonNull::dangling(), prev: NonNull::dangling(), }; free } else { NonNull::new_unchecked(Box::into_raw(Box::new(Node { entry: MaybeUninit::uninit(), links: Links { value: ValueLinks { next: NonNull::dangling(), prev: NonNull::dangling(), }, }, }))) } } // Given node is assumed to be the guard node and is *not* dropped. #[inline] unsafe fn drop_value_nodes(guard: NonNull>) { let mut cur = guard.as_ref().links.value.prev; while cur != guard { let prev = cur.as_ref().links.value.prev; cur.as_mut().take_entry(); Box::from_raw(cur.as_ptr()); cur = prev; } } // Drops all linked free nodes starting with the given node. Free nodes are only non-circular // singly linked, and should have uninitialized keys / values. #[inline] unsafe fn drop_free_nodes(mut free: Option>>) { while let Some(some_free) = free { let next_free = some_free.as_ref().links.free.next; Box::from_raw(some_free.as_ptr()); free = next_free; } } #[inline] unsafe fn remove_node( free_list: &mut Option>>, mut node: NonNull>, ) -> (K, V) { detach_node(node); push_free(free_list, node); node.as_mut().take_entry() } #[inline] fn hash_key(s: &S, k: &Q) -> u64 where S: BuildHasher, Q: Hash + ?Sized, { let mut hasher = s.build_hasher(); k.hash(&mut hasher); hasher.finish() } hashlink-0.8.0/src/linked_hash_set.rs000064400000000000000000000411540072674642500157500ustar 00000000000000use std::{ borrow::Borrow, fmt, hash::{BuildHasher, Hash, Hasher}, iter::{Chain, FromIterator}, ops::{BitAnd, BitOr, BitXor, Sub}, }; use hashbrown::hash_map::DefaultHashBuilder; use crate::linked_hash_map::{self, LinkedHashMap, TryReserveError}; pub struct LinkedHashSet { map: LinkedHashMap, } impl LinkedHashSet { #[inline] pub fn new() -> LinkedHashSet { LinkedHashSet { map: LinkedHashMap::new(), } } #[inline] pub fn with_capacity(capacity: usize) -> LinkedHashSet { LinkedHashSet { map: LinkedHashMap::with_capacity(capacity), } } } impl LinkedHashSet { #[inline] pub fn capacity(&self) -> usize { self.map.capacity() } #[inline] pub fn iter(&self) -> Iter<'_, T> { Iter { iter: self.map.keys(), } } #[inline] pub fn len(&self) -> usize { self.map.len() } #[inline] pub fn is_empty(&self) -> bool { self.map.is_empty() } #[inline] pub fn drain(&mut self) -> Drain { Drain { iter: self.map.drain(), } } #[inline] pub fn clear(&mut self) { self.map.clear() } #[inline] pub fn retain(&mut self, mut f: F) where F: FnMut(&T) -> bool, { self.map.retain(|k, _| f(k)); } } impl LinkedHashSet where T: Eq + Hash, S: BuildHasher, { #[inline] pub fn 
with_hasher(hasher: S) -> LinkedHashSet { LinkedHashSet { map: LinkedHashMap::with_hasher(hasher), } } #[inline] pub fn with_capacity_and_hasher(capacity: usize, hasher: S) -> LinkedHashSet { LinkedHashSet { map: LinkedHashMap::with_capacity_and_hasher(capacity, hasher), } } #[inline] pub fn hasher(&self) -> &S { self.map.hasher() } #[inline] pub fn reserve(&mut self, additional: usize) { self.map.reserve(additional) } #[inline] pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> { self.map.try_reserve(additional) } #[inline] pub fn shrink_to_fit(&mut self) { self.map.shrink_to_fit() } #[inline] pub fn difference<'a>(&'a self, other: &'a LinkedHashSet) -> Difference<'a, T, S> { Difference { iter: self.iter(), other, } } #[inline] pub fn symmetric_difference<'a>( &'a self, other: &'a LinkedHashSet, ) -> SymmetricDifference<'a, T, S> { SymmetricDifference { iter: self.difference(other).chain(other.difference(self)), } } #[inline] pub fn intersection<'a>(&'a self, other: &'a LinkedHashSet) -> Intersection<'a, T, S> { Intersection { iter: self.iter(), other, } } #[inline] pub fn union<'a>(&'a self, other: &'a LinkedHashSet) -> Union<'a, T, S> { Union { iter: self.iter().chain(other.difference(self)), } } #[inline] pub fn contains(&self, value: &Q) -> bool where T: Borrow, Q: Hash + Eq, { self.map.contains_key(value) } #[inline] pub fn get(&self, value: &Q) -> Option<&T> where T: Borrow, Q: Hash + Eq, { self.map.raw_entry().from_key(value).map(|p| p.0) } #[inline] pub fn get_or_insert(&mut self, value: T) -> &T { self.map .raw_entry_mut() .from_key(&value) .or_insert(value, ()) .0 } #[inline] pub fn get_or_insert_with(&mut self, value: &Q, f: F) -> &T where T: Borrow, Q: Hash + Eq, F: FnOnce(&Q) -> T, { self.map .raw_entry_mut() .from_key(value) .or_insert_with(|| (f(value), ())) .0 } #[inline] pub fn is_disjoint(&self, other: &LinkedHashSet) -> bool { self.iter().all(|v| !other.contains(v)) } #[inline] pub fn is_subset(&self, other: &LinkedHashSet) -> bool { self.iter().all(|v| other.contains(v)) } #[inline] pub fn is_superset(&self, other: &LinkedHashSet) -> bool { other.is_subset(self) } /// Inserts the given value into the set. /// /// If the set did not have this value present, inserts it at the *back* of the internal linked /// list and returns true, otherwise it moves the existing value to the *back* of the internal /// linked list and returns false. #[inline] pub fn insert(&mut self, value: T) -> bool { self.map.insert(value, ()).is_none() } /// Adds the given value to the set, replacing the existing value. /// /// If a previous value existed, returns the replaced value. In this case, the value's position /// in the internal linked list is *not* changed. 
#[inline] pub fn replace(&mut self, value: T) -> Option { match self.map.entry(value) { linked_hash_map::Entry::Occupied(occupied) => Some(occupied.replace_key()), linked_hash_map::Entry::Vacant(vacant) => { vacant.insert(()); None } } } #[inline] pub fn remove(&mut self, value: &Q) -> bool where T: Borrow, Q: Hash + Eq, { self.map.remove(value).is_some() } #[inline] pub fn take(&mut self, value: &Q) -> Option where T: Borrow, Q: Hash + Eq, { match self.map.raw_entry_mut().from_key(value) { linked_hash_map::RawEntryMut::Occupied(occupied) => Some(occupied.remove_entry().0), linked_hash_map::RawEntryMut::Vacant(_) => None, } } #[inline] pub fn front(&self) -> Option<&T> { self.map.front().map(|(k, _)| k) } #[inline] pub fn pop_front(&mut self) -> Option { self.map.pop_front().map(|(k, _)| k) } #[inline] pub fn back(&self) -> Option<&T> { self.map.back().map(|(k, _)| k) } #[inline] pub fn pop_back(&mut self) -> Option { self.map.pop_back().map(|(k, _)| k) } #[inline] pub fn to_front(&mut self, value: &Q) -> bool where T: Borrow, Q: Hash + Eq, { match self.map.raw_entry_mut().from_key(value) { linked_hash_map::RawEntryMut::Occupied(mut occupied) => { occupied.to_front(); true } linked_hash_map::RawEntryMut::Vacant(_) => false, } } #[inline] pub fn to_back(&mut self, value: &Q) -> bool where T: Borrow, Q: Hash + Eq, { match self.map.raw_entry_mut().from_key(value) { linked_hash_map::RawEntryMut::Occupied(mut occupied) => { occupied.to_back(); true } linked_hash_map::RawEntryMut::Vacant(_) => false, } } } impl Clone for LinkedHashSet { #[inline] fn clone(&self) -> Self { let map = self.map.clone(); Self { map } } } impl PartialEq for LinkedHashSet where T: Eq + Hash, S: BuildHasher, { #[inline] fn eq(&self, other: &Self) -> bool { self.len() == other.len() && self.iter().eq(other) } } impl Hash for LinkedHashSet where T: Eq + Hash, S: BuildHasher, { #[inline] fn hash(&self, state: &mut H) { for e in self { e.hash(state); } } } impl Eq for LinkedHashSet where T: Eq + Hash, S: BuildHasher, { } impl fmt::Debug for LinkedHashSet where T: fmt::Debug, { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_set().entries(self.iter()).finish() } } impl FromIterator for LinkedHashSet where T: Eq + Hash, S: BuildHasher + Default, { #[inline] fn from_iter>(iter: I) -> LinkedHashSet { let mut set = LinkedHashSet::with_hasher(Default::default()); set.extend(iter); set } } impl Extend for LinkedHashSet where T: Eq + Hash, S: BuildHasher, { #[inline] fn extend>(&mut self, iter: I) { self.map.extend(iter.into_iter().map(|k| (k, ()))); } } impl<'a, T, S> Extend<&'a T> for LinkedHashSet where T: 'a + Eq + Hash + Copy, S: BuildHasher, { #[inline] fn extend>(&mut self, iter: I) { self.extend(iter.into_iter().cloned()); } } impl Default for LinkedHashSet where S: Default, { #[inline] fn default() -> LinkedHashSet { LinkedHashSet { map: LinkedHashMap::default(), } } } impl<'a, 'b, T, S> BitOr<&'b LinkedHashSet> for &'a LinkedHashSet where T: Eq + Hash + Clone, S: BuildHasher + Default, { type Output = LinkedHashSet; #[inline] fn bitor(self, rhs: &LinkedHashSet) -> LinkedHashSet { self.union(rhs).cloned().collect() } } impl<'a, 'b, T, S> BitAnd<&'b LinkedHashSet> for &'a LinkedHashSet where T: Eq + Hash + Clone, S: BuildHasher + Default, { type Output = LinkedHashSet; #[inline] fn bitand(self, rhs: &LinkedHashSet) -> LinkedHashSet { self.intersection(rhs).cloned().collect() } } impl<'a, 'b, T, S> BitXor<&'b LinkedHashSet> for &'a LinkedHashSet where T: Eq + Hash + Clone, S: BuildHasher + 
Default, { type Output = LinkedHashSet; #[inline] fn bitxor(self, rhs: &LinkedHashSet) -> LinkedHashSet { self.symmetric_difference(rhs).cloned().collect() } } impl<'a, 'b, T, S> Sub<&'b LinkedHashSet> for &'a LinkedHashSet where T: Eq + Hash + Clone, S: BuildHasher + Default, { type Output = LinkedHashSet; #[inline] fn sub(self, rhs: &LinkedHashSet) -> LinkedHashSet { self.difference(rhs).cloned().collect() } } pub struct Iter<'a, K> { iter: linked_hash_map::Keys<'a, K, ()>, } pub struct IntoIter { iter: linked_hash_map::IntoIter, } pub struct Drain<'a, K: 'a> { iter: linked_hash_map::Drain<'a, K, ()>, } pub struct Intersection<'a, T, S> { iter: Iter<'a, T>, other: &'a LinkedHashSet, } pub struct Difference<'a, T, S> { iter: Iter<'a, T>, other: &'a LinkedHashSet, } pub struct SymmetricDifference<'a, T, S> { iter: Chain, Difference<'a, T, S>>, } pub struct Union<'a, T, S> { iter: Chain, Difference<'a, T, S>>, } impl<'a, T, S> IntoIterator for &'a LinkedHashSet { type Item = &'a T; type IntoIter = Iter<'a, T>; #[inline] fn into_iter(self) -> Iter<'a, T> { self.iter() } } impl IntoIterator for LinkedHashSet { type Item = T; type IntoIter = IntoIter; #[inline] fn into_iter(self) -> IntoIter { IntoIter { iter: self.map.into_iter(), } } } impl<'a, K> Clone for Iter<'a, K> { #[inline] fn clone(&self) -> Iter<'a, K> { Iter { iter: self.iter.clone(), } } } impl<'a, K> Iterator for Iter<'a, K> { type Item = &'a K; #[inline] fn next(&mut self) -> Option<&'a K> { self.iter.next() } #[inline] fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } } impl<'a, K> ExactSizeIterator for Iter<'a, K> {} impl<'a, T> DoubleEndedIterator for Iter<'a, T> { #[inline] fn next_back(&mut self) -> Option<&'a T> { self.iter.next_back() } } impl<'a, K: fmt::Debug> fmt::Debug for Iter<'a, K> { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.clone()).finish() } } impl Iterator for IntoIter { type Item = K; #[inline] fn next(&mut self) -> Option { self.iter.next().map(|(k, _)| k) } #[inline] fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } } impl ExactSizeIterator for IntoIter {} impl DoubleEndedIterator for IntoIter { #[inline] fn next_back(&mut self) -> Option { self.iter.next_back().map(|(k, _)| k) } } impl<'a, K> Iterator for Drain<'a, K> { type Item = K; #[inline] fn next(&mut self) -> Option { self.iter.next().map(|(k, _)| k) } #[inline] fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } } impl<'a, K> DoubleEndedIterator for Drain<'a, K> { #[inline] fn next_back(&mut self) -> Option { self.iter.next_back().map(|(k, _)| k) } } impl<'a, K> ExactSizeIterator for Drain<'a, K> {} impl<'a, T, S> Clone for Intersection<'a, T, S> { #[inline] fn clone(&self) -> Intersection<'a, T, S> { Intersection { iter: self.iter.clone(), ..*self } } } impl<'a, T, S> Iterator for Intersection<'a, T, S> where T: Eq + Hash, S: BuildHasher, { type Item = &'a T; #[inline] fn next(&mut self) -> Option<&'a T> { loop { match self.iter.next() { None => return None, Some(elt) => { if self.other.contains(elt) { return Some(elt); } } } } } #[inline] fn size_hint(&self) -> (usize, Option) { let (_, upper) = self.iter.size_hint(); (0, upper) } } impl<'a, T, S> fmt::Debug for Intersection<'a, T, S> where T: fmt::Debug + Eq + Hash, S: BuildHasher, { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.clone()).finish() } } impl<'a, T, S> Clone for Difference<'a, T, S> { #[inline] fn clone(&self) -> Difference<'a, T, S> { 
Difference { iter: self.iter.clone(), ..*self } } } impl<'a, T, S> Iterator for Difference<'a, T, S> where T: Eq + Hash, S: BuildHasher, { type Item = &'a T; #[inline] fn next(&mut self) -> Option<&'a T> { loop { match self.iter.next() { None => return None, Some(elt) => { if !self.other.contains(elt) { return Some(elt); } } } } } #[inline] fn size_hint(&self) -> (usize, Option) { let (_, upper) = self.iter.size_hint(); (0, upper) } } impl<'a, T, S> fmt::Debug for Difference<'a, T, S> where T: fmt::Debug + Eq + Hash, S: BuildHasher, { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.clone()).finish() } } impl<'a, T, S> Clone for SymmetricDifference<'a, T, S> { #[inline] fn clone(&self) -> SymmetricDifference<'a, T, S> { SymmetricDifference { iter: self.iter.clone(), } } } impl<'a, T, S> Iterator for SymmetricDifference<'a, T, S> where T: Eq + Hash, S: BuildHasher, { type Item = &'a T; #[inline] fn next(&mut self) -> Option<&'a T> { self.iter.next() } #[inline] fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } } impl<'a, T, S> fmt::Debug for SymmetricDifference<'a, T, S> where T: fmt::Debug + Eq + Hash, S: BuildHasher, { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.clone()).finish() } } impl<'a, T, S> Clone for Union<'a, T, S> { #[inline] fn clone(&self) -> Union<'a, T, S> { Union { iter: self.iter.clone(), } } } impl<'a, T, S> fmt::Debug for Union<'a, T, S> where T: fmt::Debug + Eq + Hash, S: BuildHasher, { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.clone()).finish() } } impl<'a, T, S> Iterator for Union<'a, T, S> where T: Eq + Hash, S: BuildHasher, { type Item = &'a T; #[inline] fn next(&mut self) -> Option<&'a T> { self.iter.next() } #[inline] fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } } hashlink-0.8.0/src/lru_cache.rs000064400000000000000000000170360072674642500145530ustar 00000000000000use std::{ borrow::Borrow, fmt, hash::{BuildHasher, Hash}, usize, }; use hashbrown::hash_map; use crate::linked_hash_map::{self, LinkedHashMap}; pub use crate::linked_hash_map::{ Drain, Entry, IntoIter, Iter, IterMut, OccupiedEntry, RawEntryBuilder, RawEntryBuilderMut, RawOccupiedEntryMut, RawVacantEntryMut, VacantEntry, }; pub struct LruCache { map: LinkedHashMap, max_size: usize, } impl LruCache { #[inline] pub fn new(capacity: usize) -> Self { LruCache { map: LinkedHashMap::new(), max_size: capacity, } } /// Create a new unbounded `LruCache` that does not automatically evict entries. 
/// /// A simple convenience method that is equivalent to `LruCache::new(usize::MAX)` #[inline] pub fn new_unbounded() -> Self { LruCache::new(usize::MAX) } } impl LruCache { #[inline] pub fn with_hasher(capacity: usize, hash_builder: S) -> Self { LruCache { map: LinkedHashMap::with_hasher(hash_builder), max_size: capacity, } } #[inline] pub fn capacity(&self) -> usize { self.max_size } #[inline] pub fn len(&self) -> usize { self.map.len() } #[inline] pub fn is_empty(&self) -> bool { self.map.is_empty() } #[inline] pub fn clear(&mut self) { self.map.clear(); } #[inline] pub fn iter(&self) -> Iter { self.map.iter() } #[inline] pub fn iter_mut(&mut self) -> IterMut { self.map.iter_mut() } #[inline] pub fn drain(&mut self) -> Drain { self.map.drain() } } impl LruCache where S: BuildHasher, { #[inline] pub fn contains_key(&mut self, key: &Q) -> bool where K: Borrow, Q: Hash + Eq + ?Sized, { self.get_mut(key).is_some() } /// Insert a new value into the `LruCache`. /// /// If necessary, will remove the value at the front of the LRU list to make room. #[inline] pub fn insert(&mut self, k: K, v: V) -> Option { let old_val = self.map.insert(k, v); if self.len() > self.capacity() { self.remove_lru(); } old_val } /// Get the value for the given key, *without* marking the value as recently used and moving it /// to the back of the LRU list. #[inline] pub fn peek(&self, k: &Q) -> Option<&V> where K: Borrow, Q: Hash + Eq + ?Sized, { self.map.get(k) } /// Get the value for the given key mutably, *without* marking the value as recently used and /// moving it to the back of the LRU list. #[inline] pub fn peek_mut(&mut self, k: &Q) -> Option<&mut V> where K: Borrow, Q: Hash + Eq + ?Sized, { self.map.get_mut(k) } /// Retrieve the given key, marking it as recently used and moving it to the back of the LRU /// list. #[inline] pub fn get(&mut self, k: &Q) -> Option<&V> where K: Borrow, Q: Hash + Eq + ?Sized, { self.get_mut(k).map(|v| &*v) } /// Retrieve the given key, marking it as recently used and moving it to the back of the LRU /// list. #[inline] pub fn get_mut(&mut self, k: &Q) -> Option<&mut V> where K: Borrow, Q: Hash + Eq + ?Sized, { match self.map.raw_entry_mut().from_key(k) { linked_hash_map::RawEntryMut::Occupied(mut occupied) => { occupied.to_back(); Some(occupied.into_mut()) } linked_hash_map::RawEntryMut::Vacant(_) => None, } } /// If the returned entry is vacant, it will always have room to insert a single value. By /// using the entry API, you can exceed the configured capacity by 1. /// /// The returned entry is not automatically moved to the back of the LRU list. By calling /// `Entry::to_back` / `Entry::to_front` you can manually control the position of this entry in /// the LRU list. #[inline] pub fn entry(&mut self, key: K) -> Entry<'_, K, V, S> { if self.len() > self.capacity() { self.remove_lru(); } self.map.entry(key) } /// The constructed raw entry is never automatically moved to the back of the LRU list. By /// calling `Entry::to_back` / `Entry::to_front` you can manually control the position of this /// entry in the LRU list. #[inline] pub fn raw_entry(&self) -> RawEntryBuilder<'_, K, V, S> { self.map.raw_entry() } /// If the constructed raw entry is vacant, it will always have room to insert a single value. /// By using the raw entry API, you can exceed the configured capacity by 1. /// /// The constructed raw entry is never automatically moved to the back of the LRU list. 
By /// calling `Entry::to_back` / `Entry::to_front` you can manually control the position of this /// entry in the LRU list. #[inline] pub fn raw_entry_mut(&mut self) -> RawEntryBuilderMut<'_, K, V, S> { if self.len() > self.capacity() { self.remove_lru(); } self.map.raw_entry_mut() } #[inline] pub fn remove(&mut self, k: &Q) -> Option where K: Borrow, Q: Hash + Eq + ?Sized, { self.map.remove(k) } #[inline] pub fn remove_entry(&mut self, k: &Q) -> Option<(K, V)> where K: Borrow, Q: Hash + Eq + ?Sized, { self.map.remove_entry(k) } /// Set the new cache capacity for the `LruCache`. /// /// If there are more entries in the `LruCache` than the new capacity will allow, they are /// removed. #[inline] pub fn set_capacity(&mut self, capacity: usize) { for _ in capacity..self.len() { self.remove_lru(); } self.max_size = capacity; } /// Remove the least recently used entry and return it. /// /// If the `LruCache` is empty this will return None. #[inline] pub fn remove_lru(&mut self) -> Option<(K, V)> { self.map.pop_front() } } impl Clone for LruCache { #[inline] fn clone(&self) -> Self { LruCache { map: self.map.clone(), max_size: self.max_size, } } } impl Extend<(K, V)> for LruCache { #[inline] fn extend>(&mut self, iter: I) { for (k, v) in iter { self.insert(k, v); } } } impl IntoIterator for LruCache { type Item = (K, V); type IntoIter = IntoIter; #[inline] fn into_iter(self) -> IntoIter { self.map.into_iter() } } impl<'a, K, V, S> IntoIterator for &'a LruCache { type Item = (&'a K, &'a V); type IntoIter = Iter<'a, K, V>; #[inline] fn into_iter(self) -> Iter<'a, K, V> { self.iter() } } impl<'a, K, V, S> IntoIterator for &'a mut LruCache { type Item = (&'a K, &'a mut V); type IntoIter = IterMut<'a, K, V>; #[inline] fn into_iter(self) -> IterMut<'a, K, V> { self.iter_mut() } } impl fmt::Debug for LruCache where K: fmt::Debug, V: fmt::Debug, { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_map().entries(self.iter().rev()).finish() } } hashlink-0.8.0/src/serde.rs000064400000000000000000000106610072674642500137250ustar 00000000000000use std::{ fmt::{self, Formatter}, hash::{BuildHasher, Hash}, marker::PhantomData, }; use serde::{ de::{MapAccess, SeqAccess, Visitor}, ser::{SerializeMap, SerializeSeq}, Deserialize, Deserializer, Serialize, Serializer, }; use crate::{LinkedHashMap, LinkedHashSet}; // LinkedHashMap impls impl Serialize for LinkedHashMap where K: Serialize + Eq + Hash, V: Serialize, S: BuildHasher, { #[inline] fn serialize(&self, serializer: T) -> Result { let mut map_serializer = serializer.serialize_map(Some(self.len()))?; for (k, v) in self { map_serializer.serialize_key(k)?; map_serializer.serialize_value(v)?; } map_serializer.end() } } impl<'de, K, V, S> Deserialize<'de> for LinkedHashMap where K: Deserialize<'de> + Eq + Hash, V: Deserialize<'de>, S: BuildHasher + Default, { fn deserialize>(deserializer: D) -> Result { #[derive(Debug)] pub struct LinkedHashMapVisitor { marker: PhantomData>, } impl LinkedHashMapVisitor { fn new() -> Self { LinkedHashMapVisitor { marker: PhantomData, } } } impl Default for LinkedHashMapVisitor { fn default() -> Self { Self::new() } } impl<'de, K, V, S> Visitor<'de> for LinkedHashMapVisitor where K: Deserialize<'de> + Eq + Hash, V: Deserialize<'de>, S: BuildHasher + Default, { type Value = LinkedHashMap; fn expecting(&self, formatter: &mut Formatter) -> fmt::Result { write!(formatter, "a map") } #[inline] fn visit_map>(self, mut map: M) -> Result { let mut values = LinkedHashMap::with_capacity_and_hasher( map.size_hint().unwrap_or(0), 
S::default(), ); while let Some((k, v)) = map.next_entry()? { values.insert(k, v); } Ok(values) } } deserializer.deserialize_map(LinkedHashMapVisitor::default()) } } // LinkedHashSet impls impl Serialize for LinkedHashSet where T: Serialize + Eq + Hash, S: BuildHasher, { #[inline] fn serialize(&self, serializer: U) -> Result { let mut seq_serializer = serializer.serialize_seq(Some(self.len()))?; for v in self { seq_serializer.serialize_element(v)?; } seq_serializer.end() } } impl<'de, T, S> Deserialize<'de> for LinkedHashSet where T: Deserialize<'de> + Eq + Hash, S: BuildHasher + Default, { fn deserialize>(deserializer: D) -> Result { #[derive(Debug)] pub struct LinkedHashSetVisitor { marker: PhantomData>, } impl LinkedHashSetVisitor { fn new() -> Self { LinkedHashSetVisitor { marker: PhantomData, } } } impl Default for LinkedHashSetVisitor { fn default() -> Self { Self::new() } } impl<'de, T, S> Visitor<'de> for LinkedHashSetVisitor where T: Deserialize<'de> + Eq + Hash, S: BuildHasher + Default, { type Value = LinkedHashSet; fn expecting(&self, formatter: &mut Formatter) -> fmt::Result { write!(formatter, "a sequence") } #[inline] fn visit_seq>(self, mut seq: SA) -> Result { let mut values = LinkedHashSet::with_capacity_and_hasher( seq.size_hint().unwrap_or(0), S::default(), ); while let Some(v) = seq.next_element()? { values.insert(v); } Ok(values) } } deserializer.deserialize_seq(LinkedHashSetVisitor::default()) } } hashlink-0.8.0/tests/linked_hash_map.rs000064400000000000000000000354130072674642500163060ustar 00000000000000use hashlink::{linked_hash_map, LinkedHashMap}; #[allow(dead_code)] fn assert_covariance() { fn set<'new>(v: LinkedHashMap<&'static str, ()>) -> LinkedHashMap<&'new str, ()> { v } fn iter<'a, 'new>( v: linked_hash_map::Iter<'a, &'static str, &'static str>, ) -> linked_hash_map::Iter<'a, &'new str, &'new str> { v } fn iter_mut<'a, 'new>( v: linked_hash_map::Iter<'a, &'static str, ()>, ) -> linked_hash_map::Iter<'a, &'new str, ()> { v } fn into_iter<'new>( v: linked_hash_map::IntoIter<&'static str, &'static str>, ) -> linked_hash_map::IntoIter<&'new str, &'new str> { v } fn drain<'new>( d: linked_hash_map::Drain<'static, &'static str, &'static str>, ) -> linked_hash_map::Drain<'new, &'new str, &'new str> { d } fn raw_entry_builder<'a, 'new>( v: linked_hash_map::RawEntryBuilder<'a, &'static str, &'static str, ()>, ) -> linked_hash_map::RawEntryBuilder<'a, &'new str, &'new str, ()> { v } } #[test] fn test_index() { let mut map = LinkedHashMap::new(); map.insert(1, 10); map.insert(2, 20); assert_eq!(10, map[&1]); map[&2] = 22; assert_eq!(22, map[&2]); } #[test] fn test_insert_and_get() { let mut map = LinkedHashMap::new(); map.insert(1, 10); map.insert(2, 20); assert_eq!(map.get(&1), Some(&10)); assert_eq!(map.get(&2), Some(&20)); assert_eq!(map.len(), 2); } #[test] fn test_insert_update() { let mut map = LinkedHashMap::new(); map.insert("1".to_string(), vec![10, 10]); map.insert("1".to_string(), vec![10, 19]); assert_eq!(map.get(&"1".to_string()), Some(&vec![10, 19])); assert_eq!(map.len(), 1); } #[test] fn test_entry_insert_vacant() { let mut map = LinkedHashMap::new(); match map.entry("1".to_string()) { linked_hash_map::Entry::Vacant(e) => { assert_eq!(*e.insert(vec![10, 10]), vec![10, 10]); } _ => panic!("fail"), } assert!(map.contains_key("1")); assert_eq!(map["1"], vec![10, 10]); match map.entry("1".to_string()) { linked_hash_map::Entry::Occupied(mut e) => { assert_eq!(*e.get(), vec![10, 10]); assert_eq!(e.insert(vec![10, 16]), vec![10, 10]); } _ => panic!("fail"), 
} assert!(map.contains_key("1")); assert_eq!(map["1"], vec![10, 16]); match map.entry("1".to_string()) { linked_hash_map::Entry::Occupied(e) => { assert_eq!(e.remove(), vec![10, 16]); } _ => panic!("fail"), } } #[test] fn test_remove() { let mut map = LinkedHashMap::new(); map.insert(1, 10); map.insert(2, 20); map.insert(3, 30); map.insert(4, 40); map.insert(5, 50); map.remove(&3); map.remove(&4); assert!(map.get(&3).is_none()); assert!(map.get(&4).is_none()); map.insert(6, 60); map.insert(7, 70); map.insert(8, 80); assert_eq!(map.get(&6), Some(&60)); assert_eq!(map.get(&7), Some(&70)); assert_eq!(map.get(&8), Some(&80)); } #[test] fn test_pop() { let mut map = LinkedHashMap::new(); map.insert(1, 10); map.insert(2, 20); map.insert(3, 30); map.insert(4, 40); map.insert(5, 50); assert_eq!(map.pop_front(), Some((1, 10))); assert!(map.get(&1).is_none()); assert_eq!(map.pop_back(), Some((5, 50))); assert!(map.get(&5).is_none()); map.insert(6, 60); map.insert(7, 70); map.insert(8, 80); assert_eq!(map.pop_front(), Some((2, 20))); assert!(map.get(&2).is_none()); assert_eq!(map.pop_back(), Some((8, 80))); assert!(map.get(&8).is_none()); map.insert(3, 30); assert_eq!(map.pop_front(), Some((4, 40))); assert!(map.get(&4).is_none()); assert_eq!(map.pop_back(), Some((3, 30))); assert!(map.get(&3).is_none()); } #[test] fn test_move() { let to_back = |map: &mut LinkedHashMap<_, _>, key| match map.entry(key) { linked_hash_map::Entry::Occupied(mut occupied) => occupied.to_back(), linked_hash_map::Entry::Vacant(_) => panic!(), }; let to_front = |map: &mut LinkedHashMap<_, _>, key| match map.entry(key) { linked_hash_map::Entry::Occupied(mut occupied) => occupied.to_front(), linked_hash_map::Entry::Vacant(_) => panic!(), }; let mut map = LinkedHashMap::new(); map.insert(1, 10); map.insert(2, 20); map.insert(3, 30); map.insert(4, 40); map.insert(5, 50); to_back(&mut map, 1); assert_eq!(map.keys().copied().collect::>(), vec![2, 3, 4, 5, 1]); to_front(&mut map, 4); assert_eq!(map.keys().copied().collect::>(), vec![4, 2, 3, 5, 1]); to_back(&mut map, 3); assert_eq!(map.keys().copied().collect::>(), vec![4, 2, 5, 1, 3]); to_front(&mut map, 2); assert_eq!(map.keys().copied().collect::>(), vec![2, 4, 5, 1, 3]); to_back(&mut map, 3); assert_eq!(map.keys().copied().collect::>(), vec![2, 4, 5, 1, 3]); to_front(&mut map, 2); assert_eq!(map.keys().copied().collect::>(), vec![2, 4, 5, 1, 3]); } #[test] fn test_clear() { let mut map = LinkedHashMap::new(); map.insert(1, 10); map.insert(2, 20); map.clear(); assert!(map.get(&1).is_none()); assert!(map.get(&2).is_none()); assert!(map.is_empty()); } #[test] fn test_iter() { let mut map = LinkedHashMap::new(); // empty iter assert_eq!(None, map.iter().next()); map.insert("a", 10); map.insert("b", 20); map.insert("c", 30); // regular iter let mut iter = map.iter(); assert_eq!((&"a", &10), iter.next().unwrap()); assert_eq!((&"b", &20), iter.next().unwrap()); assert_eq!((&"c", &30), iter.next().unwrap()); assert_eq!(None, iter.next()); assert_eq!(None, iter.next()); let mut iter = map.iter(); assert_eq!((&"a", &10), iter.next().unwrap()); let mut iclone = iter.clone(); assert_eq!((&"b", &20), iter.next().unwrap()); assert_eq!((&"b", &20), iclone.next().unwrap()); assert_eq!((&"c", &30), iter.next().unwrap()); assert_eq!((&"c", &30), iclone.next().unwrap()); // reversed iter let mut rev_iter = map.iter().rev(); assert_eq!((&"c", &30), rev_iter.next().unwrap()); assert_eq!((&"b", &20), rev_iter.next().unwrap()); assert_eq!((&"a", &10), rev_iter.next().unwrap()); assert_eq!(None, 
rev_iter.next()); assert_eq!(None, rev_iter.next()); // mixed let mut mixed_iter = map.iter(); assert_eq!((&"a", &10), mixed_iter.next().unwrap()); assert_eq!((&"c", &30), mixed_iter.next_back().unwrap()); assert_eq!((&"b", &20), mixed_iter.next().unwrap()); assert_eq!(None, mixed_iter.next()); assert_eq!(None, mixed_iter.next_back()); } #[test] fn test_borrow() { #[derive(PartialEq, Eq, Hash)] struct Foo(Bar); #[derive(PartialEq, Eq, Hash)] struct Bar(i32); impl ::std::borrow::Borrow for Foo { fn borrow(&self) -> &Bar { &self.0 } } let mut map = LinkedHashMap::new(); map.insert(Foo(Bar(1)), "a"); map.insert(Foo(Bar(2)), "b"); assert!(map.contains_key(&Bar(1))); assert!(map.contains_key(&Bar(2))); assert!(map.contains_key(&Foo(Bar(1)))); assert!(map.contains_key(&Foo(Bar(2)))); assert_eq!(map.get(&Bar(1)), Some(&"a")); assert_eq!(map.get(&Bar(2)), Some(&"b")); assert_eq!(map.get(&Foo(Bar(1))), Some(&"a")); assert_eq!(map.get(&Foo(Bar(2))), Some(&"b")); assert_eq!(map.get_mut(&Bar(1)), Some(&mut "a")); assert_eq!(map.get_mut(&Bar(2)), Some(&mut "b")); assert_eq!(map.get_mut(&Foo(Bar(1))), Some(&mut "a")); assert_eq!(map.get_mut(&Foo(Bar(2))), Some(&mut "b")); assert_eq!(map[&Bar(1)], "a"); assert_eq!(map[&Bar(2)], "b"); assert_eq!(map[&Foo(Bar(1))], "a"); assert_eq!(map[&Foo(Bar(2))], "b"); assert_eq!(map.remove(&Bar(1)), Some("a")); assert_eq!(map.remove(&Bar(2)), Some("b")); assert_eq!(map.remove(&Foo(Bar(1))), None); assert_eq!(map.remove(&Foo(Bar(2))), None); } #[test] fn test_iter_mut() { let mut map = LinkedHashMap::new(); map.insert("a", 10); map.insert("c", 30); map.insert("b", 20); { let mut iter = map.iter_mut(); let entry = iter.next().unwrap(); assert_eq!("a", *entry.0); *entry.1 = 17; assert_eq!(format!("{:?}", iter), "[(\"c\", 30), (\"b\", 20)]"); // reverse iterator let mut iter = iter.rev(); let entry = iter.next().unwrap(); assert_eq!("b", *entry.0); *entry.1 = 23; let entry = iter.next().unwrap(); assert_eq!("c", *entry.0); assert_eq!(None, iter.next()); assert_eq!(None, iter.next()); } assert_eq!(17, map[&"a"]); assert_eq!(23, map[&"b"]); } #[test] fn test_consuming_iter() { let map = { let mut map = LinkedHashMap::new(); map.insert("a", 10); map.insert("c", 30); map.insert("b", 20); map }; let mut iter = map.into_iter(); assert_eq!(Some(("a", 10)), iter.next()); assert_eq!(Some(("b", 20)), iter.next_back()); assert_eq!(iter.len(), 1); assert_eq!(format!("{:?}", iter), "[(\"c\", 30)]"); assert_eq!(Some(("c", 30)), iter.next()); assert_eq!(None, iter.next()); } #[test] fn test_consuming_iter_empty() { let map = LinkedHashMap::<&str, i32>::new(); let mut iter = map.into_iter(); assert_eq!(None, iter.next()); } #[test] fn test_consuming_iter_with_free_list() { let mut map = LinkedHashMap::new(); map.insert("a", 10); map.insert("c", 30); map.insert("b", 20); map.remove("a"); map.remove("b"); let mut iter = map.into_iter(); assert_eq!(Some(("c", 30)), iter.next()); assert_eq!(None, iter.next()); } #[test] fn test_into_iter_drop() { struct Counter<'a>(&'a mut usize); impl<'a> Drop for Counter<'a> { fn drop(&mut self) { *self.0 += 1; } } let mut a = 0; let mut b = 0; let mut c = 0; { let mut map = LinkedHashMap::new(); map.insert("a", Counter(&mut a)); map.insert("b", Counter(&mut b)); map.insert("c", Counter(&mut c)); let mut iter = map.into_iter(); assert_eq!(iter.next().map(|p| p.0), Some("a")); assert_eq!(iter.next_back().map(|p| p.0), Some("c")); } assert_eq!(a, 1); assert_eq!(b, 1); assert_eq!(c, 1); } #[test] fn test_drain() { use std::{cell::Cell, rc::Rc}; struct 
Counter(Rc>); impl<'a> Drop for Counter { fn drop(&mut self) { self.0.set(self.0.get() + 1); } } let mut map = LinkedHashMap::new(); let a = Rc::new(Cell::new(0)); let b = Rc::new(Cell::new(0)); let c = Rc::new(Cell::new(0)); map.insert("a", Counter(a.clone())); map.insert("b", Counter(b.clone())); map.insert("c", Counter(c.clone())); let mut iter = map.drain(); assert_eq!(iter.next().map(|p| p.0), Some("a")); assert_eq!(iter.next_back().map(|p| p.0), Some("c")); assert_eq!(iter.next_back().map(|p| p.0), Some("b")); assert!(iter.next().is_none()); assert!(iter.next_back().is_none()); drop(iter); assert_eq!(map.len(), 0); assert_eq!(a.get(), 1); assert_eq!(b.get(), 1); assert_eq!(c.get(), 1); map.insert("a", Counter(a.clone())); map.insert("b", Counter(b.clone())); map.insert("c", Counter(c.clone())); let mut iter = map.drain(); assert_eq!(iter.next().map(|p| p.0), Some("a")); assert_eq!(iter.next().map(|p| p.0), Some("b")); assert_eq!(iter.next_back().map(|p| p.0), Some("c")); assert!(iter.next().is_none()); assert!(iter.next_back().is_none()); drop(iter); assert_eq!(map.len(), 0); assert_eq!(a.get(), 2); assert_eq!(b.get(), 2); assert_eq!(c.get(), 2); map.insert("a", Counter(a.clone())); map.insert("b", Counter(b.clone())); map.insert("c", Counter(c.clone())); map.drain(); assert_eq!(map.len(), 0); assert_eq!(a.get(), 3); assert_eq!(b.get(), 3); assert_eq!(c.get(), 3); } #[test] fn test_send_sync() { fn is_send_sync() {} is_send_sync::>(); is_send_sync::>(); is_send_sync::>(); is_send_sync::>(); is_send_sync::>(); is_send_sync::>(); is_send_sync::>(); is_send_sync::>(); is_send_sync::>(); is_send_sync::>(); } #[test] fn test_retain() { use std::{cell::Cell, rc::Rc}; let xs = [1, 2, 3, 4, 5, 6]; let mut map: LinkedHashMap = xs.iter().map(|i| (i.to_string(), *i)).collect(); map.retain(|_, v| *v % 2 == 0); assert_eq!(map.len(), 3); assert!(map.contains_key("2")); assert!(map.contains_key("4")); assert!(map.contains_key("6")); struct Counter(Rc>); impl<'a> Drop for Counter { fn drop(&mut self) { self.0.set(self.0.get() + 1); } } let c = Rc::new(Cell::new(0)); let mut map = LinkedHashMap::new(); map.insert(1, Counter(Rc::clone(&c))); map.insert(2, Counter(Rc::clone(&c))); map.insert(3, Counter(Rc::clone(&c))); map.insert(4, Counter(Rc::clone(&c))); map.retain(|k, _| *k % 2 == 0); assert!(c.get() == 2); drop(map); assert!(c.get() == 4); } #[test] fn test_order_equality() { let xs = [1, 2, 3, 4, 5, 6]; let mut map1: LinkedHashMap = xs.iter().map(|i| (i.to_string(), *i)).collect(); let mut map2: LinkedHashMap = xs.iter().map(|i| (i.to_string(), *i)).collect(); assert_eq!(map1, map2); map1.to_front("4"); assert_ne!(map1, map2); map2.to_front("4"); assert_eq!(map1, map2); } #[test] fn test_replace() { let mut map = LinkedHashMap::new(); map.insert(1, 1); map.insert(2, 2); map.insert(3, 3); map.insert(4, 4); assert!(map .iter() .map(|(k, v)| (*k, *v)) .eq([(1, 1), (2, 2), (3, 3), (4, 4)].iter().copied())); map.insert(3, 5); assert!(map .iter() .map(|(k, v)| (*k, *v)) .eq([(1, 1), (2, 2), (4, 4), (3, 5)].iter().copied())); map.replace(2, 6); assert!(map .iter() .map(|(k, v)| (*k, *v)) .eq([(1, 1), (2, 6), (4, 4), (3, 5)].iter().copied())); } #[test] fn test_shrink_to_fit_resize() { let mut map = LinkedHashMap::new(); map.shrink_to_fit(); for i in 0..100 { map.insert(i, i); } map.shrink_to_fit(); for _ in 0..50 { map.pop_front(); map.shrink_to_fit(); } assert_eq!(map.len(), 50); for i in 50..100 { assert_eq!(map.get(&i).unwrap(), &i); } } 
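
// The following test is not part of the original suite; it is an illustrative
// sketch added here as an editor's example. It exercises the pattern that
// `src/lru_cache.rs` documents for `LruCache::get_mut`: look a key up through
// the raw entry API, call `to_back` to mark it as most recently used, and let
// `pop_front` (what `LruCache::remove_lru` delegates to) evict the least
// recently used entry. Only APIs that already appear elsewhere in this crate
// are used (`raw_entry_mut`, `RawEntryMut::Occupied::to_back`, `keys`,
// `pop_front`); the test name is new.
#[test]
fn test_manual_recency_tracking_sketch() {
    use hashlink::linked_hash_map::RawEntryMut;

    let mut map = LinkedHashMap::new();
    map.insert(1, 10);
    map.insert(2, 20);
    map.insert(3, 30);

    // Touch key 1 the way `LruCache::get_mut` does: find it via the raw entry
    // API and move it to the back of the internal linked list.
    match map.raw_entry_mut().from_key(&1) {
        RawEntryMut::Occupied(mut occupied) => {
            occupied.to_back();
        }
        RawEntryMut::Vacant(_) => panic!("key 1 should be present"),
    }

    // Key 1 is now the most recently used entry...
    assert_eq!(map.keys().copied().collect::<Vec<_>>(), vec![2, 3, 1]);
    // ...and the least recently used entry sits at the front, which is what an
    // LRU eviction (`LruCache::remove_lru` -> `pop_front`) removes first.
    assert_eq!(map.pop_front(), Some((2, 20)));
}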
hashlink-0.8.0/tests/linked_hash_set.rs000064400000000000000000000273770072674642500163360ustar 00000000000000use hashbrown::hash_map::DefaultHashBuilder; use hashlink::linked_hash_set::{self, LinkedHashSet}; #[allow(dead_code)] fn assert_covariance() { fn set<'new>(v: LinkedHashSet<&'static str>) -> LinkedHashSet<&'new str> { v } fn iter<'a, 'new>( v: linked_hash_set::Iter<'a, &'static str>, ) -> linked_hash_set::Iter<'a, &'new str> { v } fn into_iter<'new>( v: linked_hash_set::IntoIter<&'static str>, ) -> linked_hash_set::IntoIter<&'new str> { v } fn difference<'a, 'new>( v: linked_hash_set::Difference<'a, &'static str, DefaultHashBuilder>, ) -> linked_hash_set::Difference<'a, &'new str, DefaultHashBuilder> { v } fn symmetric_difference<'a, 'new>( v: linked_hash_set::SymmetricDifference<'a, &'static str, DefaultHashBuilder>, ) -> linked_hash_set::SymmetricDifference<'a, &'new str, DefaultHashBuilder> { v } fn intersection<'a, 'new>( v: linked_hash_set::Intersection<'a, &'static str, DefaultHashBuilder>, ) -> linked_hash_set::Intersection<'a, &'new str, DefaultHashBuilder> { v } fn union<'a, 'new>( v: linked_hash_set::Union<'a, &'static str, DefaultHashBuilder>, ) -> linked_hash_set::Union<'a, &'new str, DefaultHashBuilder> { v } fn drain<'new>( d: linked_hash_set::Drain<'static, &'static str>, ) -> linked_hash_set::Drain<'new, &'new str> { d } } #[test] fn test_zero_capacities() { type HS = LinkedHashSet; let s = HS::new(); assert_eq!(s.capacity(), 0); let s = HS::default(); assert_eq!(s.capacity(), 0); let s = HS::with_hasher(DefaultHashBuilder::default()); assert_eq!(s.capacity(), 0); let s = HS::with_capacity(0); assert_eq!(s.capacity(), 0); let s = HS::with_capacity_and_hasher(0, DefaultHashBuilder::default()); assert_eq!(s.capacity(), 0); let mut s = HS::new(); s.insert(1); s.insert(2); s.remove(&1); s.remove(&2); s.shrink_to_fit(); assert_eq!(s.capacity(), 0); let mut s = HS::new(); s.reserve(0); assert_eq!(s.capacity(), 0); } #[test] fn test_disjoint() { let mut xs = LinkedHashSet::new(); let mut ys = LinkedHashSet::new(); assert!(xs.is_disjoint(&ys)); assert!(ys.is_disjoint(&xs)); assert!(xs.insert(5)); assert!(ys.insert(11)); assert!(xs.is_disjoint(&ys)); assert!(ys.is_disjoint(&xs)); assert!(xs.insert(7)); assert!(xs.insert(19)); assert!(xs.insert(4)); assert!(ys.insert(2)); assert!(ys.insert(-11)); assert!(xs.is_disjoint(&ys)); assert!(ys.is_disjoint(&xs)); assert!(ys.insert(7)); assert!(!xs.is_disjoint(&ys)); assert!(!ys.is_disjoint(&xs)); } #[test] fn test_subset_and_superset() { let mut a = LinkedHashSet::new(); assert!(a.insert(0)); assert!(a.insert(5)); assert!(a.insert(11)); assert!(a.insert(7)); let mut b = LinkedHashSet::new(); assert!(b.insert(0)); assert!(b.insert(7)); assert!(b.insert(19)); assert!(b.insert(250)); assert!(b.insert(11)); assert!(b.insert(200)); assert!(!a.is_subset(&b)); assert!(!a.is_superset(&b)); assert!(!b.is_subset(&a)); assert!(!b.is_superset(&a)); assert!(b.insert(5)); assert!(a.is_subset(&b)); assert!(!a.is_superset(&b)); assert!(!b.is_subset(&a)); assert!(b.is_superset(&a)); } #[test] fn test_iterate() { let mut a = LinkedHashSet::new(); for i in 0..32 { assert!(a.insert(i)); } let mut observed: u32 = 0; for k in &a { observed |= 1 << *k; } assert_eq!(observed, 0xFFFF_FFFF); } #[test] fn test_intersection() { let mut a = LinkedHashSet::new(); let mut b = LinkedHashSet::new(); assert!(a.insert(11)); assert!(a.insert(1)); assert!(a.insert(3)); assert!(a.insert(77)); assert!(a.insert(103)); assert!(a.insert(5)); assert!(a.insert(-5)); 
assert!(b.insert(2)); assert!(b.insert(11)); assert!(b.insert(77)); assert!(b.insert(-9)); assert!(b.insert(-42)); assert!(b.insert(5)); assert!(b.insert(3)); let mut i = 0; let expected = [3, 5, 11, 77]; for x in a.intersection(&b) { assert!(expected.contains(x)); i += 1 } assert_eq!(i, expected.len()); } #[test] fn test_difference() { let mut a = LinkedHashSet::new(); let mut b = LinkedHashSet::new(); assert!(a.insert(1)); assert!(a.insert(3)); assert!(a.insert(5)); assert!(a.insert(9)); assert!(a.insert(11)); assert!(b.insert(3)); assert!(b.insert(9)); let mut i = 0; let expected = [1, 5, 11]; for x in a.difference(&b) { assert!(expected.contains(x)); i += 1 } assert_eq!(i, expected.len()); } #[test] fn test_symmetric_difference() { let mut a = LinkedHashSet::new(); let mut b = LinkedHashSet::new(); assert!(a.insert(1)); assert!(a.insert(3)); assert!(a.insert(5)); assert!(a.insert(9)); assert!(a.insert(11)); assert!(b.insert(-2)); assert!(b.insert(3)); assert!(b.insert(9)); assert!(b.insert(14)); assert!(b.insert(22)); let mut i = 0; let expected = [-2, 1, 5, 11, 14, 22]; for x in a.symmetric_difference(&b) { assert!(expected.contains(x)); i += 1 } assert_eq!(i, expected.len()); } #[test] fn test_union() { let mut a = LinkedHashSet::new(); let mut b = LinkedHashSet::new(); assert!(a.insert(1)); assert!(a.insert(3)); assert!(a.insert(5)); assert!(a.insert(9)); assert!(a.insert(11)); assert!(a.insert(16)); assert!(a.insert(19)); assert!(a.insert(24)); assert!(b.insert(-2)); assert!(b.insert(1)); assert!(b.insert(5)); assert!(b.insert(9)); assert!(b.insert(13)); assert!(b.insert(19)); let mut i = 0; let expected = [-2, 1, 3, 5, 9, 11, 13, 16, 19, 24]; for x in a.union(&b) { assert!(expected.contains(x)); i += 1 } assert_eq!(i, expected.len()); } #[test] fn test_from_iter() { let xs = [1, 2, 3, 4, 5, 6, 7, 8, 9]; let set: LinkedHashSet<_> = xs.iter().cloned().collect(); for x in &xs { assert!(set.contains(x)); } } #[test] fn test_move_iter() { let hs = { let mut hs = LinkedHashSet::new(); hs.insert('a'); hs.insert('b'); hs }; let v = hs.into_iter().collect::>(); assert!(v == ['a', 'b'] || v == ['b', 'a']); } #[test] fn test_eq() { let mut s1 = LinkedHashSet::new(); s1.insert(1); s1.insert(2); s1.insert(3); let mut s2 = LinkedHashSet::new(); s2.insert(1); s2.insert(2); assert!(s1 != s2); s2.insert(3); assert_eq!(s1, s2); } #[test] fn test_show() { let mut set = LinkedHashSet::new(); let empty = LinkedHashSet::::new(); set.insert(1); set.insert(2); let set_str = format!("{:?}", set); assert!(set_str == "{1, 2}" || set_str == "{2, 1}"); assert_eq!(format!("{:?}", empty), "{}"); } #[test] fn test_trivial_drain() { let mut s = LinkedHashSet::::new(); for _ in s.drain() {} assert!(s.is_empty()); drop(s); let mut s = LinkedHashSet::::new(); drop(s.drain()); assert!(s.is_empty()); } #[test] fn test_drain() { let mut s: LinkedHashSet<_> = (1..100).collect(); for _ in 0..20 { assert_eq!(s.len(), 99); { let mut last_i = 0; let mut d = s.drain(); for (i, x) in d.by_ref().take(50).enumerate() { last_i = i; assert!(x != 0); } assert_eq!(last_i, 49); } for _ in &s { panic!("s should be empty!"); } s.extend(1..100); } } #[test] fn test_replace() { use core::hash; #[derive(Debug)] struct Foo(&'static str, i32); impl PartialEq for Foo { fn eq(&self, other: &Self) -> bool { self.0 == other.0 } } impl Eq for Foo {} impl hash::Hash for Foo { fn hash(&self, h: &mut H) { self.0.hash(h); } } let mut s = LinkedHashSet::new(); assert_eq!(s.replace(Foo("a", 1)), None); assert_eq!(s.len(), 1); 
assert_eq!(s.replace(Foo("a", 2)), Some(Foo("a", 1))); assert_eq!(s.len(), 1); let mut it = s.iter(); assert_eq!(it.next(), Some(&Foo("a", 2))); assert_eq!(it.next(), None); } #[test] fn test_extend_ref() { let mut a = LinkedHashSet::new(); a.insert(1); a.extend(&[2, 3, 4]); assert_eq!(a.len(), 4); assert!(a.contains(&1)); assert!(a.contains(&2)); assert!(a.contains(&3)); assert!(a.contains(&4)); let mut b = LinkedHashSet::new(); b.insert(5); b.insert(6); a.extend(&b); assert_eq!(a.len(), 6); assert!(a.contains(&1)); assert!(a.contains(&2)); assert!(a.contains(&3)); assert!(a.contains(&4)); assert!(a.contains(&5)); assert!(a.contains(&6)); } #[test] fn test_retain() { let xs = [1, 2, 3, 4, 5, 6]; let mut set: LinkedHashSet = xs.iter().cloned().collect(); set.retain(|&k| k % 2 == 0); assert_eq!(set.len(), 3); assert!(set.contains(&2)); assert!(set.contains(&4)); assert!(set.contains(&6)); } #[test] fn insert_order() { let mut set = LinkedHashSet::new(); set.insert(1); set.insert(2); set.insert(3); set.insert(4); assert_eq!( set.clone().into_iter().collect::>(), vec![1, 2, 3, 4] ); assert_eq!(set.into_iter().collect::>(), vec![1, 2, 3, 4]); } #[test] fn front_back() { let mut set = LinkedHashSet::new(); set.insert(1); set.insert(2); set.insert(3); set.insert(4); assert_eq!(set.front(), Some(&1)); assert_eq!(set.back(), Some(&4)); assert_eq!(set.pop_back(), Some(4)); assert_eq!(set.back(), Some(&3)); assert_eq!(set.pop_front(), Some(1)); assert_eq!(set.front(), Some(&2)); } #[test] fn double_ended_iter() { let mut set = LinkedHashSet::new(); set.insert(1); set.insert(2); set.insert(3); set.insert(4); let mut iter = set.iter(); assert_eq!(iter.next(), Some(&1)); assert_eq!(iter.next(), Some(&2)); assert_eq!(iter.next_back(), Some(&4)); assert_eq!(iter.next_back(), Some(&3)); assert_eq!(iter.next_back(), None); assert_eq!(iter.next(), None); assert_eq!(iter.next_back(), None); drop(iter); let mut iter = set.drain(); assert_eq!(iter.next(), Some(1)); assert_eq!(iter.next(), Some(2)); assert_eq!(iter.next_back(), Some(4)); assert_eq!(iter.next_back(), Some(3)); assert_eq!(iter.next_back(), None); assert_eq!(iter.next(), None); assert_eq!(iter.next_back(), None); drop(iter); set.insert(1); set.insert(2); set.insert(3); set.insert(4); let mut iter = set.into_iter(); assert_eq!(iter.next(), Some(1)); assert_eq!(iter.next(), Some(2)); assert_eq!(iter.next_back(), Some(4)); assert_eq!(iter.next_back(), Some(3)); assert_eq!(iter.next_back(), None); assert_eq!(iter.next(), None); assert_eq!(iter.next_back(), None); } #[test] fn to_back_front_order() { let mut set = LinkedHashSet::new(); set.insert(1); set.insert(2); set.insert(3); set.insert(4); assert_eq!(set.back().copied(), Some(4)); assert_eq!(set.front().copied(), Some(1)); set.to_back(&2); assert_eq!(set.back().copied(), Some(2)); set.to_front(&3); assert_eq!(set.front().copied(), Some(3)); } #[test] fn test_order_equality() { let xs = [1, 2, 3, 4, 5, 6]; let mut set1: LinkedHashSet = xs.iter().copied().collect(); let mut set2: LinkedHashSet = xs.iter().copied().collect(); assert_eq!(set1, set2); set1.to_front(&4); assert_ne!(set1, set2); set2.to_front(&4); assert_eq!(set1, set2); } hashlink-0.8.0/tests/lru_cache.rs000064400000000000000000000102060072674642500151160ustar 00000000000000use hashlink::LruCache; #[test] fn test_put_and_get() { let mut cache = LruCache::new(2); cache.insert(1, 10); cache.insert(2, 20); assert_eq!(cache.get_mut(&1), Some(&mut 10)); assert_eq!(cache.get_mut(&2), Some(&mut 20)); assert_eq!(cache.len(), 2); } #[test] fn 
test_put_update() {
    let mut cache = LruCache::new(1);
    cache.insert("1", 10);
    cache.insert("1", 19);
    assert_eq!(cache.get_mut("1"), Some(&mut 19));
    assert_eq!(cache.len(), 1);
}

#[test]
fn test_contains_key() {
    let mut cache = LruCache::new(1);
    cache.insert("1", 10);
    assert_eq!(cache.contains_key("1"), true);
}

#[test]
fn test_expire_lru() {
    let mut cache = LruCache::new(2);
    cache.insert("foo1", "bar1");
    cache.insert("foo2", "bar2");
    cache.insert("foo3", "bar3");
    assert!(cache.get_mut("foo1").is_none());
    cache.insert("foo2", "bar2update");
    cache.insert("foo4", "bar4");
    assert!(cache.get_mut("foo3").is_none());
}

#[test]
fn test_pop() {
    let mut cache = LruCache::new(2);
    cache.insert(1, 10);
    cache.insert(2, 20);
    assert_eq!(cache.len(), 2);
    let opt1 = cache.remove(&1);
    assert!(opt1.is_some());
    assert_eq!(opt1.unwrap(), 10);
    assert!(cache.get_mut(&1).is_none());
    assert_eq!(cache.len(), 1);
}

#[test]
fn test_change_capacity() {
    let mut cache = LruCache::new(2);
    assert_eq!(cache.capacity(), 2);
    cache.insert(1, 10);
    cache.insert(2, 20);
    cache.set_capacity(1);
    assert!(cache.get_mut(&1).is_none());
    assert_eq!(cache.capacity(), 1);
}

#[test]
fn test_remove() {
    let mut cache = LruCache::new(3);
    cache.insert(1, 10);
    cache.insert(2, 20);
    cache.insert(3, 30);
    cache.insert(4, 40);
    cache.insert(5, 50);
    cache.remove(&3);
    cache.remove(&4);
    assert!(cache.get_mut(&3).is_none());
    assert!(cache.get_mut(&4).is_none());
    cache.insert(6, 60);
    cache.insert(7, 70);
    cache.insert(8, 80);
    assert!(cache.get_mut(&5).is_none());
    assert_eq!(cache.get_mut(&6), Some(&mut 60));
    assert_eq!(cache.get_mut(&7), Some(&mut 70));
    assert_eq!(cache.get_mut(&8), Some(&mut 80));
}

#[test]
fn test_clear() {
    let mut cache = LruCache::new(2);
    cache.insert(1, 10);
    cache.insert(2, 20);
    cache.clear();
    assert!(cache.get_mut(&1).is_none());
    assert!(cache.get_mut(&2).is_none());
    assert!(cache.is_empty())
}

#[test]
fn test_iter() {
    let mut cache = LruCache::new(3);
    cache.insert(1, 10);
    cache.insert(2, 20);
    cache.insert(3, 30);
    cache.insert(4, 40);
    cache.insert(5, 50);
    assert_eq!(
        cache.iter().collect::<Vec<_>>(),
        [(&3, &30), (&4, &40), (&5, &50)]
    );
    assert_eq!(
        cache.iter_mut().collect::<Vec<_>>(),
        [(&3, &mut 30), (&4, &mut 40), (&5, &mut 50)]
    );
    assert_eq!(
        cache.iter().rev().collect::<Vec<_>>(),
        [(&5, &50), (&4, &40), (&3, &30)]
    );
    assert_eq!(
        cache.iter_mut().rev().collect::<Vec<_>>(),
        [(&5, &mut 50), (&4, &mut 40), (&3, &mut 30)]
    );
}

#[test]
fn test_peek() {
    let mut cache = LruCache::new_unbounded();
    cache.insert(1, 10);
    cache.insert(2, 20);
    cache.insert(3, 30);
    cache.insert(4, 40);
    cache.insert(5, 50);
    cache.insert(6, 60);
    assert_eq!(cache.remove_lru(), Some((1, 10)));
    assert_eq!(cache.peek(&2), Some(&20));
    assert_eq!(cache.remove_lru(), Some((2, 20)));
    assert_eq!(cache.peek_mut(&3), Some(&mut 30));
    assert_eq!(cache.remove_lru(), Some((3, 30)));
    assert_eq!(cache.get(&4), Some(&40));
    assert_eq!(cache.remove_lru(), Some((5, 50)));
}

#[test]
fn test_entry() {
    let mut cache = LruCache::new(4);
    cache.insert(1, 10);
    cache.insert(2, 20);
    cache.insert(3, 30);
    cache.insert(4, 40);
    cache.insert(5, 50);
    cache.insert(6, 60);
    assert_eq!(cache.len(), 4);
    cache.entry(7).or_insert(70);
    cache.entry(8).or_insert(80);
    cache.entry(9).or_insert(90);
    assert!(cache.len() <= 5);
    cache.raw_entry_mut().from_key(&10).or_insert(10, 100);
    cache.raw_entry_mut().from_key(&11).or_insert(11, 110);
    cache.raw_entry_mut().from_key(&12).or_insert(12, 120);
    assert!(cache.len() <= 5);
}
hashlink-0.8.0/tests/serde.rs000064400000000000000000000045170072674642500143030ustar 00000000000000#![cfg(feature = 
"serde_impl")] use fxhash::FxBuildHasher; use hashlink::{LinkedHashMap, LinkedHashSet}; use serde_test::{assert_tokens, Token}; #[test] fn map_serde_tokens_empty() { let map = LinkedHashMap::::new(); assert_tokens(&map, &[Token::Map { len: Some(0) }, Token::MapEnd]); } #[test] fn map_serde_tokens() { let mut map = LinkedHashMap::new(); map.insert('a', 10); map.insert('b', 20); map.insert('c', 30); assert_tokens( &map, &[ Token::Map { len: Some(3) }, Token::Char('a'), Token::I32(10), Token::Char('b'), Token::I32(20), Token::Char('c'), Token::I32(30), Token::MapEnd, ], ); } #[test] fn map_serde_tokens_empty_generic() { let map = LinkedHashMap::::with_hasher(FxBuildHasher::default()); assert_tokens(&map, &[Token::Map { len: Some(0) }, Token::MapEnd]); } #[test] fn map_serde_tokens_generic() { let mut map = LinkedHashMap::with_hasher(FxBuildHasher::default()); map.insert('a', 10); map.insert('b', 20); map.insert('c', 30); assert_tokens( &map, &[ Token::Map { len: Some(3) }, Token::Char('a'), Token::I32(10), Token::Char('b'), Token::I32(20), Token::Char('c'), Token::I32(30), Token::MapEnd, ], ); } #[test] fn set_serde_tokens_empty() { let set = LinkedHashSet::::new(); assert_tokens(&set, &[Token::Seq { len: Some(0) }, Token::SeqEnd]); } #[test] fn set_serde_tokens() { let mut set = LinkedHashSet::new(); set.insert(10); set.insert(20); set.insert(30); assert_tokens( &set, &[ Token::Seq { len: Some(3) }, Token::I32(10), Token::I32(20), Token::I32(30), Token::SeqEnd, ], ); } #[test] fn set_serde_tokens_generic() { let mut set = LinkedHashSet::with_hasher(FxBuildHasher::default()); set.insert('a'); set.insert('b'); set.insert('c'); assert_tokens( &set, &[ Token::Seq { len: Some(3) }, Token::Char('a'), Token::Char('b'), Token::Char('c'), Token::SeqEnd, ], ); }