===== typed-arena-2.0.1/.gitignore =====

target
Cargo.lock

===== typed-arena-2.0.1/.travis.yml =====

sudo: false
language: rust

matrix:
  include:
    - rust: 1.36.0
    - rust: stable
    - rust: beta
    - rust: nightly
    - rust: nightly
      os: linux
      name: "Miri"
      script:
        - sh ci/miri.sh

script:
  - cargo test
  - cargo build --no-default-features

===== typed-arena-2.0.1/CHANGELOG.md =====

## Unreleased

Released YYYY/MM/DD.

### Added

* TODO (or remove section if none)

### Changed

* TODO (or remove section if none)

### Deprecated

* TODO (or remove section if none)

### Removed

* TODO (or remove section if none)

### Fixed

* TODO (or remove section if none)

### Security

* TODO (or remove section if none)

--------------------------------------------------------------------------------

## 2.0.1

Released 2020/01/10.

### Fixed

* Support `#![no_std]` on stable Rust.

--------------------------------------------------------------------------------

## 2.0.0

Released 2019/12/03.

### Fixed

* Fixed some intra-documentation URLs.

--------------------------------------------------------------------------------

## 2.0.0-rc1

Released 2019/11/26.

Unless any issues are discovered or raised, we will release version 2.0.0 soon.

### Added

* Added `alloc_str` to `Arena<u8>`, to be able to allocate string slices.

### Changed

* The minimum supported Rust version is now 1.36.0.
* `alloc_uninitialized` returns `&mut [MaybeUninit<T>]` instead of `*mut [T]`,
  which is less prone to undefined behavior.

--------------------------------------------------------------------------------

## 1.7.0

Released 2019/10/31. *Spooky!*

### Added

* Added a `len` method to count how many items are in an arena.

### Fixed

* Fixed some theoretical overflows.

--------------------------------------------------------------------------------

## 1.6.1

Released 2019/09/17.

### Fixed

* Now compiles on old stable Rust versions again, instead of just new stable
  Rust versions. From here on out, we'll promise that 1.X will continue to
  compile on rustc versions >= 1.32.

--------------------------------------------------------------------------------

## 1.6.0

Released 2019/09/09.

### Added

* Added the `Arena::iter_mut` method for mutably iterating over an arena's
  contents. [See #29 for details.](https://github.com/SimonSapin/rust-typed-arena/pull/29)

--------------------------------------------------------------------------------

## 1.5.0

Released 2019/08/02.

### Added

* `Arena` now implements `Default`

### Fixed

* Introduced an internal fast path for allocation, improving performance.
* Tests now run cleanly on Miri. There was previously a technicality where
  the stacked borrow rules were not being followed.

--------------------------------------------------------------------------------

## 1.4.1

Released 2018/06/29.

### Added

* Added more documentation comments and examples.

--------------------------------------------------------------------------------

## 1.4.0

Released 2018/06/21.

### Added

* Added a new, on-by-default feature named "std". Disabling this feature
  allows the crate to be used in `#![no_std]` environments.
  [#15][] [#12][]

[#15]: https://github.com/SimonSapin/rust-typed-arena/pull/15
[#12]: https://github.com/SimonSapin/rust-typed-arena/pull/12

===== typed-arena-2.0.1/Cargo.toml.orig =====

[package]
name = "typed-arena"
version = "2.0.1"
authors = ["Simon Sapin ", "Nick Fitzgerald "]
license = "MIT"
description = "The arena, a fast but limited type of allocator"
documentation = "https://docs.rs/typed-arena"
repository = "https://github.com/SimonSapin/rust-typed-arena"
categories = ["memory-management", "no-std"]
keywords = ["arena"]
readme = "./README.md"

[lib]
name = "typed_arena"
path = "src/lib.rs"

[features]
default = ["std"]
std = []

[dev-dependencies]
criterion = "0.3.0"

[[bench]]
name = "benches"
path = "benches/benches.rs"
harness = false

[profile.bench]
debug = true

===== typed-arena-2.0.1/Cargo.toml =====

# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)

[package]
name = "typed-arena"
version = "2.0.1"
authors = ["Simon Sapin ", "Nick Fitzgerald "]
description = "The arena, a fast but limited type of allocator"
documentation = "https://docs.rs/typed-arena"
readme = "./README.md"
keywords = ["arena"]
categories = ["memory-management", "no-std"]
license = "MIT"
repository = "https://github.com/SimonSapin/rust-typed-arena"

[profile.bench]
debug = true

[lib]
name = "typed_arena"
path = "src/lib.rs"

[[bench]]
name = "benches"
path = "benches/benches.rs"
harness = false

[dev-dependencies.criterion]
version = "0.3.0"

[features]
default = ["std"]
std = []

===== typed-arena-2.0.1/LICENSE =====

MIT License

Copyright (c) 2018 The typed-arena developers

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
===== typed-arena-2.0.1/README.md =====

# `typed-arena`

[![](https://docs.rs/typed-arena/badge.svg)](https://docs.rs/typed-arena/)
[![](https://img.shields.io/crates/v/typed-arena.svg)](https://crates.io/crates/typed-arena)
[![](https://img.shields.io/crates/d/typed-arena.svg)](https://crates.io/crates/typed-arena)
[![Travis CI Build Status](https://travis-ci.org/SimonSapin/rust-typed-arena.svg?branch=master)](https://travis-ci.org/SimonSapin/rust-typed-arena)

**A fast (but limited) allocation arena for values of a single type.**

Allocated objects are destroyed all at once, when the arena itself is
destroyed. There is no deallocation of individual objects while the arena
itself is still alive. The flipside is that allocation is fast: typically
just a vector push.

There is also a method `into_vec()` to recover ownership of allocated objects
when the arena is no longer required, instead of destroying everything.

## Example

```rust
use typed_arena::Arena;

struct Monster {
    level: u32,
}

let monsters = Arena::new();

let vegeta = monsters.alloc(Monster { level: 9001 });
assert!(vegeta.level > 9000);
```

## Safe Cycles

All allocated objects get the same lifetime, so you can safely create cycles
between them. This can be useful for certain data structures, such as graphs
and trees with parent pointers.

```rust
use std::cell::Cell;
use typed_arena::Arena;

struct CycleParticipant<'a> {
    other: Cell<Option<&'a CycleParticipant<'a>>>,
}

let arena = Arena::new();

let a = arena.alloc(CycleParticipant { other: Cell::new(None) });
let b = arena.alloc(CycleParticipant { other: Cell::new(None) });

a.other.set(Some(b));
b.other.set(Some(a));
```

## Alternatives

### Need to allocate many different types of values?

Use multiple arenas if you have only a couple different types, or try
[`bumpalo`](https://crates.io/crates/bumpalo), a bump-allocation arena that
can allocate heterogeneous types of values.

### Want allocation to return identifiers instead of references, so you don't have to deal with references and lifetimes everywhere?

Check out [`id-arena`](https://crates.io/crates/id-arena) or
[`generational-arena`](https://crates.io/crates/generational-arena).

### Need to deallocate individual objects one at a time?

Check out [`generational-arena`](https://crates.io/crates/generational-arena)
for an arena-style crate, or look for a more traditional allocator.
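
## Recovering Ownership

The `into_vec()` method mentioned above consumes the arena and hands the
values back in allocation order. A minimal sketch, mirroring the doc example
on `Arena::into_vec` in `src/lib.rs`:

```rust
use typed_arena::Arena;

let arena = Arena::new();

arena.alloc("a");
arena.alloc("b");
arena.alloc("c");

// Ownership of the values moves into the returned `Vec`,
// in the order they were allocated.
let values = arena.into_vec();
assert_eq!(values, vec!["a", "b", "c"]);
```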
===== typed-arena-2.0.1/benches/benches.rs =====

#[macro_use]
extern crate criterion;
extern crate typed_arena;

use criterion::{Criterion, ParameterizedBenchmark, Throughput};

#[derive(Default)]
struct Small(usize);

#[derive(Default)]
struct Big([usize; 32]);

fn allocate<T: Default>(n: usize) {
    let arena = typed_arena::Arena::new();
    for _ in 0..n {
        let val: &mut T = arena.alloc(Default::default());
        criterion::black_box(val);
    }
}

fn criterion_benchmark(c: &mut Criterion) {
    c.bench(
        "allocate",
        ParameterizedBenchmark::new(
            "allocate-small",
            |b, n| b.iter(|| allocate::<Small>(*n)),
            (1..5).map(|n| n * 1000).collect::<Vec<_>>(),
        )
        .throughput(|n| Throughput::Elements(*n as u64)),
    );
    c.bench(
        "allocate",
        ParameterizedBenchmark::new(
            "allocate-big",
            |b, n| b.iter(|| allocate::<Big>(*n)),
            (1..5).map(|n| n * 1000).collect::<Vec<_>>(),
        )
        .throughput(|n| Throughput::Elements(*n as u64)),
    );
}

criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);

===== typed-arena-2.0.1/ci/miri.sh =====

set -ex

MIRI_NIGHTLY=nightly-$(curl -s https://rust-lang.github.io/rustup-components-history/x86_64-unknown-linux-gnu/miri)
echo "Installing latest nightly with Miri: $MIRI_NIGHTLY"
rustup default "$MIRI_NIGHTLY"

cargo clean
rustup component add miri
cargo miri setup
cargo miri test

===== typed-arena-2.0.1/src/lib.rs =====

//! The arena, a fast but limited type of allocator.
//!
//! **A fast (but limited) allocation arena for values of a single type.**
//!
//! Allocated objects are destroyed all at once, when the arena itself is
//! destroyed. There is no deallocation of individual objects while the arena
//! itself is still alive. The flipside is that allocation is fast: typically
//! just a vector push.
//!
//! There is also a method `into_vec()` to recover ownership of allocated
//! objects when the arena is no longer required, instead of destroying
//! everything.
//!
//! ## Example
//!
//! ```
//! use typed_arena::Arena;
//!
//! struct Monster {
//!     level: u32,
//! }
//!
//! let monsters = Arena::new();
//!
//! let vegeta = monsters.alloc(Monster { level: 9001 });
//! assert!(vegeta.level > 9000);
//! ```
//!
//! ## Safe Cycles
//!
//! All allocated objects get the same lifetime, so you can safely create cycles
//! between them. This can be useful for certain data structures, such as graphs
//! and trees with parent pointers.
//!
//! ```
//! use std::cell::Cell;
//! use typed_arena::Arena;
//!
//! struct CycleParticipant<'a> {
//!     other: Cell<Option<&'a CycleParticipant<'a>>>,
//! }
//!
//! let arena = Arena::new();
//!
//! let a = arena.alloc(CycleParticipant { other: Cell::new(None) });
//! let b = arena.alloc(CycleParticipant { other: Cell::new(None) });
//!
//! a.other.set(Some(b));
//! b.other.set(Some(a));
//! ```

// Potential optimizations:
// 1) add and stabilize a method for in-place reallocation of vecs.
// 2) add and stabilize placement new.
// 3) use an iterator. This may add far too much unsafe code.

#![deny(missing_docs)]
#![cfg_attr(not(any(feature = "std", test)), no_std)]

#[cfg(not(feature = "std"))]
extern crate alloc;

#[cfg(any(feature = "std", test))]
extern crate core;

#[cfg(not(feature = "std"))]
use alloc::vec::Vec;

use core::cell::RefCell;
use core::cmp;
use core::iter;
use core::mem;
use core::slice;
use core::str;

use mem::MaybeUninit;

#[cfg(test)]
mod test;

// Initial size in bytes.
const INITIAL_SIZE: usize = 1024;
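// `Arena::new` sizes the first chunk as `INITIAL_SIZE / size_of::<T>()`
// elements (clamped to at least `MIN_CAPACITY` below). For example, an arena
// of `u64` (8 bytes each) starts with room for 1024 / 8 = 128 values.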
// Minimum capacity. Must be larger than 0.
const MIN_CAPACITY: usize = 1;

/// An arena of objects of type `T`.
///
/// ## Example
///
/// ```
/// use typed_arena::Arena;
///
/// struct Monster {
///     level: u32,
/// }
///
/// let monsters = Arena::new();
///
/// let vegeta = monsters.alloc(Monster { level: 9001 });
/// assert!(vegeta.level > 9000);
/// ```
pub struct Arena<T> {
    chunks: RefCell<ChunkList<T>>,
}

struct ChunkList<T> {
    current: Vec<T>,
    rest: Vec<Vec<T>>,
}

impl<T> Arena<T> {
    /// Construct a new arena.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena = Arena::new();
    /// # arena.alloc(1);
    /// ```
    pub fn new() -> Arena<T> {
        let size = cmp::max(1, mem::size_of::<T>());
        Arena::with_capacity(INITIAL_SIZE / size)
    }

    /// Construct a new arena with capacity for `n` values pre-allocated.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena = Arena::with_capacity(1337);
    /// # arena.alloc(1);
    /// ```
    pub fn with_capacity(n: usize) -> Arena<T> {
        let n = cmp::max(MIN_CAPACITY, n);
        Arena {
            chunks: RefCell::new(ChunkList {
                current: Vec::with_capacity(n),
                rest: Vec::new(),
            }),
        }
    }

    /// Return the number of values allocated in the arena.
    ///
    /// This is useful for using the size of previous typed arenas to build new typed arenas with
    /// large enough spaces.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena = Arena::with_capacity(0);
    /// let a = arena.alloc(1);
    /// let b = arena.alloc(2);
    ///
    /// assert_eq!(arena.len(), 2);
    /// ```
    pub fn len(&self) -> usize {
        let chunks = self.chunks.borrow();

        let mut res = 0;
        for vec in chunks.rest.iter() {
            res += vec.len()
        }

        res + chunks.current.len()
    }

    /// Allocates a value in the arena, and returns a mutable reference
    /// to that value.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena = Arena::new();
    /// let x = arena.alloc(42);
    /// assert_eq!(*x, 42);
    /// ```
    #[inline]
    pub fn alloc(&self, value: T) -> &mut T {
        self.alloc_fast_path(value)
            .unwrap_or_else(|value| self.alloc_slow_path(value))
    }

    #[inline]
    fn alloc_fast_path(&self, value: T) -> Result<&mut T, T> {
        let mut chunks = self.chunks.borrow_mut();
        let len = chunks.current.len();
        if len < chunks.current.capacity() {
            chunks.current.push(value);
            // Avoid going through `Vec::deref_mut`, which overlaps
            // other references we have already handed out!
            debug_assert!(len < chunks.current.len()); // bounds check
            Ok(unsafe { &mut *chunks.current.as_mut_ptr().add(len) })
        } else {
            Err(value)
        }
    }

    fn alloc_slow_path(&self, value: T) -> &mut T {
        &mut self.alloc_extend(iter::once(value))[0]
    }
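    // The fast/slow split above keeps `alloc` cheap: the common case is a
    // plain `Vec::push` into spare capacity of the current chunk, and only a
    // full chunk falls through to `alloc_slow_path`, which ends up in the
    // `#[cold]` `ChunkList::reserve` below and opens a bigger chunk.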
    /// Uses the contents of an iterator to allocate values in the arena.
    /// Returns a mutable slice that contains these values.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena = Arena::new();
    /// let abc = arena.alloc_extend("abcdefg".chars().take(3));
    /// assert_eq!(abc, ['a', 'b', 'c']);
    /// ```
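    ///
    /// The iterator's `size_hint` is used only as an allocation hint; the
    /// result is still correct for iterators that misreport their length.
    /// A small sketch using `filter`, whose hint has a lower bound of zero:
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena = Arena::new();
    /// let evens = arena.alloc_extend((0..10).filter(|n| n % 2 == 0));
    /// assert_eq!(evens, [0, 2, 4, 6, 8]);
    /// ```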
    pub fn alloc_extend<I>(&self, iterable: I) -> &mut [T]
    where
        I: IntoIterator<Item = T>,
    {
        let mut iter = iterable.into_iter();

        let mut chunks = self.chunks.borrow_mut();

        let iter_min_len = iter.size_hint().0;
        let mut next_item_index;
        debug_assert!(
            chunks.current.capacity() >= chunks.current.len(),
            "capacity is always greater than or equal to len, so we don't need to worry about underflow"
        );
        if iter_min_len > chunks.current.capacity() - chunks.current.len() {
            chunks.reserve(iter_min_len);
            chunks.current.extend(iter);
            next_item_index = 0;
        } else {
            next_item_index = chunks.current.len();
            let mut i = 0;
            while let Some(elem) = iter.next() {
                if chunks.current.len() == chunks.current.capacity() {
                    // The iterator was larger than we could fit into the current chunk.
                    let chunks = &mut *chunks;
                    // Create a new chunk into which we can freely push the entire iterator into
                    chunks.reserve(i + 1);
                    let previous_chunk = chunks.rest.last_mut().unwrap();
                    let previous_chunk_len = previous_chunk.len();
                    // Move any elements we put into the previous chunk into this new chunk
                    chunks
                        .current
                        .extend(previous_chunk.drain(previous_chunk_len - i..));
                    chunks.current.push(elem);
                    // And the remaining elements in the iterator
                    chunks.current.extend(iter);
                    next_item_index = 0;
                    break;
                } else {
                    chunks.current.push(elem);
                }
                i += 1;
            }
        }

        let new_slice_ref = &mut chunks.current[next_item_index..];

        // Extend the lifetime from that of `chunks_borrow` to that of `self`.
        // This is OK because we’re careful to never move items
        // by never pushing to inner `Vec`s beyond their initial capacity.
        // The returned reference is unique (`&mut`):
        // the `Arena` never gives away references to existing items.
        unsafe { mem::transmute::<&mut [T], &mut [T]>(new_slice_ref) }
    }

    /// Allocates space for a given number of values, but doesn't initialize it.
    ///
    /// ## Safety
    ///
    /// After calling this method, the arena considers the elements initialized. If you fail to
    /// initialize them (which includes because of panicking during the initialization), the arena
    /// will run destructors on the uninitialized memory. Therefore, you must initialize them.
    ///
    /// Considering how easy it is to cause undefined behaviour using this, you're advised to
    /// prefer the other (safe) methods, like [`alloc_extend`][Arena::alloc_extend].
    ///
    /// ## Example
    ///
    /// ```rust
    /// use std::mem::{self, MaybeUninit};
    /// use std::ptr;
    /// use typed_arena::Arena;
    ///
    /// // Transmute from MaybeUninit slice to slice of initialized T.
    /// // It is a separate function to preserve the lifetime of the reference.
    /// unsafe fn transmute_uninit<A>(r: &mut [MaybeUninit<A>]) -> &mut [A] {
    ///     mem::transmute(r)
    /// }
    ///
    /// let arena: Arena<bool> = Arena::new();
    /// let slice: &mut [bool];
    /// unsafe {
    ///     let uninitialized = arena.alloc_uninitialized(10);
    ///     for elem in uninitialized.iter_mut() {
    ///         ptr::write(elem.as_mut_ptr(), true);
    ///     }
    ///     slice = transmute_uninit(uninitialized);
    /// }
    /// ```
    ///
    /// ## Alternative allocation pattern
    ///
    /// To avoid the problem of dropping assumed to be initialized elements on panic, it is also
    /// possible to combine the [`reserve_extend`][Arena::reserve_extend] with
    /// [`uninitialized_array`][Arena::uninitialized_array], initialize the elements and confirm
    /// them by this method. In such case, when there's a panic during initialization, the already
    /// initialized elements would leak but it wouldn't cause UB.
    ///
    /// ```rust
    /// use std::mem::{self, MaybeUninit};
    /// use std::ptr;
    /// use typed_arena::Arena;
    ///
    /// unsafe fn transmute_uninit<A>(r: &mut [MaybeUninit<A>]) -> &mut [A] {
    ///     mem::transmute(r)
    /// }
    ///
    /// const COUNT: usize = 2;
    ///
    /// let arena: Arena<String> = Arena::new();
    ///
    /// arena.reserve_extend(COUNT);
    /// let slice: &mut [String];
    /// unsafe {
    ///     // Perform initialization before we claim the memory.
    ///     let uninitialized = arena.uninitialized_array();
    ///     assert!((*uninitialized).len() >= COUNT); // Ensured by the reserve_extend
    ///     for elem in &mut (*uninitialized)[..COUNT] {
    ///         ptr::write(elem.as_mut_ptr(), "Hello".to_owned());
    ///     }
    ///     let addr = (*uninitialized).as_ptr() as usize;
    ///
    ///     // The alloc_uninitialized returns the same memory, but "confirms" its allocation.
    ///     slice = transmute_uninit(arena.alloc_uninitialized(COUNT));
    ///     assert_eq!(addr, slice.as_ptr() as usize);
    ///     assert_eq!(slice, &["Hello".to_owned(), "Hello".to_owned()]);
    /// }
    /// ```
    pub unsafe fn alloc_uninitialized(&self, num: usize) -> &mut [MaybeUninit<T>] {
        let mut chunks = self.chunks.borrow_mut();

        debug_assert!(
            chunks.current.capacity() >= chunks.current.len(),
            "capacity is always greater than or equal to len, so we don't need to worry about underflow"
        );
        if num > chunks.current.capacity() - chunks.current.len() {
            chunks.reserve(num);
        }

        // At this point, the current chunk must have free capacity.
        let next_item_index = chunks.current.len();
        chunks.current.set_len(next_item_index + num);
        // Go through pointers, to make sure we never create a reference to uninitialized T.
        let start = chunks.current.as_mut_ptr().offset(next_item_index as isize);
        let start_uninit = start as *mut MaybeUninit<T>;
        slice::from_raw_parts_mut(start_uninit, num)
    }

    /// Makes sure there's enough continuous space for at least `num` elements.
    ///
    /// This may save some work if called before [`alloc_extend`][Arena::alloc_extend]. It also
    /// allows somewhat safer use pattern of [`alloc_uninitialized`][Arena::alloc_uninitialized].
    /// On the other hand this might waste up to `n - 1` elements of space. In case new allocation
    /// is needed, the unused ones in current chunk are never used.
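    ///
    /// # Example
    ///
    /// ```
    /// // A small sketch: pre-reserving before a bulk insertion, so that
    /// // `alloc_extend` can fill the current chunk without growing mid-way.
    /// use typed_arena::Arena;
    ///
    /// let arena: Arena<u32> = Arena::new();
    /// arena.reserve_extend(100);
    ///
    /// let slice = arena.alloc_extend(0..100);
    /// assert_eq!(slice.len(), 100);
    /// ```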
    pub fn reserve_extend(&self, num: usize) {
        let mut chunks = self.chunks.borrow_mut();

        debug_assert!(
            chunks.current.capacity() >= chunks.current.len(),
            "capacity is always greater than or equal to len, so we don't need to worry about underflow"
        );
        if num > chunks.current.capacity() - chunks.current.len() {
            chunks.reserve(num);
        }
    }

    /// Returns unused space.
    ///
    /// *This unused space is still not considered "allocated".* Therefore, it
    /// won't be dropped unless there are further calls to `alloc`,
    /// [`alloc_uninitialized`][Arena::alloc_uninitialized], or
    /// [`alloc_extend`][Arena::alloc_extend] which is why the method is safe.
    ///
    /// It returns a raw pointer to avoid creating multiple mutable references to the same place.
    /// It is up to the caller not to dereference it after any of the `alloc_` methods are called.
    pub fn uninitialized_array(&self) -> *mut [MaybeUninit<T>] {
        let mut chunks = self.chunks.borrow_mut();
        let len = chunks.current.capacity() - chunks.current.len();
        let next_item_index = chunks.current.len();

        unsafe {
            // Go through pointers, to make sure we never create a reference to uninitialized T.
            let start = chunks.current.as_mut_ptr().offset(next_item_index as isize);
            let start_uninit = start as *mut MaybeUninit<T>;
            slice::from_raw_parts_mut(start_uninit, len) as *mut _
        }
    }

    /// Convert this `Arena` into a `Vec<T>`.
    ///
    /// Items in the resulting `Vec<T>` appear in the order that they were
    /// allocated in.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena = Arena::new();
    ///
    /// arena.alloc("a");
    /// arena.alloc("b");
    /// arena.alloc("c");
    ///
    /// let easy_as_123 = arena.into_vec();
    ///
    /// assert_eq!(easy_as_123, vec!["a", "b", "c"]);
    /// ```
    pub fn into_vec(self) -> Vec<T> {
        let mut chunks = self.chunks.into_inner();
        // keep order of allocation in the resulting Vec
        let n = chunks
            .rest
            .iter()
            .fold(chunks.current.len(), |a, v| a + v.len());
        let mut result = Vec::with_capacity(n);
        for mut vec in chunks.rest {
            result.append(&mut vec);
        }
        result.append(&mut chunks.current);
        result
    }

    /// Returns an iterator that allows modifying each value.
    ///
    /// Items are yielded in the order that they were allocated.
    ///
    /// ## Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// #[derive(Debug, PartialEq, Eq)]
    /// struct Point { x: i32, y: i32 };
    ///
    /// let mut arena = Arena::new();
    ///
    /// arena.alloc(Point { x: 0, y: 0 });
    /// arena.alloc(Point { x: 1, y: 1 });
    ///
    /// for point in arena.iter_mut() {
    ///     point.x += 10;
    /// }
    ///
    /// let points = arena.into_vec();
    ///
    /// assert_eq!(points, vec![Point { x: 10, y: 0 }, Point { x: 11, y: 1 }]);
    /// ```
    ///
    /// ## Immutable Iteration
    ///
    /// Note that there is no corresponding `iter` method. Access to the arena's contents
    /// requires mutable access to the arena itself.
    ///
    /// ```compile_fail
    /// use typed_arena::Arena;
    ///
    /// let mut arena = Arena::new();
    /// let x = arena.alloc(1);
    ///
    /// // borrow error!
    /// for i in arena.iter_mut() {
    ///     println!("i: {}", i);
    /// }
    ///
    /// // borrow error!
    /// *x = 2;
    /// ```
    #[inline]
    pub fn iter_mut(&mut self) -> IterMut<T> {
        let chunks = self.chunks.get_mut();
        let position = if !chunks.rest.is_empty() {
            let index = 0;
            let inner_iter = chunks.rest[index].iter_mut();
            // Extend the lifetime of the individual elements to that of the arena.
            // This is OK because we borrow the arena mutably to prevent new allocations
            // and we take care here to never move items inside the arena while the
            // iterator is alive.
            let inner_iter = unsafe { mem::transmute(inner_iter) };
            IterMutState::ChunkListRest { index, inner_iter }
        } else {
            // Extend the lifetime of the individual elements to that of the arena.
            let iter = unsafe { mem::transmute(chunks.current.iter_mut()) };
            IterMutState::ChunkListCurrent { iter }
        };
        IterMut {
            chunks,
            state: position,
        }
    }
}

impl Arena<u8> {
    /// Allocates a string slice and returns a mutable reference to it.
    ///
    /// This is on `Arena<u8>`, because string slices use byte slices (`[u8]`) as their backing
    /// storage.
    ///
    /// # Example
    ///
    /// ```
    /// use typed_arena::Arena;
    ///
    /// let arena: Arena<u8> = Arena::new();
    /// let hello = arena.alloc_str("Hello world");
    /// assert_eq!("Hello world", hello);
    /// ```
    #[inline]
    pub fn alloc_str(&self, s: &str) -> &mut str {
        let buffer = self.alloc_extend(s.bytes());
        // Can't fail the utf8 validation, it already came in as utf8
        unsafe { str::from_utf8_unchecked_mut(buffer) }
    }
}

impl<T> Default for Arena<T> {
    fn default() -> Self {
        Self::new()
    }
}

impl<T> ChunkList<T> {
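    // Growth policy: a new chunk's capacity is the larger of twice the
    // current chunk's capacity and the next power of two covering
    // `additional`. E.g. a full 128-element chunk asked for one more element
    // is followed by a 256-element chunk, keeping allocation amortized O(1).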
    #[inline(never)]
    #[cold]
    fn reserve(&mut self, additional: usize) {
        let double_cap = self
            .current
            .capacity()
            .checked_mul(2)
            .expect("capacity overflow");
        let required_cap = additional
            .checked_next_power_of_two()
            .expect("capacity overflow");
        let new_capacity = cmp::max(double_cap, required_cap);
        let chunk = mem::replace(&mut self.current, Vec::with_capacity(new_capacity));
        self.rest.push(chunk);
    }
}

enum IterMutState<'a, T> {
    ChunkListRest {
        index: usize,
        inner_iter: slice::IterMut<'a, T>,
    },
    ChunkListCurrent {
        iter: slice::IterMut<'a, T>,
    },
}

/// Mutable arena iterator.
///
/// This struct is created by the [`iter_mut`](struct.Arena.html#method.iter_mut) method on [Arenas](struct.Arena.html).
pub struct IterMut<'a, T: 'a> {
    chunks: &'a mut ChunkList<T>,
    state: IterMutState<'a, T>,
}

impl<'a, T> Iterator for IterMut<'a, T> {
    type Item = &'a mut T;
    fn next(&mut self) -> Option<&'a mut T> {
        loop {
            self.state = match self.state {
                IterMutState::ChunkListRest {
                    mut index,
                    ref mut inner_iter,
                } => {
                    match inner_iter.next() {
                        Some(item) => return Some(item),
                        None => {
                            index += 1;
                            if index < self.chunks.rest.len() {
                                let inner_iter = self.chunks.rest[index].iter_mut();
                                // Extend the lifetime of the individual elements to that of the arena.
                                let inner_iter = unsafe { mem::transmute(inner_iter) };
                                IterMutState::ChunkListRest { index, inner_iter }
                            } else {
                                let iter = self.chunks.current.iter_mut();
                                // Extend the lifetime of the individual elements to that of the arena.
                                let iter = unsafe { mem::transmute(iter) };
                                IterMutState::ChunkListCurrent { iter }
                            }
                        }
                    }
                }
                IterMutState::ChunkListCurrent { ref mut iter } => return iter.next(),
            };
        }
    }
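    // The size hint below is an estimate computed from chunk lengths and
    // capacities rather than from how far this iterator has advanced. The
    // tests in `src/test.rs` check that a fresh iterator's bounds bracket the
    // true arena length and stay within a factor of 3 of it.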
    fn size_hint(&self) -> (usize, Option<usize>) {
        let current_len = self.chunks.current.len();
        let current_cap = self.chunks.current.capacity();
        if self.chunks.rest.is_empty() {
            (current_len, Some(current_len))
        } else {
            let rest_len = self.chunks.rest.len();
            let last_chunk_len = self
                .chunks
                .rest
                .last()
                .map(|chunk| chunk.len())
                .unwrap_or(0);

            let min = current_len + last_chunk_len;
            let max = min + (rest_len * current_cap / rest_len);

            (min, Some(max))
        }
    }
}

===== typed-arena-2.0.1/src/test.rs =====

use super::*;

use std::cell::Cell;
use std::mem;
use std::panic::{self, AssertUnwindSafe};
use std::ptr;

struct DropTracker<'a>(&'a Cell<u32>);
impl<'a> Drop for DropTracker<'a> {
    fn drop(&mut self) {
        self.0.set(self.0.get() + 1);
    }
}

struct Node<'a, 'b: 'a>(Option<&'a Node<'a, 'b>>, u32, DropTracker<'b>);

#[test]
fn arena_as_intended() {
    let drop_counter = Cell::new(0);
    {
        let arena = Arena::with_capacity(2);

        let mut node: &Node = arena.alloc(Node(None, 1, DropTracker(&drop_counter)));
        assert_eq!(arena.chunks.borrow().rest.len(), 0);

        node = arena.alloc(Node(Some(node), 2, DropTracker(&drop_counter)));
        assert_eq!(arena.chunks.borrow().rest.len(), 0);

        node = arena.alloc(Node(Some(node), 3, DropTracker(&drop_counter)));
        assert_eq!(arena.chunks.borrow().rest.len(), 1);

        node = arena.alloc(Node(Some(node), 4, DropTracker(&drop_counter)));
        assert_eq!(arena.chunks.borrow().rest.len(), 1);

        assert_eq!(node.1, 4);
        assert_eq!(node.0.unwrap().1, 3);
        assert_eq!(node.0.unwrap().0.unwrap().1, 2);
        assert_eq!(node.0.unwrap().0.unwrap().0.unwrap().1, 1);
        assert!(node.0.unwrap().0.unwrap().0.unwrap().0.is_none());

        assert_eq!(arena.len(), 4);

        mem::drop(node);
        assert_eq!(drop_counter.get(), 0);

        let mut node: &Node = arena.alloc(Node(None, 5, DropTracker(&drop_counter)));
        assert_eq!(arena.chunks.borrow().rest.len(), 1);

        node = arena.alloc(Node(Some(node), 6, DropTracker(&drop_counter)));
        assert_eq!(arena.chunks.borrow().rest.len(), 1);

        node = arena.alloc(Node(Some(node), 7, DropTracker(&drop_counter)));
        assert_eq!(arena.chunks.borrow().rest.len(), 2);

        assert_eq!(drop_counter.get(), 0);

        assert_eq!(node.1, 7);
        assert_eq!(node.0.unwrap().1, 6);
        assert_eq!(node.0.unwrap().0.unwrap().1, 5);
        assert!(node.0.unwrap().0.unwrap().0.is_none());

        assert_eq!(drop_counter.get(), 0);
    }
    assert_eq!(drop_counter.get(), 7);
}

#[test]
fn ensure_into_vec_maintains_order_of_allocation() {
    let arena = Arena::with_capacity(1); // force multiple inner vecs
    for &s in &["t", "e", "s", "t"] {
        arena.alloc(String::from(s));
    }
    let vec = arena.into_vec();
    assert_eq!(vec, vec!["t", "e", "s", "t"]);
}

#[test]
fn test_zero_cap() {
    let arena = Arena::with_capacity(0);
    let a = arena.alloc(1);
    let b = arena.alloc(2);
    assert_eq!(*a, 1);
    assert_eq!(*b, 2);
    assert_eq!(arena.len(), 2);
}

#[test]
fn test_alloc_extend() {
    let arena = Arena::with_capacity(2);
    for i in 0..15 {
        let slice = arena.alloc_extend(0..i);
        for (j, &elem) in slice.iter().enumerate() {
            assert_eq!(j, elem);
        }
    }
}

#[test]
fn test_alloc_uninitialized() {
    const LIMIT: usize = 15;
    let drop_counter = Cell::new(0);
    unsafe {
        let arena: Arena<Node> = Arena::with_capacity(4);
        for i in 0..LIMIT {
            let slice = arena.alloc_uninitialized(i);
            for (j, elem) in slice.iter_mut().enumerate() {
                ptr::write(elem.as_mut_ptr(), Node(None, j as u32, DropTracker(&drop_counter)));
            }
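            // Nothing may run its destructor while the arena is alive; drops
            // happen only when the arena itself is dropped. Pass `i` writes
            // `i` nodes, so 0 + 1 + ... + (LIMIT - 1) = 105 drops are
            // expected once the arena goes away (checked below).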
            assert_eq!(drop_counter.get(), 0);
        }
    }
    assert_eq!(drop_counter.get(), (0..LIMIT).fold(0, |a, e| a + e) as u32);
}

#[test]
fn test_alloc_extend_with_drop_counter() {
    let drop_counter = Cell::new(0);
    {
        let arena = Arena::with_capacity(2);
        let iter = (0..100).map(|j| Node(None, j as u32, DropTracker(&drop_counter)));
        let older_ref = Some(&arena.alloc_extend(iter)[0]);
        assert_eq!(drop_counter.get(), 0);
        let iter = (0..100).map(|j| Node(older_ref, j as u32, DropTracker(&drop_counter)));
        arena.alloc_extend(iter);
        assert_eq!(drop_counter.get(), 0);
    }
    assert_eq!(drop_counter.get(), 200);
}

/// Test with bools.
///
/// Bools, unlike integers, have invalid bit patterns. Therefore, ever having an uninitialized bool
/// is insta-UB. Make sure miri doesn't find any such thing.
#[test]
fn test_alloc_uninitialized_bools() {
    const LEN: usize = 20;
    unsafe {
        let arena: Arena<bool> = Arena::with_capacity(2);
        let slice = arena.alloc_uninitialized(LEN);
        for elem in slice.iter_mut() {
            ptr::write(elem.as_mut_ptr(), true);
        }
        // Now it is fully initialized, we can safely transmute the slice.
        let slice: &mut [bool] = mem::transmute(slice);
        assert_eq!(&[true; LEN], slice);
    }
}

/// Check nothing bad happens by panicking during initialization of borrowed slice.
#[test]
fn alloc_uninitialized_with_panic() {
    struct Dropper(bool);

    impl Drop for Dropper {
        fn drop(&mut self) {
            // Just make sure we touch the value, to make sure miri would bite if it was
            // uninitialized
            if self.0 {
                panic!();
            }
        }
    }
    let mut reached_first_init = false;
    panic::catch_unwind(AssertUnwindSafe(|| unsafe {
        let arena: Arena<Dropper> = Arena::new();
        arena.reserve_extend(2);
        let uninitialized = arena.uninitialized_array();
        assert!((*uninitialized).len() >= 2);
        ptr::write((*uninitialized)[0].as_mut_ptr(), Dropper(false));
        reached_first_init = true;
        panic!("To drop the arena");
        // If it didn't panic, we would continue by initializing the second one and confirming by
        // .alloc_uninitialized();
    })).unwrap_err();
    assert!(reached_first_init);
}

#[test]
fn test_uninitialized_array() {
    let arena = Arena::with_capacity(2);
    let uninit = arena.uninitialized_array();
    arena.alloc_extend(0..2);
    unsafe {
        for (&a, b) in (&*uninit).iter().zip(0..2) {
            assert_eq!(a.assume_init(), b);
        }
        assert!((&*arena.uninitialized_array()).as_ptr() != (&*uninit).as_ptr());
        arena.alloc(0);
        let uninit = arena.uninitialized_array();
        assert_eq!((&*uninit).len(), 3);
    }
}

#[test]
fn dont_trust_the_iterator_size() {
    use std::iter::repeat;

    struct WrongSizeIter<I>(I);
    impl<I> Iterator for WrongSizeIter<I>
    where
        I: Iterator,
    {
        type Item = I::Item;

        fn next(&mut self) -> Option<Self::Item> {
            self.0.next()
        }

        fn size_hint(&self) -> (usize, Option<usize>) {
            (0, Some(0))
        }
    }
    impl<I> ExactSizeIterator for WrongSizeIter<I> where I: Iterator {}

    let arena = Arena::with_capacity(2);
    arena.alloc(0);
    let slice = arena.alloc_extend(WrongSizeIter(repeat(1).take(1_000)));
    // Allocation of 1000 elements should have created a new chunk
    assert_eq!(arena.chunks.borrow().rest.len(), 1);
    assert_eq!(slice.len(), 1000);
}

#[test]
fn arena_is_send() {
    fn assert_is_send<T: Send>(_: T) {}

    // If `T` is `Send`, ...
    assert_is_send(42_u32);

    // Then `Arena<T>` is also `Send`.
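    // (`Arena<T>` wraps a `RefCell<ChunkList<T>>`; `RefCell<U>` is `Send`
    // whenever `U` is, so the arena may move to another thread, while the
    // lack of `Sync` on `RefCell` keeps it from being shared by reference.)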
    let arena: Arena<u32> = Arena::new();
    assert_is_send(arena);
}

#[test]
fn iter_mut_low_capacity() {
    #[derive(Debug, PartialEq, Eq)]
    struct NonCopy(usize);

    const MAX: usize = 1_000;
    const CAP: usize = 16;

    let mut arena = Arena::with_capacity(CAP);
    for i in 1..MAX {
        arena.alloc(NonCopy(i));
    }

    assert!(
        arena.chunks.borrow().rest.len() > 1,
        "expected multiple chunks"
    );

    let mut iter = arena.iter_mut();
    for i in 1..MAX {
        assert_eq!(Some(&mut NonCopy(i)), iter.next());
    }

    assert_eq!(None, iter.next());
}

#[test]
fn iter_mut_high_capacity() {
    #[derive(Debug, PartialEq, Eq)]
    struct NonCopy(usize);

    const MAX: usize = 1_000;
    const CAP: usize = 8192;

    let mut arena = Arena::with_capacity(CAP);
    for i in 1..MAX {
        arena.alloc(NonCopy(i));
    }

    assert!(
        arena.chunks.borrow().rest.is_empty(),
        "expected single chunk"
    );

    let mut iter = arena.iter_mut();
    for i in 1..MAX {
        assert_eq!(Some(&mut NonCopy(i)), iter.next());
    }

    assert_eq!(None, iter.next());
}

fn assert_size_hint<T>(arena_len: usize, iter: IterMut<'_, T>) {
    let (min, max) = iter.size_hint();

    assert!(max.is_some());
    let max = max.unwrap();

    // Check that the actual arena length lies between the estimated min and max
    assert!(min <= arena_len);
    assert!(max >= arena_len);

    // Check that the min and max estimates are within a factor of 3
    assert!(min >= arena_len / 3);
    assert!(max <= arena_len * 3);
}

#[test]
fn size_hint() {
    #[derive(Debug, PartialEq, Eq)]
    struct NonCopy(usize);

    const MAX: usize = 32;
    const CAP: usize = 0;

    for cap in CAP..(CAP + 16/* check some non-power-of-two capacities */) {
        let mut arena = Arena::with_capacity(cap);
        for i in 1..MAX {
            arena.alloc(NonCopy(i));
            let iter = arena.iter_mut();
            assert_size_hint(i, iter);
        }
    }
}

#[test]
#[cfg_attr(miri, ignore)]
fn size_hint_low_initial_capacities() {
    #[derive(Debug, PartialEq, Eq)]
    struct NonCopy(usize);

    const MAX: usize = 25_000;
    const CAP: usize = 0;

    for cap in CAP..(CAP + 128/* check some non-power-of-two capacities */) {
        let mut arena = Arena::with_capacity(cap);
        for i in 1..MAX {
            arena.alloc(NonCopy(i));
            let iter = arena.iter_mut();
            assert_size_hint(i, iter);
        }
    }
}

#[test]
#[cfg_attr(miri, ignore)]
fn size_hint_high_initial_capacities() {
    #[derive(Debug, PartialEq, Eq)]
    struct NonCopy(usize);

    const MAX: usize = 25_000;
    const CAP: usize = 8164;

    for cap in CAP..(CAP + 128/* check some non-power-of-two capacities */) {
        let mut arena = Arena::with_capacity(cap);
        for i in 1..MAX {
            arena.alloc(NonCopy(i));
            let iter = arena.iter_mut();
            assert_size_hint(i, iter);
        }
    }
}

#[test]
#[cfg_attr(miri, ignore)]
fn size_hint_many_items() {
    #[derive(Debug, PartialEq, Eq)]
    struct NonCopy(usize);

    const MAX: usize = 5_000_000;
    const CAP: usize = 16;

    let mut arena = Arena::with_capacity(CAP);
    for i in 1..MAX {
        arena.alloc(NonCopy(i));
        let iter = arena.iter_mut();
        assert_size_hint(i, iter);
    }
}

===== typed-arena-2.0.1/.cargo_vcs_info.json =====

{
  "git": {
    "sha1": "8d0f94f052844e8064328d2e914a68010284df86"
  }
}