bumpalo-3.7.0/.cargo_vcs_info.json0000644000000001120000000000000124730ustar { "git": { "sha1": "a0b03414463d65dfcb67e4cd7626a278d5b3d47b" } } bumpalo-3.7.0/.gitattributes000064400000000000000000000000270000000000000141410ustar 00000000000000README.md -diff -merge bumpalo-3.7.0/.github/workflows/rust.yml000064400000000000000000000044700000000000000163710ustar 00000000000000name: Rust on: push: branches: [ master ] pull_request: branches: [ master ] env: CARGO_TERM_COLOR: always RUST_BACKTRACE: 1 jobs: build: strategy: matrix: rust_channel: ["stable", "beta", "nightly", "1.44.0"] feature_set: ["--features collections,boxed"] include: - rust_channel: "nightly" feature_set: "--all-features" exclude: - rust_channel: "nightly" feature_set: "--features collections,boxed" runs-on: ubuntu-latest steps: - name: Install rustup run: curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --profile=minimal - name: Install rust channel run: rustup install ${{matrix.rust_channel}} && rustup default ${{matrix.rust_channel}} - name: Install `cargo readme` run: cargo install cargo-readme --vers "^3" - uses: actions/checkout@v2 - name: Run tests (no features) run: cargo test --verbose - name: Run tests (features) run: cargo test --verbose ${{matrix.feature_set}} valgrind: runs-on: ubuntu-latest env: # Don't leak-check, as Rust globals tend to cause false positives. 
CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUNNER: "valgrind --suppressions=valgrind.supp --leak-check=no --error-exitcode=1 --gen-suppressions=all" steps: - name: Install rustup run: curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --profile=minimal - name: Install rust stable run: rustup install stable && rustup default stable - name: Install `cargo readme` run: cargo install cargo-readme --vers "^3" - name: Install valgrind run: sudo apt update && sudo apt install valgrind - uses: actions/checkout@v2 - name: Test under valgrind (no features) run: cargo test --verbose - name: Test under valgrind (features) run: cargo test --verbose --features collections,boxed benches: runs-on: ubuntu-latest steps: - name: Install rustup run: curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --profile=minimal - name: Install rust nightly run: rustup install nightly && rustup default nightly - uses: actions/checkout@v2 - name: Check that benches build run: cargo check --benches --all-features bumpalo-3.7.0/.gitignore000064400000000000000000000000070000000000000132340ustar 00000000000000target bumpalo-3.7.0/CHANGELOG.md000064400000000000000000000375320000000000000130720ustar 00000000000000## Unreleased Released YYYY-MM-DD. ### Added * TODO (or remove section if none) ### Changed * TODO (or remove section if none) ### Deprecated * TODO (or remove section if none) ### Removed * TODO (or remove section if none) ### Fixed * TODO (or remove section if none) ### Security * TODO (or remove section if none) -------------------------------------------------------------------------------- ## 3.7.0 Released 2020-05-28. ### Added * Added `Borrow` and `BorrowMut` trait implementations for `bumpalo::collections::Vec` and `bumpalo::collections::String`. [#108](https://github.com/fitzgen/bumpalo/pull/108) ### Changed * When allocating a new chunk fails, don't immediately give up. 
Instead, try allocating a chunk that is half that size, and if that fails, then try half of *that* size, etc until either we successfully allocate a chunk or we fail to allocate the minimum chunk size and then finally give up. [#111](https://github.com/fitzgen/bumpalo/pull/111) -------------------------------------------------------------------------------- ## 3.6.1 Released 2020-02-18. ### Added * Improved performance of `Bump`'s `Allocator::grow_zeroed` trait method implementation. [#99](https://github.com/fitzgen/bumpalo/pull/99) -------------------------------------------------------------------------------- ## 3.6.0 Released 2020-01-29. ### Added * Added a few new flavors of allocation: * `try_alloc` for fallible, by-value allocation * `try_alloc_with` for fallible allocation with an infallible initializer function * `alloc_try_with` for infallible allocation with a fallible initializer function * `try_alloc_try_with` method for fallible allocation with a fallible initializer function We already have infallible, by-value allocation (`alloc`) and infallible allocation with an infallible initializer (`alloc_with`). With these new methods, we now have every combination covered. Thanks to [Tamme Schichler](https://github.com/Tamschi) for contributing these methods! -------------------------------------------------------------------------------- ## 3.5.0 Released 2020-01-22. ### Added * Added experimental, unstable support for the unstable, nightly Rust `allocator_api` feature. The `allocator_api` feature defines an `Allocator` trait and exposes custom allocators for `std` types. Bumpalo has a matching `allocator_api` cargo feature to enable implementing `Allocator` and using `Bump` with `std` collections. 
First, enable the `allocator_api` feature in your `Cargo.toml`: ```toml [dependencies] bumpalo = { version = "3.4.0", features = ["allocator_api"] } ``` Next, enable the `allocator_api` nightly Rust feature in your `src/lib.rs` or `src/main.rs`: ```rust # #[cfg(feature = "allocator_api")] # { #![feature(allocator_api)] # } ``` Finally, use `std` collections with `Bump`, so that their internal heap allocations are made within the given bump arena: ``` # #![cfg_attr(feature = "allocator_api", feature(allocator_api))] # #[cfg(feature = "allocator_api")] # { #![feature(allocator_api)] use bumpalo::Bump; // Create a new bump arena. let bump = Bump::new(); // Create a `Vec` whose elements are allocated within the bump arena. let mut v = Vec::new_in(&bump); v.push(0); v.push(1); v.push(2); # } ``` I'm very excited to see custom allocators in `std` coming along! Thanks to Arthur Gautier for implementing support for the `allocator_api` feature for Bumpalo. -------------------------------------------------------------------------------- ## 3.4.0 Released 2020-06-01. ### Added * Added the `bumpalo::boxed::Box` type. It is an owned pointer referencing a bump-allocated value, and it runs `T`'s `Drop` implementation on the referenced value when dropped. This type can be used by enabling the `"boxed"` cargo feature flag. -------------------------------------------------------------------------------- ## 3.3.0 Released 2020-05-13. ### Added * Added fallible allocation methods to `Bump`: `try_new`, `try_with_capacity`, and `try_alloc_layout`. * Added `Bump::chunk_capacity` * Added `bumpalo::collections::Vec::try_reserve[_exact]` -------------------------------------------------------------------------------- ## 3.2.1 Released 2020-03-24. ### Security * When `realloc`ing, if we allocate new space, we need to copy the old allocation's bytes into the new space. 
There are `old_size` number of bytes in the old allocation, but we were accidentally copying `new_size` number of bytes, which could lead to copying bytes into the realloc'd space from past the chunk that we're bump allocating out of, from unknown memory. If an attacker can cause `realloc`s, and can read the `realoc`ed data back, this could allow them to read things from other regions of memory that they shouldn't be able to. For example, if some crypto keys happened to live in memory right after a chunk we were bump allocating out of, this could allow the attacker to read the crypto keys. Beyond just fixing the bug and adding a regression test, I've also taken two additional steps: 1. While we were already running the testsuite under `valgrind` in CI, because `valgrind` exits with the same code that the program did, if there are invalid reads/writes that happen not to trigger a segfault, the program can still exit OK and we will be none the wiser. I've enabled the `--error-exitcode=1` flag for `valgrind` in CI so that tests eagerly fail in these scenarios. 2. I've written a quickcheck test to exercise `realloc`. Without the bug fix in this patch, this quickcheck immediately triggers invalid reads when run under `valgrind`. We didn't previously have quickchecks that exercised `realloc` beacuse `realloc` isn't publicly exposed directly, and instead can only be indirectly called. This new quickcheck test exercises `realloc` via `bumpalo::collections::Vec::resize` and `bumpalo::collections::Vec::shrink_to_fit` calls. This bug was introduced in version 3.0.0. See [#69](https://github.com/fitzgen/bumpalo/issues/69) for details. -------------------------------------------------------------------------------- ## 3.2.0 Released 2020-02-07. ### Added * Added the `bumpalo::collections::Vec::into_bump_slice_mut` method to turn a `bumpalo::collections::Vec<'bump, T>` into a `&'bump mut [T]`. 
-------------------------------------------------------------------------------- ## 3.1.2 Released 2020-01-07. ### Fixed * The `bumpalo::collections::format!` macro did not used to accept a trailing comma like `format!(in bump; "{}", 1,)`, but it does now. -------------------------------------------------------------------------------- ## 3.1.1 Released 2020-01-03. ### Fixed * The `bumpalo::collections::vec!` macro did not used to accept a trailing comma like `vec![in bump; 1, 2,]`, but it does now. -------------------------------------------------------------------------------- ## 3.1.0 Released 2019-12-27. ### Added * Added the `Bump::allocated_bytes` diagnostic method for counting the total number of bytes a `Bump` has allocated. -------------------------------------------------------------------------------- # 3.0.0 Released 2019-12-20. ## Added * Added `Bump::alloc_str` for copying string slices into a `Bump`. * Added `Bump::alloc_slice_copy` and `Bump::alloc_slice_clone` for copying or cloning slices into a `Bump`. * Added `Bump::alloc_slice_fill_iter` for allocating a slice in the `Bump` from an iterator. * Added `Bump::alloc_slice_fill_copy` and `Bump::alloc_slice_fill_clone` for creating slices of length `n` that are filled with copies or clones of an inital element. * Added `Bump::alloc_slice_fill_default` for creating slices of length `n` with the element type's default instance. * Added `Bump::alloc_slice_fill_with` for creating slices of length `n` whose elements are initialized with a function or closure. * Added `Bump::iter_allocated_chunks` as a replacement for the old `Bump::each_allocated_chunk`. The `iter_allocated_chunks` version returns an iterator, which is more idiomatic than its old, callback-taking counterpart. Additionally, `iter_allocated_chunks` exposes the chunks as `MaybeUninit`s instead of slices, which makes it usable in more situations without triggering undefined behavior. 
See also the note about bump direction in the "changed" section; if you're iterating chunks, you're likely affected by that change! * Added `Bump::with_capacity` so that you can pre-allocate a chunk with the requested space. ### Changed * **BREAKING:** The direction we allocate within a chunk has changed. It used to be "upwards", from low addresses within a chunk towards high addresses. It is now "downwards", from high addresses towards lower addresses. Additionally, the order in which we iterate over allocated chunks has changed! We used to iterate over chunks from oldest chunk to youngest chunk, and now we do the opposite: the youngest chunks are iterated over first, and the oldest chunks are iterated over last. If you were using `Bump::each_allocated_chunk` to iterate over data that you had previously allocated, and *you want to iterate in order of oldest-to-youngest allocation*, you need to reverse the chunks iterator and also reverse the order in which you loop through the data within a chunk! For example, if you had this code: ```rust unsafe { bump.each_allocated_chunk(|chunk| { for byte in chunk { // Touch each byte in oldest-to-youngest allocation order... } }); } ``` It should become this code: ```rust let mut chunks: Vec<_> = bump.iter_allocated_chunks().collect(); chunks.reverse(); for chunk in chunks { for byte in chunk.iter().rev() { let byte = unsafe { byte.assume_init() }; // Touch each byte in oldest-to-youngest allocation order... } } ``` The good news is that this change yielded a *speed up in allocation throughput of 3-19%!* See https://github.com/fitzgen/bumpalo/pull/37 and https://fitzgeraldnick.com/2019/11/01/always-bump-downwards.html for details. * **BREAKING:** The `collections` cargo feature is no longer on by default. You must explicitly turn it on if you intend to use the `bumpalo::collections` module. * `Bump::reset` will now retain only the last allocated chunk (the biggest), rather than only the first allocated chunk (the smallest). 
This should enable `Bump` to better adapt to workload sizes and quickly reach a steady state where new chunks are not requested from the global allocator. ### Removed * The `Bump::each_allocated_chunk` method is removed in favor of `Bump::iter_allocated_chunks`. Note that its safety requirements for reading from the allocated chunks are slightly different from the old `each_allocated_chunk`: only up to 16-byte alignment is supported now. If you allocate anything with greater alignment than that into the bump arena, there might be uninitilized padding inserted in the chunks, and therefore it is no longer safe to read them via `MaybeUninit::assume_init`. See also the note about bump direction in the "changed" section; if you're iterating chunks, you're likely affected by that change! * The `std` cargo feature has been removed, since this crate is now always no-std. ## Fixed * Fixed a bug involving potential integer overflows with large requested allocation sizes. -------------------------------------------------------------------------------- # 2.6.0 Released 2019-08-19. * Implement `Send` for `Bump`. -------------------------------------------------------------------------------- # 2.5.0 Released 2019-07-01. * Add `alloc_slice_copy` and `alloc_slice_clone` methods that allocate space for slices and either copy (with bound `T: Copy`) or clone (with bound `T: Clone`) the provided slice's data into the newly allocated space. -------------------------------------------------------------------------------- # 2.4.3 Released 2019-05-20. * Fixed a bug where chunks were always deallocated with the default chunk layout, not the layout that the chunk was actually allocated with (i.e. if we started growing largers chunks with larger layouts, we would deallocate those chunks with an incorrect layout). -------------------------------------------------------------------------------- # 2.4.2 Released 2019-05-17. * Added an implementation `Default` for `Bump`. 
* Made it so that if bump allocation within a chunk overflows, we still try to allocate a new chunk to bump out of for the requested allocation. This can avoid some OOMs in scenarios where the chunk we are currently allocating out of is very near the high end of the address space, and there is still available address space lower down for new chunks. -------------------------------------------------------------------------------- # 2.4.1 Released 2019-04-19. * Added readme metadata to Cargo.toml so it shows up on crates.io -------------------------------------------------------------------------------- # 2.4.0 Released 2019-04-19. * Added support for `realloc`ing in-place when the pointer being `realloc`ed is the last allocation made from the bump arena. This should speed up various `String`, `Vec`, and `format!` operations in many cases. -------------------------------------------------------------------------------- # 2.3.0 Released 2019-03-26. * Add the `alloc_with` method, that (usually) avoids stack-allocating the allocated value and then moving it into the bump arena. This avoids potential stack overflows in release mode when allocating very large objects, and also some `memcpy` calls. This is similar to the `copyless` crate. Read [the `alloc_with` doc comments][alloc-with-doc-comments] and [the original issue proposing this API][issue-proposing-alloc-with] for more. [alloc-with-doc-comments]: https://github.com/fitzgen/bumpalo/blob/9f47aee8a6839ba65c073b9ad5372aacbbd02352/src/lib.rs#L436-L475 [issue-proposing-alloc-with]: https://github.com/fitzgen/bumpalo/issues/10 -------------------------------------------------------------------------------- # 2.2.2 Released 2019-03-18. * Fix a regression from 2.2.1 where chunks were not always aligned to the chunk footer's alignment. -------------------------------------------------------------------------------- # 2.2.1 Released 2019-03-18. 
* Fix a regression in 2.2.0 where newly allocated bump chunks could fail to have capacity for a large requested bump allocation in some corner cases. -------------------------------------------------------------------------------- # 2.2.0 Released 2019-03-15. * Chunks in an arena now start out small, and double in size as more chunks are requested. -------------------------------------------------------------------------------- # 2.1.0 Released 2019-02-12. * Added the `into_bump_slice` method on `bumpalo::collections::Vec`. -------------------------------------------------------------------------------- # 2.0.0 Released 2019-02-11. * Removed the `BumpAllocSafe` trait. * Correctly detect overflows from large allocations and panic. -------------------------------------------------------------------------------- # 1.2.0 Released 2019-01-15. * Fixed an overly-aggressive `debug_assert!` that had false positives. * Ported to Rust 2018 edition. -------------------------------------------------------------------------------- # 1.1.0 Released 2018-11-28. * Added the `collections` module, which contains ports of `std`'s collection types that are compatible with backing their storage in `Bump` arenas. * Lifted the limits on size and alignment of allocations. -------------------------------------------------------------------------------- # 1.0.2 -------------------------------------------------------------------------------- # 1.0.1 -------------------------------------------------------------------------------- # 1.0.0 bumpalo-3.7.0/Cargo.toml0000644000000025360000000000000105050ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies # # If you believe there's an error in this file please file an # issue against the rust-lang/cargo repository. 
If you're # editing this file be aware that the upstream Cargo.toml # will likely look very different (and much more reasonable) [package] edition = "2018" name = "bumpalo" version = "3.7.0" authors = ["Nick Fitzgerald "] description = "A fast bump allocation arena for Rust." documentation = "https://docs.rs/bumpalo" readme = "./README.md" categories = ["memory-management", "rust-patterns", "no-std"] license = "MIT/Apache-2.0" repository = "https://github.com/fitzgen/bumpalo" [package.metadata.docs.rs] all-features = true [lib] path = "src/lib.rs" bench = false [[test]] name = "try_alloc" path = "tests/try_alloc.rs" harness = false [[bench]] name = "benches" path = "benches/benches.rs" harness = false required-features = ["collections"] [dev-dependencies.criterion] version = "0.3.0" [dev-dependencies.quickcheck] version = "0.9.0" [dev-dependencies.rand] version = "0.7" [features] allocator_api = [] boxed = [] collections = [] default = [] bumpalo-3.7.0/Cargo.toml.orig0000644000000014650000000000000114440ustar [package] authors = ["Nick Fitzgerald "] categories = ["memory-management", "rust-patterns", "no-std"] description = "A fast bump allocation arena for Rust." 
documentation = "https://docs.rs/bumpalo" edition = "2018" license = "MIT/Apache-2.0" name = "bumpalo" readme = "./README.md" repository = "https://github.com/fitzgen/bumpalo" version = "3.7.0" [package.metadata.docs.rs] all-features = true [lib] path = "src/lib.rs" bench = false [[bench]] name = "benches" path = "benches/benches.rs" harness = false required-features = ["collections"] [[test]] name = "try_alloc" path = "tests/try_alloc.rs" harness = false [dev-dependencies] quickcheck = "0.9.0" criterion = "0.3.0" rand = "0.7" [features] default = [] collections = [] boxed = [] allocator_api = [] # [profile.bench] # debug = true bumpalo-3.7.0/Cargo.toml.orig000064400000000000000000000014650000000000000141440ustar 00000000000000[package] authors = ["Nick Fitzgerald "] categories = ["memory-management", "rust-patterns", "no-std"] description = "A fast bump allocation arena for Rust." documentation = "https://docs.rs/bumpalo" edition = "2018" license = "MIT/Apache-2.0" name = "bumpalo" readme = "./README.md" repository = "https://github.com/fitzgen/bumpalo" version = "3.7.0" [package.metadata.docs.rs] all-features = true [lib] path = "src/lib.rs" bench = false [[bench]] name = "benches" path = "benches/benches.rs" harness = false required-features = ["collections"] [[test]] name = "try_alloc" path = "tests/try_alloc.rs" harness = false [dev-dependencies] quickcheck = "0.9.0" criterion = "0.3.0" rand = "0.7" [features] default = [] collections = [] boxed = [] allocator_api = [] # [profile.bench] # debug = true bumpalo-3.7.0/LICENSE-APACHE000064400000000000000000000251370000000000000132030ustar 00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. 
"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. 
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the 
Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. 
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. 
To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. bumpalo-3.7.0/LICENSE-MIT000064400000000000000000000020430000000000000127020ustar 00000000000000Copyright (c) 2019 Nick Fitzgerald Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. bumpalo-3.7.0/README.md000064400000000000000000000146300000000000000125320ustar 00000000000000# `bumpalo` **A fast bump allocation arena for Rust.** [![](https://docs.rs/bumpalo/badge.svg)](https://docs.rs/bumpalo/) [![](https://img.shields.io/crates/v/bumpalo.svg)](https://crates.io/crates/bumpalo) [![](https://img.shields.io/crates/d/bumpalo.svg)](https://crates.io/crates/bumpalo) [![Build Status](https://github.com/fitzgen/bumpalo/workflows/Rust/badge.svg)](https://github.com/fitzgen/bumpalo/actions?query=workflow%3ARust) ![](https://github.com/fitzgen/bumpalo/raw/master/bumpalo.png) ### Bump Allocation Bump allocation is a fast, but limited approach to allocation. We have a chunk of memory, and we maintain a pointer within that memory. Whenever we allocate an object, we do a quick test that we have enough capacity left in our chunk to allocate the object and then update the pointer by the object's size. *That's it!* The disadvantage of bump allocation is that there is no general way to deallocate individual objects or reclaim the memory region for a no-longer-in-use object. These trade offs make bump allocation well-suited for *phase-oriented* allocations. That is, a group of objects that will all be allocated during the same program phase, used, and then can all be deallocated together as a group. ### Deallocation en Masse, but No `Drop` To deallocate all the objects in the arena at once, we can simply reset the bump pointer back to the start of the arena's memory chunk. This makes mass deallocation *extremely* fast, but allocated objects' `Drop` implementations are not invoked. 
> **However:** [`bumpalo::boxed::Box`][crate::boxed::Box] can be used to wrap > `T` values allocated in the `Bump` arena, and calls `T`'s `Drop` > implementation when the `Box` wrapper goes out of scope. This is similar to > how [`std::boxed::Box`] works, except without deallocating its backing memory. [`std::boxed::Box`]: https://doc.rust-lang.org/std/boxed/struct.Box.html ### What happens when the memory chunk is full? This implementation will allocate a new memory chunk from the global allocator and then start bump allocating into this new memory chunk. ### Example ```rust use bumpalo::Bump; use std::u64; struct Doggo { cuteness: u64, age: u8, scritches_required: bool, } // Create a new arena to bump allocate into. let bump = Bump::new(); // Allocate values into the arena. let scooter = bump.alloc(Doggo { cuteness: u64::max_value(), age: 8, scritches_required: true, }); // Exclusive, mutable references to the just-allocated value are returned. assert!(scooter.scritches_required); scooter.age += 1; ``` ### Collections When the `"collections"` cargo feature is enabled, a fork of some of the `std` library's collections are available in the `collections` module. These collection types are modified to allocate their space inside `bumpalo::Bump` arenas. ```rust use bumpalo::{Bump, collections::Vec}; // Create a new bump arena. let bump = Bump::new(); // Create a vector of integers whose storage is backed by the bump arena. The // vector cannot outlive its backing arena, and this property is enforced with // Rust's lifetime rules. let mut v = Vec::new_in(&bump); // Push a bunch of integers onto `v`! for i in 0..100 { v.push(i); } ``` Eventually [all `std` collection types will be parameterized by an allocator](https://github.com/rust-lang/rust/issues/42774) and we can remove this `collections` module and use the `std` versions. For unstable, nightly-only support for custom allocators in `std`, see the `allocator_api` section below. 
### `bumpalo::boxed::Box` When the `"boxed"` cargo feature is enabled, a fork of `std::boxed::Box` library is available in the `boxed` module. This `Box` type is modified to allocate its space inside `bumpalo::Bump` arenas. **A `Box` runs `T`'s drop implementation when the `Box` is dropped.** You can use this to work around the fact that `Bump` does not drop values allocated in its space itself. ```rust use bumpalo::{Bump, boxed::Box}; use std::sync::atomic::{AtomicUsize, Ordering}; static NUM_DROPPED: AtomicUsize = AtomicUsize::new(0); struct CountDrops; impl Drop for CountDrops { fn drop(&mut self) { NUM_DROPPED.fetch_add(1, Ordering::SeqCst); } } // Create a new bump arena. let bump = Bump::new(); // Create a `CountDrops` inside the bump arena. let mut c = Box::new_in(CountDrops, &bump); // No `CountDrops` have been dropped yet. assert_eq!(NUM_DROPPED.load(Ordering::SeqCst), 0); // Drop our `Box`. drop(c); // Its `Drop` implementation was run, and so `NUM_DROPS` has been incremented. assert_eq!(NUM_DROPPED.load(Ordering::SeqCst), 1); ``` ### `#![no_std]` Support Bumpalo is a `no_std` crate. It depends only on the `alloc` and `core` crates. ### Thread support The `Bump` is `!Send`, which makes it hard to use in certain situations around threads ‒ for example in `rayon`. The [`bumpalo-herd`](https://crates.io/crates/bumpalo-herd) crate provides a pool of `Bump` allocators for use in such situations. ### Nightly Rust `feature(allocator_api)` Support The unstable, nightly-only Rust `allocator_api` feature defines an `Allocator` trait and exposes custom allocators for `std` types. Bumpalo has a matching `allocator_api` cargo feature to enable implementing `Allocator` and using `Bump` with `std` collections. Note that, as `feature(allocator_api)` is unstable and only in nightly Rust, Bumpalo's matching `allocator_api` cargo feature should be considered unstable, and will not follow the semver conventions that the rest of the crate does. 
First, enable the `allocator_api` feature in your `Cargo.toml`: ```toml [dependencies] bumpalo = { version = "3.4.0", features = ["allocator_api"] } ``` Next, enable the `allocator_api` nightly Rust feature in your `src/lib.rs` or `src/main.rs`: ```rust #![feature(allocator_api)] ``` Finally, use `std` collections with `Bump`, so that their internal heap allocations are made within the given bump arena: ```rust #![feature(allocator_api)] use bumpalo::Bump; // Create a new bump arena. let bump = Bump::new(); // Create a `Vec` whose elements are allocated within the bump arena. let mut v = Vec::new_in(&bump); v.push(0); v.push(1); v.push(2); ``` #### Minimum Supported Rust Version (MSRV) This crate is guaranteed to compile on stable Rust 1.44 and up. It might compile with older versions but that may change in any new patch release. We reserve the right to increment the MSRV on minor releases, however we will strive to only do it deliberately and for good reasons. bumpalo-3.7.0/README.tpl000064400000000000000000000000320000000000000127200ustar 00000000000000# `{{crate}}` {{readme}} bumpalo-3.7.0/bumpalo.png000064400000000000000000001473770000000000000134370ustar 00000000000000PNG  IHDR hWZHWbKGD pHYs  tIME 0s IDATxύaB`(9p$M!( 8N`jUŪby`4M=߾,Љ%t7u}f %uX`YtN @D)tO @D07Sbb"MkW)@~н#1[tG 퉉\׃_Х2Ʋ@/L=%fKCZe%$>=㺹|ehGhz6vЈ`\n1y`@VK>X.x|/k|~=O#~{mx192)mMH][/wWab {Cף1yސ>e{r`ot>߿|QūQ,tUzWS՘0qr00Q,t֠08v=5Y%i߿| M&n}EtA${~!`0&0;>5dɰ!_S{D`+]kUX3jG ^M}ls/>= [z"`T]L-nZ), 0Q,;XNbV̸&>r[!hi{5rD?Zwg"ԒaIg4Q,=Z#{Pz6 cC֧` Xԭ VcZO `hgb̽`56f-X ܛOXz|rkSOCՊq{ {DB(9bs󿽽湖^im//|Ɩ<+`YDekg'^NXbhQřIǔkm@5e=Т5T˵(vV/ϪN. 
(d CC^0-56=W^hRH$(a=Ǘ5ؘL{u[Z Q,d846ukLj}VZ 5!3X>|jhSRK( M=ڛYb׷Nd\[1QdzE(Tfнr+1!x?jz>s Mp_bVhzν)~C55=z\9<抉C @+NiG;zWb{L5!6mtRh(ᅄ!1֒JƩGP{mʚoiН 2$ csox)dbZ,@DL#t=Ȇ^`rg%ښ 0'Q,5Y "c"+Z:uCc=DaQtI9&}^ɮ'2vBh5gr`KGE' p Q,])146M=֘v,ƦW9+L @N yLZc*6yFbZbVht4k[; b}f+pq֎7׹bb܃ԣ`2djJG걦<u (N-(N=-[̴R{s^2ʢWDP@k^\"}z/|d"(J PZ{\#n#i5"5{\un5DHX+=e㺯cu(n"X DZ k szi鉧{_*|~޳3{hTXDX3ަqe55:uk}@Q,G{!QfJ\{0s"p.Sx  Ɣ뼒 JzsƮg)ىb!kW+܊\^?G[zmu$˚5-bDͩ)0D&ޖbda,@Q,8s%'nSkC3D\j+,QsL̼zxbvb"djc Q>i=NE (QfP[=3!jH`{[0(`2fڒuYCwcb[4{>Dҫ3{6F\zx{ Q,k-}EQaU^: Ocɻ5X}%('>O yւqfXŏ(-qb+GQ!dH {vؘI5_#d s \5n X.8>LyfrlSNcϽ^ƞ9}z(e)Ք cYC'VtDX a¾S_+HFKWH]\0sއ=XLAkEH(3&KǍ{"V,^uؚ\xX VJ]w>ʮ ?[ Nů-5`Ɩڇ1փ_}LSV蕞YzZ,M~7Qs,lDtHPxN06ؖ- ib+s]`յcgJɵΆ9ֹ|\E '_sbƤF|AȄ$% }̷7K *N%<. y>탽.&+@.vXsZl(D'5 'c"R:@=;v̤IF{zkk>f'J[<`$N.xjNMWLj<{0KtŲD0uR,n8#Mo!uל\ Q,Qn {l2R+ !!e°֥R8s+Fd1"kr&Uh +Q,a^[G)v#ǣ#M,yf .؜b(O 4(>+ed[ "[[Boo?>n\3ϕr$j7xnmƾ Xc֭XD>}d-yMN9^l,Z+̵3N@ǘ{ٙ2sLx-M1q1457g,zK}{toM Q,mbs{wO8?q=-ىi\r^ P(3sf㟣\XBPVu_qާĉgCFc_ioL~pQ,H2a Psޯ]vDƦ]ո>Gq.ID,@K]cۘɶGQn1u]oo?>>X4<[L 5I`:]@R&~Hx:֗{t^%&יtƆK+QCrZ?b]ίOp5¤R| dGeC"ҞL I N ,Kڏy/׶02uʶ| űDП aCzdk/6˵{o\a,0Q,#[t$jhY~Gxpsjص{Qb^t;ct(b:lt9pԵm/~B٫Xlrg?cZ,^V5þM@&0)UX%ݾW]ϩpo\w{P(s0(z"~k k=8JPI@6-L!k Ć.ݸ=L 9/yO\ug|xM|텱@DyǦk1}g^GkbֈKOyRq cKgKiB64qQ,OJ@%u UV+سW{(>}uF iE>OcDZǾ\lxj8(N e?Uls,zDZ緼/r]Z.^ʘ 9a,P(.*zF{6-~|>Vmﺅ^z-^kJ?[h3}$a,P(zLK[qW-M<3qc[/~bGy@X aX0k@6@A¼:k0Z [(N+z%5B~ ޿k$#=2ڊeǶvg1+5!4GK]^޹$_jDo=KG?z v|ߖ"Z:kW+gu^#eO^Uj@f{ak|uYZ-ڕ:a,tBcɽ@[{hú eDںDgyalTPBjE-h=MY'f=zo c7=ךG܇0bBD5b5O?7&:}˾9fcw?^O J%b$^!cP[cn=}?zT{ͅUDI-Y㧯5Ua,0>H)0(B İy=F[b??4mujUuGcŗf}]V _'+bEs08%gc9WıAUb?L0׻Z眍SB^oX(z%qzl D@u0أǧZ:Gc)Gb=&Z^RUc@6X8"{!+Gq>5ık#ۥں7רX CgؐxsO"?ScřKUkMjE9"srߙ0r}^ 5u^os0>@Q,Di0}]+ur῎ &oQ2k--ƖDҏ9X%`Bэ+W٨nk2lMm?kgΧs3?cBMk5̤X`ϡO_|<-G!yS_>>{LJ0#SWlz>0BbK{̺9>0D@՟ғ^퉟I}g@Q,<e+}5v¼^_bbPc0_{h{>TYo/^2w{ܴX (m61a==h9kAuT IDAT$Prk0vvC"mk/Xob X`v?xcMM%uyMr0isqL]KN ]# cYoZ,pH Bi5Z cM6߳{{Qb}Z%`zy`A{\)b˘&GNjڊU[9 ]ci{uf HЄ9aN9[!V(vk2OL?S[[[!еZ=kk/ OL[ht۫cb$~m:~~v?'Ȗ^W.(qkާ9.=-5Y7@n,X-F>~݄_>\ߟ횾:Q^r#轱.rsg&Svz1/m.L F+ 
(LQO_O[^1DjDkǞdƱB OLo͎ׯ&{=:ȦıbiTg{qYL7ӴؘYybbhB[T1iX)ƊJ)IȽ_J E PuމڽO9!qlH1Db|zEΰ,erds^y?xm'^3.9b"=GG-8 K=%&@Q,Zi˔ك֚ZRqg'u{̨f pD ֽ?1R4C4[zR_0RO:LF d:62wŭ{ksUcZ,.&55M T1[qbr!-c?&=bZ,,(h+l1( cckGn{*55A d!fXqɱ!){O{AeZ, +Qf/bas%>s!L̏QzBێ˽уX5cx {y35{;eYnVeR,# O<9c;9?_ScCҫǞh)ez,B D6e˄lm{hj[ 9ΘqX=}@Q؏uMC~~q߿|E eR,@ڐ(-vZcSI9], 05[^MޫW/^u )qB"ަ~5<^}]h)W}|Y~9L`T~lr!{>~]K_qk]A,dZ/z~ CF23ؐɯؽkqZlyc~YLf `z C[1^m/D0$DLC _a,{p'DX}a]D0|{{716_>o?}]~ߺ9iqL XFEGϹ5x5IiGcMQ7b#Rd1Ϛgt5!sZq}@D@GRKaHɊha5kf(q/:|yw=FX ]y=yw6\?&]ϻv~WI@V!O_|B{ =3MRQ\CN^2Ll)%Zߛįd=-cb`:X_>>O YcNfiyoN__8M=,nhY`TM)q} $|q.;)E*_'#И,4O_| 6DZN s;qA|ۻb2A=}mIH'U *?-˲,~_?~^vv51%_ϵ%GT; \>Zߔ㋹V@#5ψZ(wJ۱Sr!l{,gsJ1ג@GNֿcxTCQ5\ |ȏZoccCVƺc0גuON Ts^S0Vs} /(? szl +5-[:3`ȯOS> (wZyZSao=:|(v`-QlrO<C~r.y>^֎`oypC,ГhAdԽODtma/W[0Zq&]'*#2)vr%^LTהɨ#M5)$w [[B{˽^?8n4¤Xj-VH6Kz:~`jİn9׮k= 0 [~1ⷷw?uXI{ǺfF/ښ{˱^ ns|\ 4@ Dk)]9{Sư˲lŰQrX^X'X b˷P+^M~s1-kkvYcX ș`Qh큒1=жix @'D(PۏkU:ܚu===G>i[XİQ,+$#lm {ǺF"}8Ͽ?3][0ȝ3țQh={^,sN=:g|31bF1 [ϧ$Բ~eZlLMt_@5epư'~q4>7a^z_L͠:B#}|0:_rguڝ@G { ר:@L~H !Mz] 01I[Vv9W<0wuxk>[{}q,^@{e+Ήb]_?Eؚ= I>Ʃ%ϽT[-@> ½?l@A죐8yMG&Q so.fbϬC5Z+-{dR,L.e2WepvZcc>lA잽y zb`PDG#xpkJ3Sc^~d} ?_}7k>W(:ݖkX3LĤX6$AűV~g]t-^/*155*qn> I3pg!Sc\ېu/zާ|/:%Qlf{ v$|шb bűh_=.u*ޏFtk´? 
[S׬DLoݻ0Qj c +jFg4]'[{hZ+nqDJG[q063竵^N1c%ndQǔk:VBX,P(՚깅{8bC Z+@qә= F]m]hmSz=@{xx5&n]~DL koxs&Qߤј8׍yͿ??;qq$|˟|5.5q){k0ܠ?ؘq=fWj=kL]ؙ`1)>W: }wx(xO\[;s/Hu\X_S+kWLI#'FbZbe{bp05.5%6dOv c3wF:\F^Sb+M:L-v1A X[cMm vCX)={̈S0/ bG aKJn!(uŏy{co͏Z3l{ 1KĬ'MF=)j(L(?UX_MԸGk bއ|}>qaKL]ؑZo P%`b 1=cT'*։AVTX0XH@o@NTp5!Հ5.<J̉{cw-xTwVJ,© Sty}WQ%V X@,Ǔ*$]%bZbQ%:B!y£Ovs6\6U`DѫVb2X0'0UEX.; fТ2beLj4 T%V Q)j~N@,c; T .j) qsYV*\A(%]-.yѧ5ݬ7i]=`0L3׎<# -+ N4wF(}kҮ{-L( bI`SVkMZ{YPcUjP&86涛59qtϜc Ǎ`k[A J*@1SUz݆Z*X* VTJ|MoWCQ}jSz]s6sLk " 0;cgu,52%Lw\10 U%FbdbJvsS:[oub﷊@@,@J+ U Vm.:v0i(6~ Ɲs}m 0bU*c DU}eq I ~W`jZ& Єb6Ϻ`l6;d0aӶZ10oNbY%vH Uk"9aХJy\-tdR,ԈtVW5v1±!1BKKkGc`8!MJݚ~uq]ڇYca PoGmP"qCkF@T*D*ƮU]mw$ c$ bj./d,mkVvZq?yXڽk6]kaX% @6C S4ZcaP6bȔ ,9 Hܪ<'5~vskn@츚i75N FR,$fRi[]wURVv:UJl^g % TDuy깈O vԶ*;n09XHܐaUa Ħ`\sI {Ku % 6 FXH0 lz˜͟}o2H(-mv°G0`b #m°/߈l^t *gOʚ9 kK r4 X.y&شgm@=X@]N6~bU`s XHXYRx%> A5cTeyi bN@D(`*COXHخ_ކĪJޜ/tʤc/s~6!Tͯ?q{YT(fXmMLC( @l`,qdU]b": PM( 8 Ğ:}^,HV\ I 6H ?ύ +XrRhR55duX2mshƖ*N( ϐ]G p,;[EX6Bm`>8 LBXi&Ț>w IDATY7T&cɱ ܩ ,q.Ub몽vkNAsԼ1V`"+STl/V O/ѽkqW g;h x'ЗP,$hĔ>~}+6}N`cOӀgwro~`b] CӰS_Xc`vcIHEꮀm@|CT\5I6_Gn+> +B̚@ `] BEi%3AR lw˷_.:gXjf!4c8rP8u|m0ء?y}L[rR,s3]a\m2w<&־ƨ^+ۮߴT`1B]ز9|H̴ 49fٷ.]ڵ͉{bU`nb PmXպl´gӘ>-c&tR53Ug|0:r Xp0 mǐ!Qc{b\mj֨Ub o2R,s&AgCWIVHCnϻ~دǸz TsUIXfogC@V!ip5=7 Ħ?J(>86هnvڥ8\I V%VwPE( q KWV}06=uAP' Ymo Y@l93±yW`FbX`,FCA\nN0]i{!xs>c2&`Av?*jבÇl?vvQ]k~D|q9@ vզo7o#X b 8 Kmպwڞ_݇]L4T]>bn/bXB.0Re*,ǔױi3ƌie΃ƨ;9 xbXo)2a^Ru7oK?.l;rL)R.Ub>4I58>ר'jcW".{.w0,dKXJ؉}3-P KɼZrξRiڣ>UbȒJ@cM9]C3ST a*Nc5OfNەgPk,šx > © ج &pUiǷ̻si]?G\;X*ؚSؕ,BbʶU;d7TزwqϽa¶ Slݒ5ЁP,pِfSVP=rPm65 ;K*&:vss;8;9>KĪx @Bklnt[fqñClT\Z WcHlAv|̓P,,nC[v`"`V1t,l)+͍UP%`?4tVǮ[f;~n:о !S\xLױLfC%șP,KX0Kp˜e%1bǯBडu7m߻v({Ȑvm- <&׺r 0Gf{PA&4mZ~]Hgn nsa+nb걇s"B(%h,Ěbm vfmeֲu!]Tm2¼HOWe,ڶ?^Ll1Jc = 0B,b9ByuA.!7Ms5t;uje= n5jmccOcd3(<6cklqmC&>$l,@iL$GJuP(j -_PP>D[*%N6 F <99ׯ1rڅg!!v& b M4o.m<8(5FPu,-__؎5N+bѳ_EJc`"fbIsVɟԅc+8 EEpٺ]rlTvGR-X* =i *fT_Tڣ*4ZĐg5.!+vyyҢ=V]BXl aTpkiPyZ &tKzZVXe:bXj_ܵJ^Ç:mwk."ߏkRכ5F EZxcca)`mjm*4;NK el55>vb3=| z|: 
a!TJ]UlW!r3{c]RY#"Qx&cѽE㛶Uv󪥌$ "]KPpIa;I{){b^(F#ULطUN& &waXcx+[cku..TYO>>tsۯ#1SvgY7bA(\(TdvӌQ"`|nTym.1Զ;]A y+BJch'[_$t! 5D p)(ܤS_󍽏KGcәڄhs~β}`/W֙孥S% zC8iu@9N{bi+c0l:}ǘV`bmӦ:ʞ,:A8wR?AEɭȧ3@oBnB9)pӯ*Ʃkۿu0mZ4hcm PqޣZ,L @Ui# n?v6ۭXa#hҞژ 1`vbUCǘa!v ]W G´  y^hk jcl!«);r낲^B)io"E0/*bhu@t}kk*9YM5)C !EkSr,P,lׁ!k00_ Ʈo8k^ ̈P,fᙔ{u.>Oپcܿ>ch3Z嫺Ўxv XblKVݾVmklیZ-vR7&%6/aڞ`b $7us75DZݢJa& 3r7fؾ^ Q&?VعXczWKX?Ck% ;]TP~7d#>w筺_୊y1 ys9ܢ35cI(fl@Iѩ/vUl}æcVEpq ^sCugf^ ̏P,plc=.}CxB=,0ʘiuȈP,Դ\*ĥ:v8*@+6<ܓYܶԞ`moMqW86b?yf VXkMZdžC0`b`VGj\? | ƺoiZV]Ş_1檪`n`ܥ $N(fsm|~KnR7]Ж D! 0X"P̘lV˺vxK&ebX+Kǩq  Sܟ}HP,&f[Xմ>%xJCgU6bX,auA8<[B)6S\cB([Zxe`bUօ@cwPVNM-?]sgB@$E*r`3cD0 B,^ X'1ó]}6 v}=Mjd0+ jUYȵߢ- >Xfg.[ o.:mCW]WU5 }\tR5kAؖvU_9iNp ڒ[!)`l$]| `.bNbCr+ *SkJp,$v^@0l>k=b`ˉ{T \&SGz\Yz&Y7iƎ)8 VBö oP g Q61(*q>b=nw\X_A_IںI.;~&b걞G9=  @\` vry[ڼb辪SUu@ZŵKiXwʲCUѶVSvvGx)[rY"|grfMJoͺL*rWUu<:k'S1B_]gtjxʹl0Fkc"p'p1 ȮZl@Xb*iCTO Q9=T- u]״cr-RhcbE4b"R!ñEJ}Xg;6:D(v{l滶cW.ֱj7^( 4 QLH#bk ylN\jcSJ.Ź7AUt}Y%ێ׭ǹ1׳'\Ezs8N,X"T`i0TQ GxAήi]CUw2껨a9T00EP5QkTJ.3Z0eXF_>bvyc{LH"W7' cS(a1$<;N (Mm|gZۆPۆcVOھ:qtokD*sα{'ɏ5l1ŵl1,Ui cFǐum`ljMCMȩn1,Y@huBud M>Ç>ۉ%yJ! s Ӵ:lcW >\JBywgܯM4b\fyZVPJ?޴vMXq WNoQݵI 6}N֎<3<z/ i4 v]֏XT&'CۚXޏWS RlT<~G6hT#V2/U襢Ja6ac' ,Urcbccs>UT]@,]°CTAM,:`3nK7ipcʞu09W! Ehjڼ߉{fr u~"9*9& ߩ 6 ZT]ok&[S' N uQv˜8U NNS%.%[YP)h!]A6U_[hkQ{ FR1`c+" _dʰe?u|UvM^σc/`Fs*84   !6bkcu>~p,tZ0:IB}n)ZHyGzec5TT4faTMku J\RvV-YE z,4sP fl(]W=rPj}ߪlcS`Fc泖P 0S_q ÇT]_j?zX}IXqJf6r5YU'\Q16j#S `\N*_ ]4?ב';[V=wMHYz83fMh@XjkB$:>ªKp잻|80KhzЏ?l%걮hllWS=vW6J*r)֠ժ}mMk@*eWe*ߤ<6Z gzcowL IDATh&H^Qrc㡗]_xV TmjbXX| oe%,Ð+NCVb! 
0EeJ!|cܕD#@k^ CTrw,&XȣoW]=qtoNm2ak'T|@lj쫖TE Q۷, ܢA(aa"ֻ]ؖ*@„b`>T(2,`9bcȏP,K^cd&ZJ,@b`cwIcFk9bK(kЪSg@XTȀP,[cWz+]XXX u} B}6~cܥa8 `qU6hªT>yߧA˶y-˷`lϜJ\E[zkVA,Ҫg0gF(s8V@ p]*]7%sX:U#kKovͱ~xڴȤsS6@/< C0`> =W+  Y6k~,}x-?3  W UAي0ۚ`\STX{b 07Sz;v]L=[pm ``*c^u-X?^h$Clvq5\rf}|u~u-o}`ZGc85%ZQ&27xxk iPY+)RI\#V 8U[H݄bH0ĚD'ڀP+ C()A[P,rFG( O<ܺ ,̛P,@Fx:꘯2 `X e.>g`ňK>t@J+0GB\"5s9b-ʽՄb_qU&Odq{6)0w;o~hQ W8{56YX)f},I|K)ǹ9fsJ5u310 8^.Bdu\[R}3;ߨ['mG,VXHP,,.hՓ5< 1 ec0?b݆9L?f ?gL>A45RN(1aVЖ XW|,L~k#0KChG~_ \FHP,IF*m`].xbsz .4% "]}A\&碏Z2kf}u,i>,5!o"K`9;^{mm.0XCh%bae;wrwq?NLݾ&p 0 kW @X.%O}#ĸh-Xuos^<@8viJ#oYR˅uAZrKl#sxƜ9'j.aY`:xou^<@P,LWw޿fOؕSw\3&Rpwq>q{~k@c, yvdȗP,SXEVAWrc]4!KE8 0WHa,\qCbz2Ai@_9-XT$q<[r&0/޻ K%,Z,c LoOk@Y׽CyfN,K ' s*7k]v$% -& b{PIt>(D <5Sٺ[  c5MDX ʺe|:Vb7!i[DsMXPkˎa3 0]"* c rMwcSH(ϗ08VrȎBjWۄAzuyYd\~ "7.P,@V;~v1r~  !" v8Xw)끮[˪izB5XX٬ eղ ®! 'HXv-,~.[юP,@uqIDY0`|QMeך\KAu^S*  "_X !aӲ69 njsNׯ;kA *;&G c cǛQEp s#[,P,@bc]0 v`UP벵t|e@w.fםgF5Z+֊+19$WBysBUW}7!24)uMae# 0-+D*+4 ͼRZdsϬ"8y #PƸZVPA] WC|h\+^<X^ypهφ 7¥/qGhАPl<9mJKw> F@0,wUSc&`i=Rʥ>1țcCy(~?{UOuSxo uƒ,ֹ9\p13ɇ o /K/_ W_{uk{ׇ~Cx{nx (|{bG_4C0vZ%|X] ?5<n|Ǎ-%›7^};Fw^ x(<\xB…3χƒ`~|wn АUNۄ6+ ԯ `~z`N^ypهφ 7¥/qG ?/-pc~8عS_ T{9/}8\u* Ɵ{-{M}Ic},|?^|֏֙oz o>ƹ ;n|;~_ܮVB}p; [V>1BcqA}mX cqs_.^ٿx6CѶA/}4|O? c z{<|Oh.'±?<8| ׅzʘ^xG/_$}b~xc;wO'S{٫Bfg߇XU0%SY ǿyxb8gg“= s^ ^pu׆7-BQoϓ'o}Cm4_|yGXO;ч>oo _[{ׇ{㏅cWN}%X:3 @x㏄=phu9{9]~T]7W*<љxs]xW7p9<Г+KJOS}04oz‡p97;ízk_'& 0osb_ ,ӫ?|5<φ~:}l.~:LmZ/uυ{yog?j\4pp;n o-woWGsHxs__ls++:d|?W?pb׮?Z^xG/_CrJh}&0u=>q'ڧo7o0?uGGCwǞ4sΝRupϽ? 
׾Aӥ{T՝7ou74F E ұɈ8Df%-L|`hL"Vqs3($&rP#5H  ܺvĦNї:ϳK]ϩ}.9{&2E|GM=jb6c6nC/$_t7';=V,Y7Pl"`o(=FxGmVjL 3&61~wKN@=ߏ4۪y_s|ߌQ=ZCAEݜ: Q>6欚.?Q=:^{Cʲ-xblzݓw?Xv{NT|xs Ɔ7SO`PHFg+7ǏqZʴrKl>S؃Nаy@+ոs}͠a]}w_z|矉yuݹ7h>llL,_7 WoJ\ZUXyIJWt'{BK0R,Hmmp$PLK1fzkZMm?6ěʪʨQq/89Ɲ2.2&O~Bb9_771?k"Pޯ<&6!jf{~O英K5 @ rFwǾ7LGO95hW%|c &¹M6᥆hXې,{{TNUW,/s2X"y>Tb #@P__odXHoӮmb/G=on3k^oכbK[7.SFy_8/QV{dJ   ߴWd(Q*b|x51rP_?I>{|PVjU<k?\QVk6n>رeG;#*Wjx}N(YZ޿o^صQ^Qn)~T|3ߏ][wu~%\$㑸tɥ1f -- k߾k[v0E,Ɵv 8ǀ=FNyaF ~ v"rΆ?K'9^Jֶ%'Q9IJ?57z>>6欚.?Q=:g<ے)?@z aPQ:LLl6?Yx+v[7WIT/ v+ usnN_(a|ݼjs %_NqNKV,7# %-_ u[1uؿgl|fcbY##on"Q(V }JXo{.ܲŔ (BOdbٱ=6_8S^ ;9Z/OoPP~0DعugbِQC UHei֝1E&F{4+ϿX>Yic3dC"wLHAm@iS_cWYSM+rO/ƃ_y01uJ[N%ޒϛ91f,|oy Ķj7?Bؠ+p W*ផL,yN;Qq¹'tjiN,۾y{ܹџ bqMǁ}r< ЩEwk&cĸYΨG%% K0Rl711}U)M[S"o˫k^{>wO 6(ZpVL==FOvw=QI~q\صQ^Q_yL8mB̬xc闗jЯxR}O~BbC OsFKamCβxR;5V-_Cn_Doû{b[μ5vmyzmCK?|\W =oz@wIJǎ(+GN82OEuK"۹H`JwA7;/|߻)211/s훶c~4*b|x51rP;>6B+p Aa 6jkk5w}TOo&4ޕBW_>X^佟#7Ǯϸ9vlّ5?q+(=&U[R Ȕ}ɾ}ձjܦhcCq`U1zwʸ8M IDATc)"hjin Oo5]/Xط[pYIJj\УFSg[/Ŵ@iלڿuO-sV /뱿Ơܺ3lȨ![NՈ[wcVB! 
%ĔdtyӇ82.b[7"`A|wg%| nN]ͩmx~E22e߳kۮXE$NzS4[_l,qi2:yq(4}{WwGCq٘X6n|A1n|PlDoXQs]/fUbDž_hg.1q(++L G&5цJRy0jˁCC|"=cNo,Ws-ͲCX[Vm[hw}9F^B#L1)ݓ替)vHC~37ۧ᥆hXې,{{TNM,/WRE{_[^~懿(RVQ71W>vm|ǔH%&_ u[NwyW߳@?o@]g_7>>qDQޯ)3+2Wxȍ~E)WN53k=g'^|X&^]j{fаAqւb1z?޶;?>뉷FOxlwXǮ7TYEY?u|D5qH[w& 5`˩QV~p()yٵ' GeK.вBjAf?s }9lyD6EbH }U-JLk[XVsVMzШ\3mKΟfC}\YEY{;7&&RTKk}oK,q.?8C///]R1?>GO;nN]ͩ}؛X6xxrkK]#ƍ3>vF̺fVT?4P^Q =oI\X 6(lw-%hJ>w=r ZӏQFE$ztdHpL@S7c3׍/2ճ{M4lC+J:2kf?=Oqc /wbIUZgaG?}ŵ]~w` 7'bF x;Ǝ-;y_s4m}gq0d@ F&qv6,rvyg'Q[b-(O1phǜ4&f8;gY_Wn{ B3J,@*)~ADxWǞ< ;-Mm!>nN]|w_9_SgM5U{ {&82.(+o#FĂ,LYΆ?KGCkJɹ)][w|ϋ\٩HF$xeGU`^o@IJrm|8gGDy9ad̺fV,\0 = s9zP,@J NP6^OMKŬا[w.3>cAX8yaq`߁o~3AD87m}pXڋRi/+L1ټjs %DOqNO,7ᡃ%VVqhab˪-y\/gd7woe۟EKKKAuIJ!JTZKgמIJ#'-,n{XoQ|eWbWW4D( ޔme#ee]y#hCPIJc{lLpz$ Ɔ7h|耼 Ww:M]_{ 3<**+r5HچIJmzՠav7nfhz)<_8% )R__)C c]~.KӨGѯweb̫fvQǍ=Sf sχv:>BO֏FOX|#ߵ 􄪑Ue;vl9;3"۹{ FF=MH ǔgtع5QC Uy[ HG,+(^.S?+:tMuŨIY7.[ /Wnc%阓I,Ԇ,c Z~wSlm{+; |uU9zP,=LofbӉm߼=\pg|ǂp¸hZȥG#EH;5I,oh ?5lscv0hT-Zr?OsgIcb;~(>%ϚI}Gt$/1vzUP?m,g뺭eÏ^M+P,=LonObx;Ǝ-;y_s4m}gq>nb u[wB:ockjΪ1wШ\/ڒFv7_b]{s}|8gGDy9ad̺fV,\0 {e6TL{b]jե{IJSF2ޏ\f?fxTTy_s~H4SEDDv^$ Pjkk5S[o~35K,<[ZywĮּȋ=Z2:З4&{Wjn񙍉edAHÒgسkObّ˖\z[,ڷ(+1s .)d1Q'U'j9?R':וW);u۔X6vXEH(Q֔gthޗveު\ĺزjK:W|oEk6Яvi}$=0=@xA%n]4DMy¹iW7.;t[oUW%2G}cN>&lS +7tjt?X"Q__=Lyunߴg= @__=qcc؛ _#ƍ(rFFE.S1gwM/zxJov7;/D:;lGTa5Wdn|\_}3#_y02f]3+X%x˲ x|IJÓG߾y{ܹџ bqMǁ}r< Щ},2:|3 MmJ,;m  o&ߏyf3qOI{r]4㍆7E-g7nOc\ݸyLM]WIJW.{S&WO N3 MZ?.Jz?WuvJ?&:cJ՞7ʺpovaɡ=$9ȸleZVR6"b_ 1[]l;z͛ۓèI#my~K:ص6Ͼx˯~H {9옓~ԆDVnȻ{)cw%fwXvXv_YUW~ʼX@o9X59=QsVMTlwk[+H,?3h"`n.!L~LA%n]4DM¹b޷+5K,:wKKK1wb#/@<0ϷMzIJ5r;^y-wa&bmϘy8sS?T5(캢hͺfVbYچ?|7ZZZUWXtɢQbFVři(FJFʔKo\bw6vXͭ xA⴪to&41r./]S5*lg΂-gwFd;>eueՖuފЗ?Ԯy=Cϔ5eXwE۟{kcT7@((_[^*TƂ{ĉ瞨q}}l ыΚSvJOև.4o`ݓso~=5˅3UlDdJ} N&j)KC_~3 FX[NFV Pb򎚜c*m/o;l7mow}Yݪ'UǨIᏇr%Z+Nu t+RL&:7=6ݱ{[gW7..[rY}v Ra_Oߴ?g3K뎿.κ꬘~=et?v7 OmzGM>J#5&S~lJ`W^Qߘ|3 ܦIJ~Zr@K>'~~=hcNo\!ۿۃؾ:蓎kbosqjrpe\XdEb99GrPeLhzLhƠS6鼯ϋX9TƂ{ĉhg(2ƲL^t~uso;o1'X YƆ:| }~~%ݵ7׍믏3>~FQ}D+FƬkf cA۽7qYqXְ!mwgXtɢQbFVřd21ֹqWT׸q՞' EV#)siҽLf|tFbYa™IekYw#^[ZL'3 
Яp,iV%?޳kObّ˖\z[,ڷ(+1sbI FD iΚSvJb?z:oŮ]yY亸[ͯ'0T ͘?#nz8 WT<{8ģ5fMP)K7_̚IJuO]jxU璏cT7n/޸?Ms?⬫ΊMSFGAcwԆxcW&1f^5Scs@DD 9$:7.Xձꗫbӳ Ѵ)+jDU2:Ɲ2.}pZL8mBd2 Xb!5@7;c+5SYJ ŶN.Od«T&?:aˁi}|tL,;iI=9}_2F&eg΂-gwFd;@1SGnHɻ_qw,yY,yY믬+eWklhc c 5gUMM%D( ou܃c2FLzXܣuJuտ^uK\wuOo7AܺXvKpC[oUW%2W@ 1nDʎ-'_]U#6t̫fk/_|*|F#4@tR=MͿ}~/u͟\Xhz)4 oij</[θ%oܞX7;oGg$xvٳOշzruL8u+ nӶhii)rۚX6hy_8`UOq'i@_>D(j݌f !NK]Q,So:9>;߂;nxꞧÕ쪣O::jN{bsckcŒ| O }Ï'lj,r6$WEL&:70?MXͭK,{;]ot_]9q^oƬkf%5mw=[Xtɢ sȪ835:_%:#ݲ,gYeƔgEYEY,W92ܳ0uԘSbW,GOǷ>tɥy_o_O|ͅra 7.r=}xrqiE7:ffہ++9 Om]q yΐCbs.տ^~*6=)M;bQ92FT)c)bń&D&рc׈XNkr{欚\:M޻Z<ظwm;]׸qqْˌ6T@ozʿ9˟YL\wuqUg)qwlxjC<~+amU356_pEԆ;&R^c#k\Wwkk^KtFL<}Aʘ~~tPٷbJP,thӬkf%WZt_DYYa2{1Fp [~hVTǬ/̊3/=]۝3eL|䶏]+5wDzŲuʪʸWFyEƆV!> 953k=.v5;;1t?d311rȘzԸK'Mo\n2q|*|F+ RzRu4*gYˁi}_yIJfcv^O^b(Zit3{i3fyQVQ˿0xHe,gAxà,.N] LzNܺXvKT[oUW%2P$b0;L&sowؽmwW7..[rY}Я>fGg$b_^r<٘v{M'ӫ'WDŽS'CwP룶VCP4f^53^{͘?#N8ŭGh4hj{kcfřeee=i27̎7̶#~'5gĚ߮Y~jrpe\XdEb99Gw}7Ko\zC7jk/8K Lг\ңL@: 8=oy_8`UOӽ:{Wy@w2rH̽un8g8>Ly&1 FNSϟ|X¸iM?X (.Xְ!ߍvո1](qتUqgjth}dzbkSCT//1 02e0uԘSbW,GOǷ>tɥQ5*uOov\/ƀnW*3ڹd(B̘?#nz8 ;uO^> ~Ϗ~%?'$=.hFƢ3n'1n\̼j>}#Bx@wh.տ^~*6=)M;bQ92FT)c)bń&D&рЍ\p8ט)c#}$]ݸ;ݼ,ݼWVUƕ߿2+0EF(L2 ]3Z?~Uzǧ~8jQEH(HŦS((_[^*TƂ{ĉh7̎7@I*Q 4Pj2L̽un\y1x.5n\,||aL;hHP Q__!1Fp [~hVTǬ/̊3/=3ʌ7Šپ# !#[E7\:VrUlzvS4!v4rpeTSFǸSŴN ML&uw>B Q[[!蓮\5S92_4=_4=u}p8/>}9>H=XI:~ E(RT=8HBdK @;y/B3]GhαP,E͍G=b RcMSbzP,E,3 -)9<7п P[[JP rad;ڌ*=K6ӟKxH]p=H: ^&::l_u| tX]n3L}~"'Yx׃[bjkk^C@R3Zﮮ{m fzh{g>³$ݵx@@o ҁv4Zk6ߒ.=[LmT3Q^m'vU)D 1$NZ <\X1z;MXl?ڑpf!_櫫,-QjuiYήC#Y7Rwo&. H g( XH#trA7b#zZޮ#Ӂe;ϟ4jRp#'ҁ6̴ގnLk{s`t7z]g(1Y2F |T y9[/QʄbX0]nGקmP5ڑke{lۑe3ymއ Zna>[eaێM3x_{kCIA|=T@i)Cۀm -=*9y9f(}{oRDwB1p8F{{;.nZ\rDG4vچJۆht\\d|YsDnmu`{grY-Ffm;l:2ѵgd3]8wÖ&ap||9~s;z X_rIϋJP,@ E%[B9#--h+xz$||fp&K׷]\17ms\q:fx0g?;}\<~8W;ynEBEN(;',;5=W-X!Ѷmi|l:f]“OKe*k`mbXFK>wpi&mץ,ay6vph4W@9%ߺ$fsm\m[qaL)zh;TtAa!'"@kg?BMSv~{}P,>+AؤhKkT͇YvKr9o;lɱe9iGeڬKs;+0{6˥Zvu.VmGMj,?Zl`m6kr7}hKHty@[y@uuZh}(źsfmςq]^biW~Di"$Jl'˳ e 6wodL~d|!ؖ՚02_`~Wo}<:Ykv!/a!pnp {Mb .)qہ-e@ N s]Q_WBs|~@ӄb(y}0"Rn%M3m] ?xKEO.'4[lS8zP,G®. 
Tm?鷯ͱi/+)f:7{;ڎ+ TW>#ĺ~w>IRСPQ^˴'tZ{_s I(gWzx;Sx'4l;m\~(띎hK#{R{~ B}Tn~Cx'i7/"z9IY\o[@){?P"bJTJBn 싷Fmy@6m –Jw[{ 7p~  BFy7b[:..4 PbyW&]& Eu3Np,)MVH:^A)ᵕ@ohAt^ZoXHa`@z3!(V H^{i:ZCٷ;g<'RB@PB\tjomX'Еk lO b,ok>\Q4Px{0l(Bn P,B9u+`/N w-*ݡX\;@"X<,Һ_+k&/ I<g[f&p .n{! `s{r۾=kLS迥h{I5t HM )l/:BY[7_x(ybyk<%}^ [\;E(bޏe$_3oEI(6^!JP;gm O(pss/Pl*Ы@gλXR-: p_=tnq_#:Iאa^#NF׍Yx( =)6$h,kE~Nr(kJ};][nh#@>)tPV + iDN'HX F`XR(4uHMu: M ˵:tB&qB5BPb@ zM#nv2R,gl-ǵ;bH=X(nYM' ' EF(gdZ8 XU(BBP,j Xh?T(RBB@(QZ3N(% PBP,^wΚ5B>YM& eWB_z@zBP86dY  @ @adSb  b2]|->O(%d )# #>E(X«FOw.oJ(v\=؍m@s]2bnB+XXŸ%hO+<#huq#b>.bŕ/AB @(nZ,GuYNXp-@ GyDZ,(n+\(z\p:( &b'X !^;b˵XA ɾ+]@GODpm&p*q( "eN'}wt-H @(=:Cbp[Z^ZN&3m0(L]{eb8 2p*Q,'']ʏ%YybJWbC  qD𫓯 c0٭ vy8(\=/IX&vr1#bf{.6c @(W Q,̱MD7p}^~ @(<wr)Įχf07N1g{TZhW\IENDB`bumpalo-3.7.0/src/alloc.rs000064400000000000000000000753420000000000000135110ustar 00000000000000// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 or the MIT license // , at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![allow(unstable_name_collisions)] #![allow(dead_code)] #![allow(deprecated)] //! Memory allocation APIs use core::cmp; use core::fmt; use core::mem; use core::ptr::{self, NonNull}; use core::usize; pub use core::alloc::{Layout, LayoutErr}; fn new_layout_err() -> LayoutErr { Layout::from_size_align(1, 3).unwrap_err() } pub fn handle_alloc_error(layout: Layout) -> ! 
{ panic!("encountered allocation error: {:?}", layout) } pub trait UnstableLayoutMethods { fn padding_needed_for(&self, align: usize) -> usize; fn repeat(&self, n: usize) -> Result<(Layout, usize), LayoutErr>; fn array(n: usize) -> Result; } impl UnstableLayoutMethods for Layout { fn padding_needed_for(&self, align: usize) -> usize { let len = self.size(); // Rounded up value is: // len_rounded_up = (len + align - 1) & !(align - 1); // and then we return the padding difference: `len_rounded_up - len`. // // We use modular arithmetic throughout: // // 1. align is guaranteed to be > 0, so align - 1 is always // valid. // // 2. `len + align - 1` can overflow by at most `align - 1`, // so the &-mask wth `!(align - 1)` will ensure that in the // case of overflow, `len_rounded_up` will itself be 0. // Thus the returned padding, when added to `len`, yields 0, // which trivially satisfies the alignment `align`. // // (Of course, attempts to allocate blocks of memory whose // size and padding overflow in the above manner should cause // the allocator to yield an error anyway.) let len_rounded_up = len.wrapping_add(align).wrapping_sub(1) & !align.wrapping_sub(1); len_rounded_up.wrapping_sub(len) } fn repeat(&self, n: usize) -> Result<(Layout, usize), LayoutErr> { let padded_size = self .size() .checked_add(self.padding_needed_for(self.align())) .ok_or_else(new_layout_err)?; let alloc_size = padded_size.checked_mul(n).ok_or_else(new_layout_err)?; unsafe { // self.align is already known to be valid and alloc_size has been // padded already. Ok(( Layout::from_size_align_unchecked(alloc_size, self.align()), padded_size, )) } } fn array(n: usize) -> Result { Layout::new::().repeat(n).map(|(k, offs)| { debug_assert!(offs == mem::size_of::()); k }) } } /// Represents the combination of a starting address and /// a total capacity of the returned block. 
// #[unstable(feature = "allocator_api", issue = "32838")] #[derive(Debug)] pub struct Excess(pub NonNull, pub usize); fn size_align() -> (usize, usize) { (mem::size_of::(), mem::align_of::()) } /// The `AllocErr` error indicates an allocation failure /// that may be due to resource exhaustion or to /// something wrong when combining the given input arguments with this /// allocator. // #[unstable(feature = "allocator_api", issue = "32838")] #[derive(Clone, PartialEq, Eq, Debug)] pub struct AllocErr; // (we need this for downstream impl of trait Error) // #[unstable(feature = "allocator_api", issue = "32838")] impl fmt::Display for AllocErr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str("memory allocation failed") } } /// The `CannotReallocInPlace` error is used when `grow_in_place` or /// `shrink_in_place` were unable to reuse the given memory block for /// a requested layout. // #[unstable(feature = "allocator_api", issue = "32838")] #[derive(Clone, PartialEq, Eq, Debug)] pub struct CannotReallocInPlace; // #[unstable(feature = "allocator_api", issue = "32838")] impl CannotReallocInPlace { pub fn description(&self) -> &str { "cannot reallocate allocator's memory in place" } } // (we need this for downstream impl of trait Error) // #[unstable(feature = "allocator_api", issue = "32838")] impl fmt::Display for CannotReallocInPlace { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } /// An implementation of `Alloc` can allocate, reallocate, and /// deallocate arbitrary blocks of data described via `Layout`. /// /// Some of the methods require that a memory block be *currently /// allocated* via an allocator. 
This means that: /// /// * the starting address for that memory block was previously /// returned by a previous call to an allocation method (`alloc`, /// `alloc_zeroed`, `alloc_excess`, `alloc_one`, `alloc_array`) or /// reallocation method (`realloc`, `realloc_excess`, or /// `realloc_array`), and /// /// * the memory block has not been subsequently deallocated, where /// blocks are deallocated either by being passed to a deallocation /// method (`dealloc`, `dealloc_one`, `dealloc_array`) or by being /// passed to a reallocation method (see above) that returns `Ok`. /// /// A note regarding zero-sized types and zero-sized layouts: many /// methods in the `Alloc` trait state that allocation requests /// must be non-zero size, or else undefined behavior can result. /// /// * However, some higher-level allocation methods (`alloc_one`, /// `alloc_array`) are well-defined on zero-sized types and can /// optionally support them: it is left up to the implementor /// whether to return `Err`, or to return `Ok` with some pointer. /// /// * If an `Alloc` implementation chooses to return `Ok` in this /// case (i.e. the pointer denotes a zero-sized inaccessible block) /// then that returned pointer must be considered "currently /// allocated". On such an allocator, *all* methods that take /// currently-allocated pointers as inputs must accept these /// zero-sized pointers, *without* causing undefined behavior. /// /// * In other words, if a zero-sized pointer can flow out of an /// allocator, then that allocator must likewise accept that pointer /// flowing back into its deallocation and reallocation methods. /// /// Some of the methods require that a layout *fit* a memory block. /// What it means for a layout to "fit" a memory block means (or /// equivalently, for a memory block to "fit" a layout) is that the /// following two conditions must hold: /// /// 1. The block's starting address must be aligned to `layout.align()`. /// /// 2. 
The block's size must fall in the range `[use_min, use_max]`, where: /// /// * `use_min` is `self.usable_size(layout).0`, and /// /// * `use_max` is the capacity that was (or would have been) /// returned when (if) the block was allocated via a call to /// `alloc_excess` or `realloc_excess`. /// /// Note that: /// /// * the size of the layout most recently used to allocate the block /// is guaranteed to be in the range `[use_min, use_max]`, and /// /// * a lower-bound on `use_max` can be safely approximated by a call to /// `usable_size`. /// /// * if a layout `k` fits a memory block (denoted by `ptr`) /// currently allocated via an allocator `a`, then it is legal to /// use that layout to deallocate it, i.e. `a.dealloc(ptr, k);`. /// /// # Unsafety /// /// The `Alloc` trait is an `unsafe` trait for a number of reasons, and /// implementors must ensure that they adhere to these contracts: /// /// * Pointers returned from allocation functions must point to valid memory and /// retain their validity until at least the instance of `Alloc` is dropped /// itself. /// /// * `Layout` queries and calculations in general must be correct. Callers of /// this trait are allowed to rely on the contracts defined on each method, /// and implementors must ensure such contracts remain true. /// /// Note that this list may get tweaked over time as clarifications are made in /// the future. // #[unstable(feature = "allocator_api", issue = "32838")] pub unsafe trait Alloc { // (Note: some existing allocators have unspecified but well-defined // behavior in response to a zero size allocation request ; // e.g. in C, `malloc` of 0 will either return a null pointer or a // unique pointer, but will not have arbitrary undefined // behavior. // However in jemalloc for example, // `mallocx(0)` is documented as undefined behavior.) /// Returns a pointer meeting the size and alignment guarantees of /// `layout`. 
/// /// If this method returns an `Ok(addr)`, then the `addr` returned /// will be non-null address pointing to a block of storage /// suitable for holding an instance of `layout`. /// /// The returned block of storage may or may not have its contents /// initialized. (Extension subtraits might restrict this /// behavior, e.g. to ensure initialization to particular sets of /// bit patterns.) /// /// # Safety /// /// This function is unsafe because undefined behavior can result /// if the caller does not ensure that `layout` has non-zero size. /// /// (Extension subtraits might provide more specific bounds on /// behavior, e.g. guarantee a sentinel address or a null pointer /// in response to a zero-size allocation request.) /// /// # Errors /// /// Returning `Err` indicates that either memory is exhausted or /// `layout` does not meet allocator's size or alignment /// constraints. /// /// Implementations are encouraged to return `Err` on memory /// exhaustion rather than panicking or aborting, but this is not /// a strict requirement. (Specifically: it is *legal* to /// implement this trait atop an underlying native allocation /// library that aborts on memory exhaustion.) /// /// Clients wishing to abort computation in response to an /// allocation error are encouraged to call the [`handle_alloc_error`] function, /// rather than directly invoking `panic!` or similar. /// /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html unsafe fn alloc(&mut self, layout: Layout) -> Result, AllocErr>; /// Deallocate the memory referenced by `ptr`. 
/// /// # Safety /// /// This function is unsafe because undefined behavior can result /// if the caller does not ensure all of the following: /// /// * `ptr` must denote a block of memory currently allocated via /// this allocator, /// /// * `layout` must *fit* that block of memory, /// /// * In addition to fitting the block of memory `layout`, the /// alignment of the `layout` must match the alignment used /// to allocate that block of memory. unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout); // == ALLOCATOR-SPECIFIC QUANTITIES AND LIMITS == // usable_size /// Returns bounds on the guaranteed usable size of a successful /// allocation created with the specified `layout`. /// /// In particular, if one has a memory block allocated via a given /// allocator `a` and layout `k` where `a.usable_size(k)` returns /// `(l, u)`, then one can pass that block to `a.dealloc()` with a /// layout in the size range [l, u]. /// /// (All implementors of `usable_size` must ensure that /// `l <= k.size() <= u`) /// /// Both the lower- and upper-bounds (`l` and `u` respectively) /// are provided, because an allocator based on size classes could /// misbehave if one attempts to deallocate a block without /// providing a correct value for its size (i.e., one within the /// range `[l, u]`). /// /// Clients who wish to make use of excess capacity are encouraged /// to use the `alloc_excess` and `realloc_excess` instead, as /// this method is constrained to report conservative values that /// serve as valid bounds for *all possible* allocation method /// calls. /// /// However, for clients that do not wish to track the capacity /// returned by `alloc_excess` locally, this method is likely to /// produce useful results. #[inline] fn usable_size(&self, layout: &Layout) -> (usize, usize) { (layout.size(), layout.size()) } // == METHODS FOR MEMORY REUSE == // realloc. 
alloc_excess, realloc_excess /// Returns a pointer suitable for holding data described by /// a new layout with `layout`’s alignment and a size given /// by `new_size`. To /// accomplish this, this may extend or shrink the allocation /// referenced by `ptr` to fit the new layout. /// /// If this returns `Ok`, then ownership of the memory block /// referenced by `ptr` has been transferred to this /// allocator. The memory may or may not have been freed, and /// should be considered unusable (unless of course it was /// transferred back to the caller again via the return value of /// this method). /// /// If this method returns `Err`, then ownership of the memory /// block has not been transferred to this allocator, and the /// contents of the memory block are unaltered. /// /// # Safety /// /// This function is unsafe because undefined behavior can result /// if the caller does not ensure all of the following: /// /// * `ptr` must be currently allocated via this allocator, /// /// * `layout` must *fit* the `ptr` (see above). (The `new_size` /// argument need not fit it.) /// /// * `new_size` must be greater than zero. /// /// * `new_size`, when rounded up to the nearest multiple of `layout.align()`, /// must not overflow (i.e. the rounded value must be less than `usize::MAX`). /// /// (Extension subtraits might provide more specific bounds on /// behavior, e.g. guarantee a sentinel address or a null pointer /// in response to a zero-size allocation request.) /// /// # Errors /// /// Returns `Err` only if the new layout /// does not meet the allocator's size /// and alignment constraints of the allocator, or if reallocation /// otherwise fails. /// /// Implementations are encouraged to return `Err` on memory /// exhaustion rather than panicking or aborting, but this is not /// a strict requirement. (Specifically: it is *legal* to /// implement this trait atop an underlying native allocation /// library that aborts on memory exhaustion.) 
/// /// Clients wishing to abort computation in response to a /// reallocation error are encouraged to call the [`handle_alloc_error`] function, /// rather than directly invoking `panic!` or similar. /// /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html unsafe fn realloc( &mut self, ptr: NonNull, layout: Layout, new_size: usize, ) -> Result, AllocErr> { let old_size = layout.size(); if new_size >= old_size { if let Ok(()) = self.grow_in_place(ptr, layout, new_size) { return Ok(ptr); } } else if new_size < old_size { if let Ok(()) = self.shrink_in_place(ptr, layout, new_size) { return Ok(ptr); } } // otherwise, fall back on alloc + copy + dealloc. let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); let result = self.alloc(new_layout); if let Ok(new_ptr) = result { ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), cmp::min(old_size, new_size)); self.dealloc(ptr, layout); } result } /// Behaves like `alloc`, but also ensures that the contents /// are set to zero before being returned. /// /// # Safety /// /// This function is unsafe for the same reasons that `alloc` is. /// /// # Errors /// /// Returning `Err` indicates that either memory is exhausted or /// `layout` does not meet allocator's size or alignment /// constraints, just as in `alloc`. /// /// Clients wishing to abort computation in response to an /// allocation error are encouraged to call the [`handle_alloc_error`] function, /// rather than directly invoking `panic!` or similar. /// /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result, AllocErr> { let size = layout.size(); let p = self.alloc(layout); if let Ok(p) = p { ptr::write_bytes(p.as_ptr(), 0, size); } p } /// Behaves like `alloc`, but also returns the whole size of /// the returned block. For some `layout` inputs, like arrays, this /// may include extra storage usable for additional data. 
/// /// # Safety /// /// This function is unsafe for the same reasons that `alloc` is. /// /// # Errors /// /// Returning `Err` indicates that either memory is exhausted or /// `layout` does not meet allocator's size or alignment /// constraints, just as in `alloc`. /// /// Clients wishing to abort computation in response to an /// allocation error are encouraged to call the [`handle_alloc_error`] function, /// rather than directly invoking `panic!` or similar. /// /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html unsafe fn alloc_excess(&mut self, layout: Layout) -> Result { let usable_size = self.usable_size(&layout); self.alloc(layout).map(|p| Excess(p, usable_size.1)) } /// Behaves like `realloc`, but also returns the whole size of /// the returned block. For some `layout` inputs, like arrays, this /// may include extra storage usable for additional data. /// /// # Safety /// /// This function is unsafe for the same reasons that `realloc` is. /// /// # Errors /// /// Returning `Err` indicates that either memory is exhausted or /// `layout` does not meet allocator's size or alignment /// constraints, just as in `realloc`. /// /// Clients wishing to abort computation in response to a /// reallocation error are encouraged to call the [`handle_alloc_error`] function, /// rather than directly invoking `panic!` or similar. /// /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html unsafe fn realloc_excess( &mut self, ptr: NonNull, layout: Layout, new_size: usize, ) -> Result { let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); let usable_size = self.usable_size(&new_layout); self.realloc(ptr, layout, new_size) .map(|p| Excess(p, usable_size.1)) } /// Attempts to extend the allocation referenced by `ptr` to fit `new_size`. 
/// /// If this returns `Ok`, then the allocator has asserted that the /// memory block referenced by `ptr` now fits `new_size`, and thus can /// be used to carry data of a layout of that size and same alignment as /// `layout`. (The allocator is allowed to /// expend effort to accomplish this, such as extending the memory block to /// include successor blocks, or virtual memory tricks.) /// /// Regardless of what this method returns, ownership of the /// memory block referenced by `ptr` has not been transferred, and /// the contents of the memory block are unaltered. /// /// # Safety /// /// This function is unsafe because undefined behavior can result /// if the caller does not ensure all of the following: /// /// * `ptr` must be currently allocated via this allocator, /// /// * `layout` must *fit* the `ptr` (see above); note the /// `new_size` argument need not fit it, /// /// * `new_size` must not be less than `layout.size()`, /// /// # Errors /// /// Returns `Err(CannotReallocInPlace)` when the allocator is /// unable to assert that the memory block referenced by `ptr` /// could fit `layout`. /// /// Note that one cannot pass `CannotReallocInPlace` to the `handle_alloc_error` /// function; clients are expected either to be able to recover from /// `grow_in_place` failures without aborting, or to fall back on /// another reallocation method before resorting to an abort. unsafe fn grow_in_place( &mut self, ptr: NonNull, layout: Layout, new_size: usize, ) -> Result<(), CannotReallocInPlace> { let _ = ptr; // this default implementation doesn't care about the actual address. debug_assert!(new_size >= layout.size()); let (_l, u) = self.usable_size(&layout); // _l <= layout.size() [guaranteed by usable_size()] // layout.size() <= new_layout.size() [required by this method] if new_size <= u { Ok(()) } else { Err(CannotReallocInPlace) } } /// Attempts to shrink the allocation referenced by `ptr` to fit `new_size`. 
/// /// If this returns `Ok`, then the allocator has asserted that the /// memory block referenced by `ptr` now fits `new_size`, and /// thus can only be used to carry data of that smaller /// layout. (The allocator is allowed to take advantage of this, /// carving off portions of the block for reuse elsewhere.) The /// truncated contents of the block within the smaller layout are /// unaltered, and ownership of block has not been transferred. /// /// If this returns `Err`, then the memory block is considered to /// still represent the original (larger) `layout`. None of the /// block has been carved off for reuse elsewhere, ownership of /// the memory block has not been transferred, and the contents of /// the memory block are unaltered. /// /// # Safety /// /// This function is unsafe because undefined behavior can result /// if the caller does not ensure all of the following: /// /// * `ptr` must be currently allocated via this allocator, /// /// * `layout` must *fit* the `ptr` (see above); note the /// `new_size` argument need not fit it, /// /// * `new_size` must not be greater than `layout.size()` /// (and must be greater than zero), /// /// # Errors /// /// Returns `Err(CannotReallocInPlace)` when the allocator is /// unable to assert that the memory block referenced by `ptr` /// could fit `layout`. /// /// Note that one cannot pass `CannotReallocInPlace` to the `handle_alloc_error` /// function; clients are expected either to be able to recover from /// `shrink_in_place` failures without aborting, or to fall back /// on another reallocation method before resorting to an abort. unsafe fn shrink_in_place( &mut self, ptr: NonNull, layout: Layout, new_size: usize, ) -> Result<(), CannotReallocInPlace> { let _ = ptr; // this default implementation doesn't care about the actual address. 
debug_assert!(new_size <= layout.size()); let (l, _u) = self.usable_size(&layout); // layout.size() <= _u [guaranteed by usable_size()] // new_layout.size() <= layout.size() [required by this method] if l <= new_size { Ok(()) } else { Err(CannotReallocInPlace) } } // == COMMON USAGE PATTERNS == // alloc_one, dealloc_one, alloc_array, realloc_array. dealloc_array /// Allocates a block suitable for holding an instance of `T`. /// /// Captures a common usage pattern for allocators. /// /// The returned block is suitable for passing to the /// `alloc`/`realloc` methods of this allocator. /// /// Note to implementors: If this returns `Ok(ptr)`, then `ptr` /// must be considered "currently allocated" and must be /// acceptable input to methods such as `realloc` or `dealloc`, /// *even if* `T` is a zero-sized type. In other words, if your /// `Alloc` implementation overrides this method in a manner /// that can return a zero-sized `ptr`, then all reallocation and /// deallocation methods need to be similarly overridden to accept /// such values as input. /// /// # Errors /// /// Returning `Err` indicates that either memory is exhausted or /// `T` does not meet allocator's size or alignment constraints. /// /// For zero-sized `T`, may return either of `Ok` or `Err`, but /// will *not* yield undefined behavior. /// /// Clients wishing to abort computation in response to an /// allocation error are encouraged to call the [`handle_alloc_error`] function, /// rather than directly invoking `panic!` or similar. /// /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html fn alloc_one(&mut self) -> Result, AllocErr> where Self: Sized, { let k = Layout::new::(); if k.size() > 0 { unsafe { self.alloc(k).map(|p| p.cast()) } } else { Err(AllocErr) } } /// Deallocates a block suitable for holding an instance of `T`. 
/// /// The given block must have been produced by this allocator, /// and must be suitable for storing a `T` (in terms of alignment /// as well as minimum and maximum size); otherwise yields /// undefined behavior. /// /// Captures a common usage pattern for allocators. /// /// # Safety /// /// This function is unsafe because undefined behavior can result /// if the caller does not ensure both: /// /// * `ptr` must denote a block of memory currently allocated via this allocator /// /// * the layout of `T` must *fit* that block of memory. unsafe fn dealloc_one(&mut self, ptr: NonNull) where Self: Sized, { let k = Layout::new::(); if k.size() > 0 { self.dealloc(ptr.cast(), k); } } /// Allocates a block suitable for holding `n` instances of `T`. /// /// Captures a common usage pattern for allocators. /// /// The returned block is suitable for passing to the /// `alloc`/`realloc` methods of this allocator. /// /// Note to implementors: If this returns `Ok(ptr)`, then `ptr` /// must be considered "currently allocated" and must be /// acceptable input to methods such as `realloc` or `dealloc`, /// *even if* `T` is a zero-sized type. In other words, if your /// `Alloc` implementation overrides this method in a manner /// that can return a zero-sized `ptr`, then all reallocation and /// deallocation methods need to be similarly overridden to accept /// such values as input. /// /// # Errors /// /// Returning `Err` indicates that either memory is exhausted or /// `[T; n]` does not meet allocator's size or alignment /// constraints. /// /// For zero-sized `T` or `n == 0`, may return either of `Ok` or /// `Err`, but will *not* yield undefined behavior. /// /// Always returns `Err` on arithmetic overflow. /// /// Clients wishing to abort computation in response to an /// allocation error are encouraged to call the [`handle_alloc_error`] function, /// rather than directly invoking `panic!` or similar. 
/// /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html fn alloc_array(&mut self, n: usize) -> Result, AllocErr> where Self: Sized, { match Layout::array::(n) { Ok(layout) if layout.size() > 0 => unsafe { self.alloc(layout).map(|p| p.cast()) }, _ => Err(AllocErr), } } /// Reallocates a block previously suitable for holding `n_old` /// instances of `T`, returning a block suitable for holding /// `n_new` instances of `T`. /// /// Captures a common usage pattern for allocators. /// /// The returned block is suitable for passing to the /// `alloc`/`realloc` methods of this allocator. /// /// # Safety /// /// This function is unsafe because undefined behavior can result /// if the caller does not ensure all of the following: /// /// * `ptr` must be currently allocated via this allocator, /// /// * the layout of `[T; n_old]` must *fit* that block of memory. /// /// # Errors /// /// Returning `Err` indicates that either memory is exhausted or /// `[T; n_new]` does not meet allocator's size or alignment /// constraints. /// /// For zero-sized `T` or `n_new == 0`, may return either of `Ok` or /// `Err`, but will *not* yield undefined behavior. /// /// Always returns `Err` on arithmetic overflow. /// /// Clients wishing to abort computation in response to a /// reallocation error are encouraged to call the [`handle_alloc_error`] function, /// rather than directly invoking `panic!` or similar. /// /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html unsafe fn realloc_array( &mut self, ptr: NonNull, n_old: usize, n_new: usize, ) -> Result, AllocErr> where Self: Sized, { match (Layout::array::(n_old), Layout::array::(n_new)) { (Ok(ref k_old), Ok(ref k_new)) if k_old.size() > 0 && k_new.size() > 0 => { debug_assert!(k_old.align() == k_new.align()); self.realloc(ptr.cast(), k_old.clone(), k_new.size()) .map(NonNull::cast) } _ => Err(AllocErr), } } /// Deallocates a block suitable for holding `n` instances of `T`. 
/// /// Captures a common usage pattern for allocators. /// /// # Safety /// /// This function is unsafe because undefined behavior can result /// if the caller does not ensure both: /// /// * `ptr` must denote a block of memory currently allocated via this allocator /// /// * the layout of `[T; n]` must *fit* that block of memory. /// /// # Errors /// /// Returning `Err` indicates that either `[T; n]` or the given /// memory block does not meet allocator's size or alignment /// constraints. /// /// Always returns `Err` on arithmetic overflow. unsafe fn dealloc_array(&mut self, ptr: NonNull, n: usize) -> Result<(), AllocErr> where Self: Sized, { match Layout::array::(n) { Ok(k) if k.size() > 0 => { self.dealloc(ptr.cast(), k); Ok(()) } _ => Err(AllocErr), } } } bumpalo-3.7.0/src/boxed.rs000064400000000000000000000502170000000000000135120ustar 00000000000000//! A pointer type for bump allocation. //! //! [`Box<'a, T>`], provides the simplest form of //! bump allocation in `bumpalo`. Boxes provide ownership for this allocation, and //! drop their contents when they go out of scope. //! //! # Examples //! //! Move a value from the stack to the heap by creating a [`Box`]: //! //! ``` //! use bumpalo::{Bump, boxed::Box}; //! //! let b = Bump::new(); //! //! let val: u8 = 5; //! let boxed: Box = Box::new_in(val, &b); //! ``` //! //! Move a value from a [`Box`] back to the stack by [dereferencing]: //! //! ``` //! use bumpalo::{Bump, boxed::Box}; //! //! let b = Bump::new(); //! //! let boxed: Box = Box::new_in(5, &b); //! let val: u8 = *boxed; //! ``` //! //! Running `Drop` implementations on bump-allocated values: //! //! ```rust //! use bumpalo::{Bump, boxed::Box}; //! use std::sync::atomic::{AtomicUsize, Ordering}; //! //! static NUM_DROPPED: AtomicUsize = AtomicUsize::new(0); //! //! struct CountDrops; //! //! impl Drop for CountDrops { //! fn drop(&mut self) { //! NUM_DROPPED.fetch_add(1, Ordering::SeqCst); //! } //! } //! //! // Create a new bump arena. //! 
let bump = Bump::new(); //! //! // Create a `CountDrops` inside the bump arena. //! let mut c = Box::new_in(CountDrops, &bump); //! //! // No `CountDrops` have been dropped yet. //! assert_eq!(NUM_DROPPED.load(Ordering::SeqCst), 0); //! //! // Drop our `Box`. //! drop(c); //! //! // Its `Drop` implementation was run, and so `NUM_DROPS` has been incremented. //! assert_eq!(NUM_DROPPED.load(Ordering::SeqCst), 1); //! ``` //! //! Creating a recursive data structure: //! //! ``` //! use bumpalo::{Bump, boxed::Box}; //! //! let b = Bump::new(); //! //! #[derive(Debug)] //! enum List<'a, T> { //! Cons(T, Box<'a, List<'a, T>>), //! Nil, //! } //! //! let list: List = List::Cons(1, Box::new_in(List::Cons(2, Box::new_in(List::Nil, &b)), &b)); //! println!("{:?}", list); //! ``` //! //! This will print `Cons(1, Cons(2, Nil))`. //! //! Recursive structures must be boxed, because if the definition of `Cons` //! looked like this: //! //! ```compile_fail,E0072 //! # enum List { //! Cons(T, List), //! # } //! ``` //! //! It wouldn't work. This is because the size of a `List` depends on how many //! elements are in the list, and so we don't know how much memory to allocate //! for a `Cons`. By introducing a [`Box`], which has a defined size, we know how //! big `Cons` needs to be. //! //! # Memory layout //! //! For non-zero-sized values, a [`Box`] will use the provided [`Bump`] allocator for //! its allocation. It is valid to convert both ways between a [`Box`] and a //! pointer allocated with the [`Bump`] allocator, given that the //! [`Layout`] used with the allocator is correct for the type. More precisely, //! a `value: *mut T` that has been allocated with the [`Bump`] allocator //! with `Layout::for_value(&*value)` may be converted into a box using //! [`Box::::from_raw(value)`]. Conversely, the memory backing a `value: *mut //! T` obtained from [`Box::::into_raw`] will be deallocated by the //! [`Bump`] allocator with [`Layout::for_value(&*value)`]. //! //! 
Note that roundtrip `Box::from_raw(Box::into_raw(b))` looses lifetime bound to the //! [`Bump`] immutable borrow which guarantees that allocator will not be reset //! and memory will not be freed. //! //! [dereferencing]: https://doc.rust-lang.org/std/ops/trait.Deref.html //! [`Box`]: struct.Box.html //! [`Box`]: struct.Box.html //! [`Box<'a, T>`]: struct.Box.html //! [`Box::::from_raw(value)`]: struct.Box.html#method.from_raw //! [`Box::::into_raw`]: struct.Box.html#method.into_raw //! [`Bump`]: ../struct.Bump.html //! [`Layout`]: https://doc.rust-lang.org/std/alloc/struct.Layout.html //! [`Layout::for_value(&*value)`]: https://doc.rust-lang.org/std/alloc/struct.Layout.html#method.for_value use { crate::Bump, { core::{ any::Any, borrow, cmp::Ordering, convert::TryFrom, future::Future, hash::{Hash, Hasher}, iter::FusedIterator, mem, ops::{Deref, DerefMut}, pin::Pin, task::{Context, Poll}, }, core_alloc::fmt, }, }; /// An owned pointer to a bump-allocated `T` value, that runs `Drop` /// implementations. /// /// See the [module-level documentation][crate::boxed] for more details. #[repr(transparent)] pub struct Box<'a, T: ?Sized>(&'a mut T); impl<'a, T> Box<'a, T> { /// Allocates memory on the heap and then places `x` into it. /// /// This doesn't actually allocate if `T` is zero-sized. /// /// # Examples /// /// ``` /// use bumpalo::{Bump, boxed::Box}; /// /// let b = Bump::new(); /// /// let five = Box::new_in(5, &b); /// ``` #[inline(always)] pub fn new_in(x: T, a: &'a Bump) -> Box<'a, T> { Box(a.alloc(x)) } /// Constructs a new `Pin>`. If `T` does not implement `Unpin`, then /// `x` will be pinned in memory and unable to be moved. #[inline(always)] pub fn pin_in(x: T, a: &'a Bump) -> Pin> { Box(a.alloc(x)).into() } } impl<'a, T: ?Sized> Box<'a, T> { /// Constructs a box from a raw pointer. /// /// After calling this function, the raw pointer is owned by the /// resulting `Box`. 
Specifically, the `Box` destructor will call /// the destructor of `T` and free the allocated memory. For this /// to be safe, the memory must have been allocated in accordance /// with the [memory layout] used by `Box` . /// /// # Safety /// /// This function is unsafe because improper use may lead to /// memory problems. For example, a double-free may occur if the /// function is called twice on the same raw pointer. /// /// # Examples /// Recreate a `Box` which was previously converted to a raw pointer /// using [`Box::into_raw`]: /// ``` /// use bumpalo::{Bump, boxed::Box}; /// /// let b = Bump::new(); /// /// let x = Box::new_in(5, &b); /// let ptr = Box::into_raw(x); /// let x = unsafe { Box::from_raw(ptr) }; // Note that new `x`'s lifetime is unbound. It must be bound to the `b` immutable borrow before `b` is reset. /// ``` /// Manually create a `Box` from scratch by using the bump allocator: /// ``` /// use std::alloc::{alloc, Layout}; /// use bumpalo::{Bump, boxed::Box}; /// /// let b = Bump::new(); /// /// unsafe { /// let ptr = b.alloc_layout(Layout::new::()).as_ptr() as *mut i32; /// *ptr = 5; /// let x = Box::from_raw(ptr); // Note that `x`'s lifetime is unbound. It must be bound to the `b` immutable borrow before `b` is reset. /// } /// ``` /// /// [memory layout]: https://doc.rust-lang.org/std/boxed/index.html#memory-layout /// [`Layout`]: https://doc.rust-lang.org/std/alloc/struct.Layout.html /// [`Box::into_raw`]: https://doc.rust-lang.org/std/boxed/struct.Box.html#method.into_raw #[inline] pub unsafe fn from_raw(raw: *mut T) -> Self { Box(&mut *raw) } /// Consumes the `Box`, returning a wrapped raw pointer. /// /// The pointer will be properly aligned and non-null. /// /// After calling this function, the caller is responsible for the /// value previously managed by the `Box`. In particular, the /// caller should properly destroy `T`. 
The easiest way to /// do this is to convert the raw pointer back into a `Box` with the /// [`Box::from_raw`] function, allowing the `Box` destructor to perform /// the cleanup. /// /// Note: this is an associated function, which means that you have /// to call it as `Box::into_raw(b)` instead of `b.into_raw()`. This /// is so that there is no conflict with a method on the inner type. /// /// # Examples /// Converting the raw pointer back into a `Box` with [`Box::from_raw`] /// for automatic cleanup: /// ``` /// use bumpalo::{Bump, boxed::Box}; /// /// let b = Bump::new(); /// /// let x = Box::new_in(String::from("Hello"), &b); /// let ptr = Box::into_raw(x); /// let x = unsafe { Box::from_raw(ptr) }; // Note that new `x`'s lifetime is unbound. It must be bound to the `b` immutable borrow before `b` is reset. /// ``` /// Manual cleanup by explicitly running the destructor: /// ``` /// use std::ptr; /// use bumpalo::{Bump, boxed::Box}; /// /// let b = Bump::new(); /// /// let mut x = Box::new_in(String::from("Hello"), &b); /// let p = Box::into_raw(x); /// unsafe { /// ptr::drop_in_place(p); /// } /// ``` /// /// [memory layout]: index.html#memory-layout /// [`Box::from_raw`]: struct.Box.html#method.from_raw #[inline] pub fn into_raw(b: Box<'a, T>) -> *mut T { let ptr = b.0 as *mut T; mem::forget(b); ptr } /// Consumes and leaks the `Box`, returning a mutable reference, /// `&'a mut T`. Note that the type `T` must outlive the chosen lifetime /// `'a`. If the type has only static references, or none at all, then this /// may be chosen to be `'static`. /// /// This function is mainly useful for data that lives for the remainder of /// the program's life. Dropping the returned reference will cause a memory /// leak. If this is not acceptable, the reference should first be wrapped /// with the [`Box::from_raw`] function producing a `Box`. This `Box` can /// then be dropped which will properly destroy `T` and release the /// allocated memory. 
/// /// Note: this is an associated function, which means that you have /// to call it as `Box::leak(b)` instead of `b.leak()`. This /// is so that there is no conflict with a method on the inner type. /// /// [`Box::from_raw`]: struct.Box.html#method.from_raw /// /// # Examples /// /// Simple usage: /// /// ``` /// use bumpalo::{Bump, boxed::Box}; /// /// let b = Bump::new(); /// /// let x = Box::new_in(41, &b); /// let reference: &mut usize = Box::leak(x); /// *reference += 1; /// assert_eq!(*reference, 42); /// ``` /// ///``` /// # #[cfg(feature = "collections")] /// # { ///use bumpalo::{Bump, boxed::Box, vec}; /// ///let b = Bump::new(); /// ///let x = vec![in &b; 1, 2, 3].into_boxed_slice(); ///let reference = Box::leak(x); ///reference[0] = 4; ///assert_eq!(*reference, [4, 2, 3]); /// # } ///``` #[inline] pub fn leak(b: Box<'a, T>) -> &'a mut T { unsafe { &mut *Box::into_raw(b) } } } impl<'a, T: ?Sized> Drop for Box<'a, T> { fn drop(&mut self) { unsafe { // `Box` owns value of `T`, but not memory behind it. core::ptr::drop_in_place(self.0); } } } impl<'a, T> Default for Box<'a, [T]> { fn default() -> Box<'a, [T]> { // It should be OK to `drop_in_place` empty slice of anything. Box(&mut []) } } impl<'a> Default for Box<'a, str> { fn default() -> Box<'a, str> { // Empty slice is valid string. // It should be OK to `drop_in_place` empty str. 
unsafe { Box::from_raw(Box::into_raw(Box::<[u8]>::default()) as *mut str) } } } impl<'a, 'b, T: ?Sized + PartialEq> PartialEq> for Box<'a, T> { #[inline] fn eq(&self, other: &Box<'b, T>) -> bool { PartialEq::eq(&**self, &**other) } #[inline] fn ne(&self, other: &Box<'b, T>) -> bool { PartialEq::ne(&**self, &**other) } } impl<'a, 'b, T: ?Sized + PartialOrd> PartialOrd> for Box<'a, T> { #[inline] fn partial_cmp(&self, other: &Box<'b, T>) -> Option { PartialOrd::partial_cmp(&**self, &**other) } #[inline] fn lt(&self, other: &Box<'b, T>) -> bool { PartialOrd::lt(&**self, &**other) } #[inline] fn le(&self, other: &Box<'b, T>) -> bool { PartialOrd::le(&**self, &**other) } #[inline] fn ge(&self, other: &Box<'b, T>) -> bool { PartialOrd::ge(&**self, &**other) } #[inline] fn gt(&self, other: &Box<'b, T>) -> bool { PartialOrd::gt(&**self, &**other) } } impl<'a, T: ?Sized + Ord> Ord for Box<'a, T> { #[inline] fn cmp(&self, other: &Box<'a, T>) -> Ordering { Ord::cmp(&**self, &**other) } } impl<'a, T: ?Sized + Eq> Eq for Box<'a, T> {} impl<'a, T: ?Sized + Hash> Hash for Box<'a, T> { fn hash(&self, state: &mut H) { (**self).hash(state); } } impl<'a, T: ?Sized + Hasher> Hasher for Box<'a, T> { fn finish(&self) -> u64 { (**self).finish() } fn write(&mut self, bytes: &[u8]) { (**self).write(bytes) } fn write_u8(&mut self, i: u8) { (**self).write_u8(i) } fn write_u16(&mut self, i: u16) { (**self).write_u16(i) } fn write_u32(&mut self, i: u32) { (**self).write_u32(i) } fn write_u64(&mut self, i: u64) { (**self).write_u64(i) } fn write_u128(&mut self, i: u128) { (**self).write_u128(i) } fn write_usize(&mut self, i: usize) { (**self).write_usize(i) } fn write_i8(&mut self, i: i8) { (**self).write_i8(i) } fn write_i16(&mut self, i: i16) { (**self).write_i16(i) } fn write_i32(&mut self, i: i32) { (**self).write_i32(i) } fn write_i64(&mut self, i: i64) { (**self).write_i64(i) } fn write_i128(&mut self, i: i128) { (**self).write_i128(i) } fn write_isize(&mut self, i: isize) { 
(**self).write_isize(i) } } impl<'a, T: ?Sized> From> for Pin> { /// Converts a `Box` into a `Pin>` /// /// This conversion does not allocate on the heap and happens in place. fn from(boxed: Box<'a, T>) -> Self { // It's not possible to move or replace the insides of a `Pin>` // when `T: !Unpin`, so it's safe to pin it directly without any // additional requirements. unsafe { Pin::new_unchecked(boxed) } } } impl<'a> Box<'a, dyn Any> { #[inline] /// Attempt to downcast the box to a concrete type. /// /// # Examples /// /// ``` /// use std::any::Any; /// /// fn print_if_string(value: Box) { /// if let Ok(string) = value.downcast::() { /// println!("String ({}): {}", string.len(), string); /// } /// } /// /// let my_string = "Hello World".to_string(); /// print_if_string(Box::new(my_string)); /// print_if_string(Box::new(0i8)); /// ``` pub fn downcast(self) -> Result, Box<'a, dyn Any>> { if self.is::() { unsafe { let raw: *mut dyn Any = Box::into_raw(self); Ok(Box::from_raw(raw as *mut T)) } } else { Err(self) } } } impl<'a> Box<'a, dyn Any + Send> { #[inline] /// Attempt to downcast the box to a concrete type. 
/// /// # Examples /// /// ``` /// use std::any::Any; /// /// fn print_if_string(value: Box) { /// if let Ok(string) = value.downcast::() { /// println!("String ({}): {}", string.len(), string); /// } /// } /// /// let my_string = "Hello World".to_string(); /// print_if_string(Box::new(my_string)); /// print_if_string(Box::new(0i8)); /// ``` pub fn downcast(self) -> Result, Box<'a, dyn Any + Send>> { if self.is::() { unsafe { let raw: *mut (dyn Any + Send) = Box::into_raw(self); Ok(Box::from_raw(raw as *mut T)) } } else { Err(self) } } } impl<'a, T: fmt::Display + ?Sized> fmt::Display for Box<'a, T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(&**self, f) } } impl<'a, T: fmt::Debug + ?Sized> fmt::Debug for Box<'a, T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Debug::fmt(&**self, f) } } impl<'a, T: ?Sized> fmt::Pointer for Box<'a, T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { // It's not possible to extract the inner Uniq directly from the Box, // instead we cast it to a *const which aliases the Unique let ptr: *const T = &**self; fmt::Pointer::fmt(&ptr, f) } } impl<'a, T: ?Sized> Deref for Box<'a, T> { type Target = T; fn deref(&self) -> &T { &*self.0 } } impl<'a, T: ?Sized> DerefMut for Box<'a, T> { fn deref_mut(&mut self) -> &mut T { self.0 } } impl<'a, I: Iterator + ?Sized> Iterator for Box<'a, I> { type Item = I::Item; fn next(&mut self) -> Option { (**self).next() } fn size_hint(&self) -> (usize, Option) { (**self).size_hint() } fn nth(&mut self, n: usize) -> Option { (**self).nth(n) } fn last(self) -> Option { #[inline] fn some(_: Option, x: T) -> Option { Some(x) } self.fold(None, some) } } impl<'a, I: DoubleEndedIterator + ?Sized> DoubleEndedIterator for Box<'a, I> { fn next_back(&mut self) -> Option { (**self).next_back() } fn nth_back(&mut self, n: usize) -> Option { (**self).nth_back(n) } } impl<'a, I: ExactSizeIterator + ?Sized> ExactSizeIterator for Box<'a, I> { fn len(&self) -> 
usize { (**self).len() } } impl<'a, I: FusedIterator + ?Sized> FusedIterator for Box<'a, I> {} #[cfg(feature = "collections")] impl<'a, A> Box<'a, [A]> { /// Creates a value from an iterator. /// This method is adapted version of `FromIterator::from_iter`. /// It cannot be made as that trait implementation given different signature. /// /// # Examples /// /// Basic usage: /// ``` /// use bumpalo::{Bump, boxed::Box, vec}; /// /// let b = Bump::new(); /// /// let five_fives = std::iter::repeat(5).take(5); /// let slice = Box::from_iter_in(five_fives, &b); /// assert_eq!(vec![in &b; 5, 5, 5, 5, 5], &*slice); /// ``` pub fn from_iter_in>(iter: T, a: &'a Bump) -> Self { use crate::collections::Vec; let mut vec = Vec::new_in(a); vec.extend(iter); vec.into_boxed_slice() } } impl<'a, T: ?Sized> borrow::Borrow for Box<'a, T> { fn borrow(&self) -> &T { &**self } } impl<'a, T: ?Sized> borrow::BorrowMut for Box<'a, T> { fn borrow_mut(&mut self) -> &mut T { &mut **self } } impl<'a, T: ?Sized> AsRef for Box<'a, T> { fn as_ref(&self) -> &T { &**self } } impl<'a, T: ?Sized> AsMut for Box<'a, T> { fn as_mut(&mut self) -> &mut T { &mut **self } } impl<'a, T: ?Sized> Unpin for Box<'a, T> {} impl<'a, F: ?Sized + Future + Unpin> Future for Box<'a, F> { type Output = F::Output; fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { F::poll(Pin::new(&mut *self), cx) } } macro_rules! array_impls { ($($N: expr)+) => { $( /// This impl replaces unsize coersion. impl<'a, T> From> for Box<'a, [T]> { fn from(mut arr: Box<'a, [T; $N]>) -> Box<'a, [T]> { let ptr = core::ptr::slice_from_raw_parts_mut(arr.as_mut_ptr(), $N); mem::forget(arr); unsafe { Box::from_raw(ptr) } } } /// This impl replaces unsize coersion. 
impl<'a, T> TryFrom> for Box<'a, [T; $N]> { type Error = Box<'a, [T]>; fn try_from(mut slice: Box<'a, [T]>) -> Result, Box<'a, [T]>> { if slice.len() == $N { let ptr = slice.as_mut_ptr() as *mut [T; $N]; mem::forget(slice); Ok(unsafe { Box::from_raw(ptr) }) } else { Err(slice) } } } )+ } } array_impls! { 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 } bumpalo-3.7.0/src/collections/mod.rs000064400000000000000000000044750000000000000155130ustar 00000000000000// Copyright 2018 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 or the MIT license // , at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Collection types. #![allow(deprecated)] mod raw_vec; pub mod vec; pub use self::vec::Vec; mod str; pub mod string; pub use self::string::String; // pub mod binary_heap; // mod btree; // pub mod linked_list; // pub mod vec_deque; // pub mod btree_map { // //! A map based on a B-Tree. // pub use super::btree::map::*; // } // pub mod btree_set { // //! A set based on a B-Tree. // pub use super::btree::set::*; // } // #[doc(no_inline)] // pub use self::binary_heap::BinaryHeap; // #[doc(no_inline)] // pub use self::btree_map::BTreeMap; // #[doc(no_inline)] // pub use self::btree_set::BTreeSet; // #[doc(no_inline)] // pub use self::linked_list::LinkedList; // #[doc(no_inline)] // pub use self::vec_deque::VecDeque; use crate::alloc::{AllocErr, LayoutErr}; /// Augments `AllocErr` with a CapacityOverflow variant. #[derive(Clone, PartialEq, Eq, Debug)] // #[unstable(feature = "try_reserve", reason = "new API", issue="48043")] pub enum CollectionAllocErr { /// Error due to the computed capacity exceeding the collection's maximum /// (usually `isize::MAX` bytes). 
CapacityOverflow, /// Error due to the allocator (see the `AllocErr` type's docs). AllocErr, } // #[unstable(feature = "try_reserve", reason = "new API", issue="48043")] impl From for CollectionAllocErr { #[inline] fn from(AllocErr: AllocErr) -> Self { CollectionAllocErr::AllocErr } } // #[unstable(feature = "try_reserve", reason = "new API", issue="48043")] impl From for CollectionAllocErr { #[inline] fn from(_: LayoutErr) -> Self { CollectionAllocErr::CapacityOverflow } } // /// An intermediate trait for specialization of `Extend`. // #[doc(hidden)] // trait SpecExtend { // /// Extends `self` with the contents of the given iterator. // fn spec_extend(&mut self, iter: I); // } bumpalo-3.7.0/src/collections/raw_vec.rs000064400000000000000000000663510000000000000163630ustar 00000000000000// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 or the MIT license // , at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![allow(unstable_name_collisions)] #![allow(dead_code)] use crate::Bump; use core::cmp; use core::mem; use core::ptr::{self, NonNull}; use crate::alloc::{handle_alloc_error, Alloc, Layout, UnstableLayoutMethods}; use crate::collections::CollectionAllocErr; use crate::collections::CollectionAllocErr::*; // use boxed::Box; /// A low-level utility for more ergonomically allocating, reallocating, and deallocating /// a buffer of memory on the heap without having to worry about all the corner cases /// involved. This type is excellent for building your own data structures like Vec and VecDeque. 
/// In particular: /// /// * Produces Unique::empty() on zero-sized types /// * Produces Unique::empty() on zero-length allocations /// * Catches all overflows in capacity computations (promotes them to "capacity overflow" panics) /// * Guards against 32-bit systems allocating more than isize::MAX bytes /// * Guards against overflowing your length /// * Aborts on OOM /// * Avoids freeing Unique::empty() /// * Contains a ptr::Unique and thus endows the user with all related benefits /// /// This type does not in anyway inspect the memory that it manages. When dropped it *will* /// free its memory, but it *won't* try to Drop its contents. It is up to the user of RawVec /// to handle the actual things *stored* inside of a RawVec. /// /// Note that a RawVec always forces its capacity to be usize::MAX for zero-sized types. /// This enables you to use capacity growing logic catch the overflows in your length /// that might occur with zero-sized types. /// /// However this means that you need to be careful when round-tripping this type /// with a `Box<[T]>`: `cap()` won't yield the len. However `with_capacity`, /// `shrink_to_fit`, and `from_box` will actually set RawVec's private capacity /// field. This allows zero-sized types to not be special-cased by consumers of /// this type. #[allow(missing_debug_implementations)] pub struct RawVec<'a, T> { ptr: NonNull, cap: usize, a: &'a Bump, } impl<'a, T> RawVec<'a, T> { /// Like `new` but parameterized over the choice of allocator for /// the returned RawVec. pub fn new_in(a: &'a Bump) -> Self { // !0 is usize::MAX. This branch should be stripped at compile time. 
// FIXME(mark-i-m): use this line when `if`s are allowed in `const` //let cap = if mem::size_of::() == 0 { !0 } else { 0 }; // Unique::empty() doubles as "unallocated" and "zero-sized allocation" RawVec { ptr: unsafe { NonNull::new_unchecked(mem::align_of::() as *mut T) }, // FIXME(mark-i-m): use `cap` when ifs are allowed in const cap: [0, !0][(mem::size_of::() == 0) as usize], a, } } /// Like `with_capacity` but parameterized over the choice of /// allocator for the returned RawVec. #[inline] pub fn with_capacity_in(cap: usize, a: &'a Bump) -> Self { RawVec::allocate_in(cap, false, a) } /// Like `with_capacity_zeroed` but parameterized over the choice /// of allocator for the returned RawVec. #[inline] pub fn with_capacity_zeroed_in(cap: usize, a: &'a Bump) -> Self { RawVec::allocate_in(cap, true, a) } fn allocate_in(cap: usize, zeroed: bool, mut a: &'a Bump) -> Self { unsafe { let elem_size = mem::size_of::(); let alloc_size = cap .checked_mul(elem_size) .unwrap_or_else(|| capacity_overflow()); alloc_guard(alloc_size).unwrap_or_else(|_| capacity_overflow()); // handles ZSTs and `cap = 0` alike let ptr = if alloc_size == 0 { NonNull::::dangling() } else { let align = mem::align_of::(); let layout = Layout::from_size_align(alloc_size, align).unwrap(); let result = if zeroed { a.alloc_zeroed(layout) } else { Alloc::alloc(&mut a, layout) }; match result { Ok(ptr) => ptr.cast(), Err(_) => handle_alloc_error(layout), } }; RawVec { ptr, cap, a } } } } impl<'a, T> RawVec<'a, T> { /// Reconstitutes a RawVec from a pointer, capacity, and allocator. /// /// # Undefined Behavior /// /// The ptr must be allocated (via the given allocator `a`), and with the given capacity. The /// capacity cannot exceed `isize::MAX` (only a concern on 32-bit systems). /// If the ptr and capacity come from a RawVec created via `a`, then this is guaranteed. 
pub unsafe fn from_raw_parts_in(ptr: *mut T, cap: usize, a: &'a Bump) -> Self { RawVec { ptr: NonNull::new_unchecked(ptr), cap, a, } } } impl<'a, T> RawVec<'a, T> { /// Gets a raw pointer to the start of the allocation. Note that this is /// Unique::empty() if `cap = 0` or T is zero-sized. In the former case, you must /// be careful. pub fn ptr(&self) -> *mut T { self.ptr.as_ptr() } /// Gets the capacity of the allocation. /// /// This will always be `usize::MAX` if `T` is zero-sized. #[inline(always)] pub fn cap(&self) -> usize { if mem::size_of::() == 0 { !0 } else { self.cap } } /// Returns a shared reference to the allocator backing this RawVec. pub fn bump(&self) -> &'a Bump { self.a } fn current_layout(&self) -> Option { if self.cap == 0 { None } else { // We have an allocated chunk of memory, so we can bypass runtime // checks to get our current layout. unsafe { let align = mem::align_of::(); let size = mem::size_of::() * self.cap; Some(Layout::from_size_align_unchecked(size, align)) } } } /// Doubles the size of the type's backing allocation. This is common enough /// to want to do that it's easiest to just have a dedicated method. Slightly /// more efficient logic can be provided for this than the general case. /// /// This function is ideal for when pushing elements one-at-a-time because /// you don't need to incur the costs of the more general computations /// reserve needs to do to guard against overflow. You do however need to /// manually check if your `len == cap`. /// /// # Panics /// /// * Panics if T is zero-sized on the assumption that you managed to exhaust /// all `usize::MAX` slots in your imaginary buffer. /// * Panics on 32-bit platforms if the requested capacity exceeds /// `isize::MAX` bytes. 
/// /// # Aborts /// /// Aborts on OOM /// /// # Examples /// /// ```ignore /// # #![feature(alloc, raw_vec_internals)] /// # extern crate alloc; /// # use std::ptr; /// # use alloc::raw_vec::RawVec; /// struct MyVec { /// buf: RawVec, /// len: usize, /// } /// /// impl MyVec { /// pub fn push(&mut self, elem: T) { /// if self.len == self.buf.cap() { self.buf.double(); } /// // double would have aborted or panicked if the len exceeded /// // `isize::MAX` so this is safe to do unchecked now. /// unsafe { /// ptr::write(self.buf.ptr().add(self.len), elem); /// } /// self.len += 1; /// } /// } /// # fn main() { /// # let mut vec = MyVec { buf: RawVec::new(), len: 0 }; /// # vec.push(1); /// # } /// ``` #[inline(never)] #[cold] pub fn double(&mut self) { unsafe { let elem_size = mem::size_of::(); // since we set the capacity to usize::MAX when elem_size is // 0, getting to here necessarily means the RawVec is overfull. assert!(elem_size != 0, "capacity overflow"); let (new_cap, uniq) = match self.current_layout() { Some(cur) => { // Since we guarantee that we never allocate more than // isize::MAX bytes, `elem_size * self.cap <= isize::MAX` as // a precondition, so this can't overflow. Additionally the // alignment will never be too large as to "not be // satisfiable", so `Layout::from_size_align` will always // return `Some`. // // tl;dr; we bypass runtime checks due to dynamic assertions // in this module, allowing us to use // `from_size_align_unchecked`. 
let new_cap = 2 * self.cap; let new_size = new_cap * elem_size; alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow()); let ptr_res = self.a.realloc(self.ptr.cast(), cur, new_size); match ptr_res { Ok(ptr) => (new_cap, ptr.cast()), Err(_) => handle_alloc_error(Layout::from_size_align_unchecked( new_size, cur.align(), )), } } None => { // skip to 4 because tiny Vec's are dumb; but not if that // would cause overflow let new_cap = if elem_size > (!0) / 8 { 1 } else { 4 }; match self.a.alloc_array::(new_cap) { Ok(ptr) => (new_cap, ptr), Err(_) => handle_alloc_error(Layout::array::(new_cap).unwrap()), } } }; self.ptr = uniq; self.cap = new_cap; } } /// Attempts to double the size of the type's backing allocation in place. This is common /// enough to want to do that it's easiest to just have a dedicated method. Slightly /// more efficient logic can be provided for this than the general case. /// /// Returns true if the reallocation attempt has succeeded, or false otherwise. /// /// # Panics /// /// * Panics if T is zero-sized on the assumption that you managed to exhaust /// all `usize::MAX` slots in your imaginary buffer. /// * Panics on 32-bit platforms if the requested capacity exceeds /// `isize::MAX` bytes. #[inline(never)] #[cold] pub fn double_in_place(&mut self) -> bool { unsafe { let elem_size = mem::size_of::(); let old_layout = match self.current_layout() { Some(layout) => layout, None => return false, // nothing to double }; // since we set the capacity to usize::MAX when elem_size is // 0, getting to here necessarily means the RawVec is overfull. assert!(elem_size != 0, "capacity overflow"); // Since we guarantee that we never allocate more than isize::MAX // bytes, `elem_size * self.cap <= isize::MAX` as a precondition, so // this can't overflow. // // Similarly like with `double` above we can go straight to // `Layout::from_size_align_unchecked` as we know this won't // overflow and the alignment is sufficiently small. 
let new_cap = 2 * self.cap; let new_size = new_cap * elem_size; alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow()); match self.a.grow_in_place(self.ptr.cast(), old_layout, new_size) { Ok(_) => { // We can't directly divide `size`. self.cap = new_cap; true } Err(_) => false, } } } /// The same as `reserve_exact`, but returns on errors instead of panicking or aborting. pub fn try_reserve_exact( &mut self, used_cap: usize, needed_extra_cap: usize, ) -> Result<(), CollectionAllocErr> { self.reserve_internal(used_cap, needed_extra_cap, Fallible, Exact) } /// Ensures that the buffer contains at least enough space to hold /// `used_cap + needed_extra_cap` elements. If it doesn't already, /// will reallocate the minimum possible amount of memory necessary. /// Generally this will be exactly the amount of memory necessary, /// but in principle the allocator is free to give back more than /// we asked for. /// /// If `used_cap` exceeds `self.cap()`, this may fail to actually allocate /// the requested space. This is not really unsafe, but the unsafe /// code *you* write that relies on the behavior of this function may break. /// /// # Panics /// /// * Panics if the requested capacity exceeds `usize::MAX` bytes. /// * Panics on 32-bit platforms if the requested capacity exceeds /// `isize::MAX` bytes. /// /// # Aborts /// /// Aborts on OOM pub fn reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize) { match self.reserve_internal(used_cap, needed_extra_cap, Infallible, Exact) { Err(CapacityOverflow) => capacity_overflow(), Err(AllocErr) => unreachable!(), Ok(()) => { /* yay */ } } } /// Calculates the buffer's new size given that it'll hold `used_cap + /// needed_extra_cap` elements. This logic is used in amortized reserve methods. /// Returns `(new_capacity, new_alloc_size)`. 
fn amortized_new_size( &self, used_cap: usize, needed_extra_cap: usize, ) -> Result { // Nothing we can really do about these checks :( let required_cap = used_cap .checked_add(needed_extra_cap) .ok_or(CapacityOverflow)?; // Cannot overflow, because `cap <= isize::MAX`, and type of `cap` is `usize`. let double_cap = self.cap * 2; // `double_cap` guarantees exponential growth. Ok(cmp::max(double_cap, required_cap)) } /// The same as `reserve`, but returns on errors instead of panicking or aborting. pub fn try_reserve( &mut self, used_cap: usize, needed_extra_cap: usize, ) -> Result<(), CollectionAllocErr> { self.reserve_internal(used_cap, needed_extra_cap, Fallible, Amortized) } /// Ensures that the buffer contains at least enough space to hold /// `used_cap + needed_extra_cap` elements. If it doesn't already have /// enough capacity, will reallocate enough space plus comfortable slack /// space to get amortized `O(1)` behavior. Will limit this behavior /// if it would needlessly cause itself to panic. /// /// If `used_cap` exceeds `self.cap()`, this may fail to actually allocate /// the requested space. This is not really unsafe, but the unsafe /// code *you* write that relies on the behavior of this function may break. /// /// This is ideal for implementing a bulk-push operation like `extend`. /// /// # Panics /// /// * Panics if the requested capacity exceeds `usize::MAX` bytes. /// * Panics on 32-bit platforms if the requested capacity exceeds /// `isize::MAX` bytes. /// /// # Aborts /// /// Aborts on OOM /// /// # Examples /// /// ```ignore /// # #![feature(alloc, raw_vec_internals)] /// # extern crate alloc; /// # use std::ptr; /// # use alloc::raw_vec::RawVec; /// struct MyVec { /// buf: RawVec, /// len: usize, /// } /// /// impl MyVec { /// pub fn push_all(&mut self, elems: &[T]) { /// self.buf.reserve(self.len, elems.len()); /// // reserve would have aborted or panicked if the len exceeded /// // `isize::MAX` so this is safe to do unchecked now. 
/// for x in elems { /// unsafe { /// ptr::write(self.buf.ptr().add(self.len), x.clone()); /// } /// self.len += 1; /// } /// } /// } /// # fn main() { /// # let mut vector = MyVec { buf: RawVec::new(), len: 0 }; /// # vector.push_all(&[1, 3, 5, 7, 9]); /// # } /// ``` pub fn reserve(&mut self, used_cap: usize, needed_extra_cap: usize) { match self.reserve_internal(used_cap, needed_extra_cap, Infallible, Amortized) { Err(CapacityOverflow) => capacity_overflow(), Err(AllocErr) => unreachable!(), Ok(()) => { /* yay */ } } } /// Attempts to ensure that the buffer contains at least enough space to hold /// `used_cap + needed_extra_cap` elements. If it doesn't already have /// enough capacity, will reallocate in place enough space plus comfortable slack /// space to get amortized `O(1)` behavior. Will limit this behaviour /// if it would needlessly cause itself to panic. /// /// If `used_cap` exceeds `self.cap()`, this may fail to actually allocate /// the requested space. This is not really unsafe, but the unsafe /// code *you* write that relies on the behavior of this function may break. /// /// Returns true if the reallocation attempt has succeeded, or false otherwise. /// /// # Panics /// /// * Panics if the requested capacity exceeds `usize::MAX` bytes. /// * Panics on 32-bit platforms if the requested capacity exceeds /// `isize::MAX` bytes. pub fn reserve_in_place(&mut self, used_cap: usize, needed_extra_cap: usize) -> bool { unsafe { // NOTE: we don't early branch on ZSTs here because we want this // to actually catch "asking for more than usize::MAX" in that case. // If we make it past the first branch then we are guaranteed to // panic. // Don't actually need any more capacity. If the current `cap` is 0, we can't // reallocate in place. 
// Wrapping in case they give a bad `used_cap` let old_layout = match self.current_layout() { Some(layout) => layout, None => return false, }; if self.cap().wrapping_sub(used_cap) >= needed_extra_cap { return false; } let new_cap = self .amortized_new_size(used_cap, needed_extra_cap) .unwrap_or_else(|_| capacity_overflow()); // Here, `cap < used_cap + needed_extra_cap <= new_cap` // (regardless of whether `self.cap - used_cap` wrapped). // Therefore we can safely call grow_in_place. let new_layout = Layout::new::().repeat(new_cap).unwrap().0; // FIXME: may crash and burn on over-reserve alloc_guard(new_layout.size()).unwrap_or_else(|_| capacity_overflow()); match self .a .grow_in_place(self.ptr.cast(), old_layout, new_layout.size()) { Ok(_) => { self.cap = new_cap; true } Err(_) => false, } } } /// Shrinks the allocation down to the specified amount. If the given amount /// is 0, actually completely deallocates. /// /// # Panics /// /// Panics if the given amount is *larger* than the current capacity. /// /// # Aborts /// /// Aborts on OOM. pub fn shrink_to_fit(&mut self, amount: usize) { let elem_size = mem::size_of::(); // Set the `cap` because they might be about to promote to a `Box<[T]>` if elem_size == 0 { self.cap = amount; return; } // This check is my waterloo; it's the only thing Vec wouldn't have to do. assert!(self.cap >= amount, "Tried to shrink to a larger capacity"); if amount == 0 { // We want to create a new zero-length vector within the // same allocator. We use ptr::write to avoid an // erroneous attempt to drop the contents, and we use // ptr::read to sidestep condition against destructuring // types that implement Drop. unsafe { let a = self.a; self.dealloc_buffer(); ptr::write(self, RawVec::new_in(a)); } } else if self.cap != amount { unsafe { // We know here that our `amount` is greater than zero. 
This // implies, via the assert above, that capacity is also greater // than zero, which means that we've got a current layout that // "fits" // // We also know that `self.cap` is greater than `amount`, and // consequently we don't need runtime checks for creating either // layout let old_size = elem_size * self.cap; let new_size = elem_size * amount; let align = mem::align_of::(); let old_layout = Layout::from_size_align_unchecked(old_size, align); match self.a.realloc(self.ptr.cast(), old_layout, new_size) { Ok(p) => self.ptr = p.cast(), Err(_) => { handle_alloc_error(Layout::from_size_align_unchecked(new_size, align)) } } } self.cap = amount; } } } #[cfg(feature = "boxed")] impl<'a, T> RawVec<'a, T> { /// Converts the entire buffer into `Box<[T]>`. /// /// Note that this will correctly reconstitute any `cap` changes /// that may have been performed. (See description of type for details.) /// /// # Undefined Behavior /// /// All elements of `RawVec` must be initialized. Notice that /// the rules around uninitialized boxed values are not finalized yet, /// but until they are, it is advisable to avoid them. pub unsafe fn into_box(self) -> crate::boxed::Box<'a, [T]> { use crate::boxed::Box; // NOTE: not calling `cap()` here; actually using the real `cap` field! let slice = core::slice::from_raw_parts_mut(self.ptr(), self.cap); let output: Box<'a, [T]> = Box::from_raw(slice); mem::forget(self); output } } enum Fallibility { Fallible, Infallible, } use self::Fallibility::*; enum ReserveStrategy { Exact, Amortized, } use self::ReserveStrategy::*; impl<'a, T> RawVec<'a, T> { fn reserve_internal( &mut self, used_cap: usize, needed_extra_cap: usize, fallibility: Fallibility, strategy: ReserveStrategy, ) -> Result<(), CollectionAllocErr> { unsafe { use crate::alloc::AllocErr; // NOTE: we don't early branch on ZSTs here because we want this // to actually catch "asking for more than usize::MAX" in that case. 
// If we make it past the first branch then we are guaranteed to // panic. // Don't actually need any more capacity. // Wrapping in case they gave a bad `used_cap`. if self.cap().wrapping_sub(used_cap) >= needed_extra_cap { return Ok(()); } // Nothing we can really do about these checks :( let new_cap = match strategy { Exact => used_cap .checked_add(needed_extra_cap) .ok_or(CapacityOverflow)?, Amortized => self.amortized_new_size(used_cap, needed_extra_cap)?, }; let new_layout = Layout::array::(new_cap).map_err(|_| CapacityOverflow)?; alloc_guard(new_layout.size())?; let res = match self.current_layout() { Some(layout) => { debug_assert!(new_layout.align() == layout.align()); self.a.realloc(self.ptr.cast(), layout, new_layout.size()) } None => Alloc::alloc(&mut self.a, new_layout), }; if let (Err(AllocErr), Infallible) = (&res, fallibility) { handle_alloc_error(new_layout); } self.ptr = res?.cast(); self.cap = new_cap; Ok(()) } } } impl<'a, T> RawVec<'a, T> { /// Frees the memory owned by the RawVec *without* trying to Drop its contents. pub unsafe fn dealloc_buffer(&mut self) { let elem_size = mem::size_of::(); if elem_size != 0 { if let Some(layout) = self.current_layout() { self.a.dealloc(self.ptr.cast(), layout); } } } } impl<'a, T> Drop for RawVec<'a, T> { /// Frees the memory owned by the RawVec *without* trying to Drop its contents. fn drop(&mut self) { unsafe { self.dealloc_buffer(); } } } // We need to guarantee the following: // * We don't ever allocate `> isize::MAX` byte-size objects // * We don't overflow `usize::MAX` and actually allocate too little // // On 64-bit we just need to check for overflow since trying to allocate // `> isize::MAX` bytes will surely fail. On 32-bit and 16-bit we need to add // an extra guard for this in case we're running on a platform which can use // all 4GB in user-space. e.g. 
PAE or x32 #[inline] fn alloc_guard(alloc_size: usize) -> Result<(), CollectionAllocErr> { if mem::size_of::() < 8 && alloc_size > ::core::isize::MAX as usize { Err(CapacityOverflow) } else { Ok(()) } } // One central function responsible for reporting capacity overflows. This'll // ensure that the code generation related to these panics is minimal as there's // only one location which panics rather than a bunch throughout the module. fn capacity_overflow() -> ! { panic!("capacity overflow") } #[cfg(test)] mod tests { use super::*; #[test] fn reserve_does_not_overallocate() { let bump = Bump::new(); { let mut v: RawVec = RawVec::new_in(&bump); // First `reserve` allocates like `reserve_exact` v.reserve(0, 9); assert_eq!(9, v.cap()); } { let mut v: RawVec = RawVec::new_in(&bump); v.reserve(0, 7); assert_eq!(7, v.cap()); // 97 if more than double of 7, so `reserve` should work // like `reserve_exact`. v.reserve(7, 90); assert_eq!(97, v.cap()); } { let mut v: RawVec = RawVec::new_in(&bump); v.reserve(0, 12); assert_eq!(12, v.cap()); v.reserve(12, 3); // 3 is less than half of 12, so `reserve` must grow // exponentially. At the time of writing this test grow // factor is 2, so new capacity is 24, however, grow factor // of 1.5 is OK too. Hence `>= 18` in assert. assert!(v.cap() >= 12 + 12 / 2); } } } bumpalo-3.7.0/src/collections/str/lossy.rs000064400000000000000000000150050000000000000167040ustar 00000000000000// Copyright 2012-2017 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 or the MIT license // , at your // option. This file may not be copied, modified, or distributed // except according to those terms. use crate::collections::str as core_str; use core::char; use core::fmt; use core::fmt::Write; use core::str; /// Lossy UTF-8 string. 
pub struct Utf8Lossy<'a> { bytes: &'a [u8], } impl<'a> Utf8Lossy<'a> { pub fn from_bytes(bytes: &'a [u8]) -> Utf8Lossy<'a> { Utf8Lossy { bytes } } pub fn chunks(&self) -> Utf8LossyChunksIter<'a> { Utf8LossyChunksIter { source: &self.bytes, } } } /// Iterator over lossy UTF-8 string #[allow(missing_debug_implementations)] pub struct Utf8LossyChunksIter<'a> { source: &'a [u8], } #[derive(PartialEq, Eq, Debug)] pub struct Utf8LossyChunk<'a> { /// Sequence of valid chars. /// Can be empty between broken UTF-8 chars. pub valid: &'a str, /// Single broken char, empty if none. /// Empty iff iterator item is last. pub broken: &'a [u8], } impl<'a> Iterator for Utf8LossyChunksIter<'a> { type Item = Utf8LossyChunk<'a>; fn next(&mut self) -> Option> { if self.source.is_empty() { return None; } const TAG_CONT_U8: u8 = 128; fn unsafe_get(xs: &[u8], i: usize) -> u8 { unsafe { *xs.get_unchecked(i) } } fn safe_get(xs: &[u8], i: usize) -> u8 { if i >= xs.len() { 0 } else { unsafe_get(xs, i) } } let mut i = 0; while i < self.source.len() { let i_ = i; let byte = unsafe_get(self.source, i); i += 1; if byte < 128 { } else { let w = core_str::utf8_char_width(byte); macro_rules! 
error { () => {{ unsafe { let r = Utf8LossyChunk { valid: str::from_utf8_unchecked(&self.source[0..i_]), broken: &self.source[i_..i], }; self.source = &self.source[i..]; return Some(r); } }}; } match w { 2 => { if safe_get(self.source, i) & 192 != TAG_CONT_U8 { error!(); } i += 1; } 3 => { match (byte, safe_get(self.source, i)) { (0xE0, 0xA0..=0xBF) => (), (0xE1..=0xEC, 0x80..=0xBF) => (), (0xED, 0x80..=0x9F) => (), (0xEE..=0xEF, 0x80..=0xBF) => (), _ => { error!(); } } i += 1; if safe_get(self.source, i) & 192 != TAG_CONT_U8 { error!(); } i += 1; } 4 => { match (byte, safe_get(self.source, i)) { (0xF0, 0x90..=0xBF) => (), (0xF1..=0xF3, 0x80..=0xBF) => (), (0xF4, 0x80..=0x8F) => (), _ => { error!(); } } i += 1; if safe_get(self.source, i) & 192 != TAG_CONT_U8 { error!(); } i += 1; if safe_get(self.source, i) & 192 != TAG_CONT_U8 { error!(); } i += 1; } _ => { error!(); } } } } let r = Utf8LossyChunk { valid: unsafe { str::from_utf8_unchecked(self.source) }, broken: &[], }; self.source = &[]; Some(r) } } impl<'a> fmt::Display for Utf8Lossy<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { // If we're the empty string then our iterator won't actually yield // anything, so perform the formatting manually if self.bytes.is_empty() { return "".fmt(f); } for Utf8LossyChunk { valid, broken } in self.chunks() { // If we successfully decoded the whole chunk as a valid string then // we can return a direct formatting of the string which will also // respect various formatting flags if possible. if valid.len() == self.bytes.len() { assert!(broken.is_empty()); return valid.fmt(f); } f.write_str(valid)?; if !broken.is_empty() { f.write_char(char::REPLACEMENT_CHARACTER)?; } } Ok(()) } } impl<'a> fmt::Debug for Utf8Lossy<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_char('"')?; for Utf8LossyChunk { valid, broken } in self.chunks() { // Valid part. // Here we partially parse UTF-8 again which is suboptimal. 
{ let mut from = 0; for (i, c) in valid.char_indices() { let esc = c.escape_debug(); // If char needs escaping, flush backlog so far and write, else skip if esc.len() != 1 { f.write_str(&valid[from..i])?; for c in esc { f.write_char(c)?; } from = i + c.len_utf8(); } } f.write_str(&valid[from..])?; } // Broken parts of string as hex escape. for &b in broken { write!(f, "\\x{:02x}", b)?; } } f.write_char('"') } } bumpalo-3.7.0/src/collections/str/mod.rs000064400000000000000000000026460000000000000163210ustar 00000000000000// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 or the MIT license // , at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! String manipulation //! //! For more details, see std::str #[allow(missing_docs)] pub mod lossy; // https://tools.ietf.org/html/rfc3629 #[rustfmt::skip] static UTF8_CHAR_WIDTH: [u8; 256] = [ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, // 0x1F 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, // 0x3F 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, // 0x5F 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, // 0x7F 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, // 0x9F 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, // 0xBF 0,0,2,2,2,2,2,2,2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2, // 0xDF 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3, // 0xEF 4,4,4,4,4,0,0,0,0,0,0,0,0,0,0,0, // 0xFF ]; /// Given a first byte, determines how many bytes are in this UTF-8 character. 
#[inline]
pub fn utf8_char_width(b: u8) -> usize {
    // Table lookup: 1 for ASCII, 2-4 for multi-byte lead bytes, 0 for bytes
    // that cannot begin a UTF-8 character.
    UTF8_CHAR_WIDTH[b as usize] as usize
}
bumpalo-3.7.0/src/collections/string.rs000064400000000000000000001702140000000000000162350ustar 00000000000000// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! A UTF-8 encoded, growable string.
//!
//! This module contains the [`String`] type and several error types that may
//! result from working with [`String`]s.
//!
//! # Examples
//!
//! You can create a new [`String`] from a string literal with [`String::from_str_in`]:
//!
//! ```
//! use bumpalo::{Bump, collections::String};
//!
//! let b = Bump::new();
//!
//! let s = String::from_str_in("world", &b);
//! ```
//!
//! You can create a new [`String`] from an existing one by concatenating with
//! `+`:
//!
//! [`String`]: struct.String.html
//! [`String::from_str_in`]: struct.String.html#method.from_str_in
//!
//! ```
//! use bumpalo::{Bump, collections::String};
//!
//! let s = "Hello".to_string();
//!
//! let message = s + " world!";
//! ```
//!
//! If you have a vector of valid UTF-8 bytes, you can make a [`String`] out of
//! it. You can do the reverse too.
//!
//! ```
//! use bumpalo::{Bump, collections::String};
//!
//! let b = Bump::new();
//!
//! let sparkle_heart = bumpalo::vec![in &b; 240, 159, 146, 150];
//!
//! // We know these bytes are valid, so we'll use `unwrap()`.
//! let sparkle_heart = String::from_utf8(sparkle_heart).unwrap();
//!
//! assert_eq!("💖", sparkle_heart);
//!
//! let bytes = sparkle_heart.into_bytes();
//!
//! assert_eq!(bytes, [240, 159, 146, 150]);
//!
``` use crate::collections::str::lossy; use crate::collections::vec::Vec; use crate::Bump; use core::borrow::{Borrow, BorrowMut}; use core::char::decode_utf16; use core::fmt; use core::hash; use core::iter::FusedIterator; use core::mem; use core::ops::Bound::{Excluded, Included, Unbounded}; use core::ops::{self, Add, AddAssign, Index, IndexMut, RangeBounds}; use core::ptr; use core::str::{self, Chars, Utf8Error}; use core_alloc::borrow::Cow; /// Like the `format!` macro for creating `std::string::String`s but for /// `bumpalo::collections::String`. /// /// # Examples /// /// ``` /// use bumpalo::Bump; /// /// let b = Bump::new(); /// /// let who = "World"; /// let s = bumpalo::format!(in &b, "Hello, {}!", who); /// assert_eq!(s, "Hello, World!") /// ``` #[macro_export] macro_rules! format { ( in $bump:expr, $fmt:expr, $($args:expr),* ) => {{ use $crate::core_alloc::fmt::Write; let bump = $bump; let mut s = $crate::collections::String::new_in(bump); let _ = write!(&mut s, $fmt, $($args),*); s }}; ( in $bump:expr, $fmt:expr, $($args:expr,)* ) => { $crate::format!(in $bump, $fmt, $($args),*) }; } /// A UTF-8 encoded, growable string. /// /// The `String` type is the most common string type that has ownership over the /// contents of the string. It has a close relationship with its borrowed /// counterpart, the primitive [`str`]. 
/// /// [`str`]: https://doc.rust-lang.org/nightly/std/primitive.str.html /// /// # Examples /// /// You can create a `String` from a literal string with [`String::from_iter_in`]: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let hello = String::from_str_in("Hello, world!", &b); /// ``` /// /// You can append a [`char`] to a `String` with the [`push`] method, and /// append a [`&str`] with the [`push_str`] method: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let mut hello = String::from_str_in("Hello, ", &b); /// /// hello.push('w'); /// hello.push_str("orld!"); /// ``` /// /// [`String::from_iter_in`]: #method.from_iter_in /// [`char`]: https://doc.rust-lang.org/nightly/std/primitive.char.html /// [`push`]: #method.push /// [`push_str`]: #method.push_str /// /// If you have a vector of UTF-8 bytes, you can create a `String` from it with /// the [`from_utf8`] method: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// // some bytes, in a vector /// let sparkle_heart = bumpalo::vec![in &b; 240, 159, 146, 150]; /// /// // We know these bytes are valid, so we'll use `unwrap()`. /// let sparkle_heart = String::from_utf8(sparkle_heart).unwrap(); /// /// assert_eq!("💖", sparkle_heart); /// ``` /// /// [`from_utf8`]: #method.from_utf8 /// /// # UTF-8 /// /// `String`s are always valid UTF-8. This has a few implications, the first of /// which is that if you need a non-UTF-8 string, consider [`OsString`]. It is /// similar, but without the UTF-8 constraint. The second implication is that /// you cannot index into a `String`: /// /// ```compile_fail,E0277 /// let s = "hello"; /// /// println!("The first letter of s is {}", s[0]); // ERROR!!! 
/// ```
///
/// [`OsString`]: https://doc.rust-lang.org/nightly/std/ffi/struct.OsString.html
///
/// Indexing is intended to be a constant-time operation, but UTF-8 encoding
/// does not allow us to do this. Furthermore, it's not clear what sort of
/// thing the index should return: a byte, a codepoint, or a grapheme cluster.
/// The [`bytes`] and [`chars`] methods return iterators over the first
/// two, respectively.
///
/// [`bytes`]: #method.bytes
/// [`chars`]: #method.chars
///
/// # Deref
///
/// `String`s implement [`Deref`]`<Target = str>`, and so inherit all of [`str`]'s
/// methods. In addition, this means that you can pass a `String` to a
/// function which takes a [`&str`] by using an ampersand (`&`):
///
/// ```
/// use bumpalo::{Bump, collections::String};
///
/// let b = Bump::new();
///
/// fn takes_str(s: &str) { }
///
/// let s = String::from_str_in("Hello", &b);
///
/// takes_str(&s);
/// ```
///
/// This will create a [`&str`] from the `String` and pass it in. This
/// conversion is very inexpensive, and so generally, functions will accept
/// [`&str`]s as arguments unless they need a `String` for some specific
/// reason.
///
/// In certain cases Rust doesn't have enough information to make this
/// conversion, known as [`Deref`] coercion. In the following example a string
/// slice [`&'a str`][`&str`] implements the trait `TraitExample`, and the function
/// `example_func` takes anything that implements the trait. In this case Rust
/// would need to make two implicit conversions, which Rust doesn't have the
/// means to do. For that reason, the following example will not compile.
///
/// ```compile_fail,E0277
/// use bumpalo::{Bump, collections::String};
///
/// trait TraitExample {}
///
/// impl<'a> TraitExample for &'a str {}
///
/// fn example_func<A: TraitExample>(example_arg: A) {}
///
/// let b = Bump::new();
/// let example_string = String::from_str_in("example_string", &b);
/// example_func(&example_string);
/// ```
///
/// There are two options that would work instead. The first would be to
/// change the line `example_func(&example_string);` to
/// `example_func(example_string.as_str());`, using the method [`as_str()`]
/// to explicitly extract the string slice containing the string. The second
/// way changes `example_func(&example_string);` to
/// `example_func(&*example_string);`. In this case we are dereferencing a
/// `String` to a [`str`][`&str`], then referencing the [`str`][`&str`] back to
/// [`&str`]. The second way is more idiomatic, however both work to do the
/// conversion explicitly rather than relying on the implicit conversion.
///
/// # Representation
///
/// A `String` is made up of three components: a pointer to some bytes, a
/// length, and a capacity. The pointer points to an internal buffer `String`
/// uses to store its data. The length is the number of bytes currently stored
/// in the buffer, and the capacity is the size of the buffer in bytes. As such,
/// the length will always be less than or equal to the capacity.
///
/// This buffer is always stored on the heap.
///
/// You can look at these with the [`as_ptr`], [`len`], and [`capacity`]
/// methods:
///
/// ```
/// use bumpalo::{Bump, collections::String};
/// use std::mem;
///
/// let b = Bump::new();
///
/// let story = String::from_str_in("Once upon a time...", &b);
///
/// let ptr = story.as_ptr();
/// let len = story.len();
/// let capacity = story.capacity();
///
/// // story has nineteen bytes
/// assert_eq!(19, len);
///
/// // Now that we have our parts, we throw the story away.
/// mem::forget(story); /// /// // We can re-build a String out of ptr, len, and capacity. This is all /// // unsafe because we are responsible for making sure the components are /// // valid: /// let s = unsafe { String::from_raw_parts_in(ptr as *mut _, len, capacity, &b) } ; /// /// assert_eq!(String::from_str_in("Once upon a time...", &b), s); /// ``` /// /// [`as_ptr`]: #method.as_ptr /// [`len`]: #method.len /// [`capacity`]: #method.capacity /// /// If a `String` has enough capacity, adding elements to it will not /// re-allocate. For example, consider this program: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let mut s = String::new_in(&b); /// /// println!("{}", s.capacity()); /// /// for _ in 0..5 { /// s.push_str("hello"); /// println!("{}", s.capacity()); /// } /// ``` /// /// This will output the following: /// /// ```text /// 0 /// 5 /// 10 /// 20 /// 20 /// 40 /// ``` /// /// At first, we have no memory allocated at all, but as we append to the /// string, it increases its capacity appropriately. If we instead use the /// [`with_capacity_in`] method to allocate the correct capacity initially: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let mut s = String::with_capacity_in(25, &b); /// /// println!("{}", s.capacity()); /// /// for _ in 0..5 { /// s.push_str("hello"); /// println!("{}", s.capacity()); /// } /// ``` /// /// [`with_capacity_in`]: #method.with_capacity_in /// /// We end up with a different output: /// /// ```text /// 25 /// 25 /// 25 /// 25 /// 25 /// 25 /// ``` /// /// Here, there's no need to allocate more memory inside the loop. 
/// /// [`&str`]: https://doc.rust-lang.org/nightly/std/primitive.str.html /// [`Deref`]: https://doc.rust-lang.org/nightly/std/ops/trait.Deref.html /// [`as_str()`]: struct.String.html#method.as_str #[derive(PartialOrd, Eq, Ord)] pub struct String<'bump> { vec: Vec<'bump, u8>, } /// A possible error value when converting a `String` from a UTF-8 byte vector. /// /// This type is the error type for the [`from_utf8`] method on [`String`]. It /// is designed in such a way to carefully avoid reallocations: the /// [`into_bytes`] method will give back the byte vector that was used in the /// conversion attempt. /// /// [`from_utf8`]: struct.String.html#method.from_utf8 /// [`String`]: struct.String.html /// [`into_bytes`]: struct.FromUtf8Error.html#method.into_bytes /// /// The [`Utf8Error`] type provided by [`std::str`] represents an error that may /// occur when converting a slice of [`u8`]s to a [`&str`]. In this sense, it's /// an analogue to `FromUtf8Error`, and you can get one from a `FromUtf8Error` /// through the [`utf8_error`] method. /// /// [`Utf8Error`]: https://doc.rust-lang.org/nightly/std/str/struct.Utf8Error.html /// [`std::str`]: https://doc.rust-lang.org/nightly/std/str/index.html /// [`u8`]: https://doc.rust-lang.org/nightly/std/primitive.u8.html /// [`&str`]: https://doc.rust-lang.org/nightly/std/primitive.str.html /// [`utf8_error`]: #method.utf8_error /// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// // some invalid bytes, in a vector /// let bytes = bumpalo::vec![in &b; 0, 159]; /// /// let value = String::from_utf8(bytes); /// /// assert!(value.is_err()); /// assert_eq!(bumpalo::vec![in &b; 0, 159], value.unwrap_err().into_bytes()); /// ``` #[derive(Debug)] pub struct FromUtf8Error<'bump> { bytes: Vec<'bump, u8>, error: Utf8Error, } /// A possible error value when converting a `String` from a UTF-16 byte slice. 
/// /// This type is the error type for the [`from_utf16`] method on [`String`]. /// /// [`from_utf16`]: struct.String.html#method.from_utf16 /// [`String`]: struct.String.html /// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// // 𝄞muic /// let v = &[0xD834, 0xDD1E, 0x006d, 0x0075, /// 0xD800, 0x0069, 0x0063]; /// /// assert!(String::from_utf16_in(v, &b).is_err()); /// ``` #[derive(Debug)] pub struct FromUtf16Error(()); impl<'bump> String<'bump> { /// Creates a new empty `String`. /// /// Given that the `String` is empty, this will not allocate any initial /// buffer. While that means that this initial operation is very /// inexpensive, it may cause excessive allocation later when you add /// data. If you have an idea of how much data the `String` will hold, /// consider the [`with_capacity_in`] method to prevent excessive /// re-allocation. /// /// [`with_capacity_in`]: #method.with_capacity_in /// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let s = String::new_in(&b); /// ``` #[inline] pub fn new_in(bump: &'bump Bump) -> String<'bump> { String { vec: Vec::new_in(bump), } } /// Creates a new empty `String` with a particular capacity. /// /// `String`s have an internal buffer to hold their data. The capacity is /// the length of that buffer, and can be queried with the [`capacity`] /// method. This method creates an empty `String`, but one with an initial /// buffer that can hold `capacity` bytes. This is useful when you may be /// appending a bunch of data to the `String`, reducing the number of /// reallocations it needs to do. /// /// [`capacity`]: #method.capacity /// /// If the given capacity is `0`, no allocation will occur, and this method /// is identical to the [`new_in`] method. 
/// /// [`new_in`]: #method.new /// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let mut s = String::with_capacity_in(10, &b); /// /// // The String contains no chars, even though it has capacity for more /// assert_eq!(s.len(), 0); /// /// // These are all done without reallocating... /// let cap = s.capacity(); /// for _ in 0..10 { /// s.push('a'); /// } /// /// assert_eq!(s.capacity(), cap); /// /// // ...but this may make the vector reallocate /// s.push('a'); /// ``` #[inline] pub fn with_capacity_in(capacity: usize, bump: &'bump Bump) -> String<'bump> { String { vec: Vec::with_capacity_in(capacity, bump), } } /// Converts a vector of bytes to a `String`. /// /// A string slice ([`&str`]) is made of bytes ([`u8`]), and a vector of bytes /// ([`Vec`]) is made of bytes, so this function converts between the /// two. Not all byte slices are valid `String`s, however: `String` /// requires that it is valid UTF-8. `from_utf8()` checks to ensure that /// the bytes are valid UTF-8, and then does the conversion. /// /// If you are sure that the byte slice is valid UTF-8, and you don't want /// to incur the overhead of the validity check, there is an unsafe version /// of this function, [`from_utf8_unchecked`], which has the same behavior /// but skips the check. /// /// This method will take care to not copy the vector, for efficiency's /// sake. /// /// If you need a [`&str`] instead of a `String`, consider /// [`str::from_utf8`]. /// /// The inverse of this method is [`as_bytes`]. /// /// # Errors /// /// Returns [`Err`] if the slice is not UTF-8 with a description as to why the /// provided bytes are not UTF-8. The vector you moved in is also included. 
/// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// // some bytes, in a vector /// let sparkle_heart = bumpalo::vec![in &b; 240, 159, 146, 150]; /// /// // We know these bytes are valid, so we'll use `unwrap()`. /// let sparkle_heart = String::from_utf8(sparkle_heart).unwrap(); /// /// assert_eq!("💖", sparkle_heart); /// ``` /// /// Incorrect bytes: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// // some invalid bytes, in a vector /// let sparkle_heart = bumpalo::vec![in &b; 0, 159, 146, 150]; /// /// assert!(String::from_utf8(sparkle_heart).is_err()); /// ``` /// /// See the docs for [`FromUtf8Error`] for more details on what you can do /// with this error. /// /// [`from_utf8_unchecked`]: struct.String.html#method.from_utf8_unchecked /// [`&str`]: https://doc.rust-lang.org/nightly/std/primitive.str.html /// [`u8`]: https://doc.rust-lang.org/nightly/std/primitive.u8.html /// [`Vec`]: ../vec/struct.Vec.html /// [`str::from_utf8`]: https://doc.rust-lang.org/nightly/std/str/fn.from_utf8.html /// [`as_bytes`]: struct.String.html#method.as_bytes /// [`FromUtf8Error`]: struct.FromUtf8Error.html /// [`Err`]: https://doc.rust-lang.org/nightly/std/result/enum.Result.html#variant.Err #[inline] pub fn from_utf8(vec: Vec<'bump, u8>) -> Result, FromUtf8Error<'bump>> { match str::from_utf8(&vec) { Ok(..) => Ok(String { vec }), Err(e) => Err(FromUtf8Error { bytes: vec, error: e, }), } } /// Converts a slice of bytes to a string, including invalid characters. /// /// Strings are made of bytes ([`u8`]), and a slice of bytes /// ([`&[u8]`][byteslice]) is made of bytes, so this function converts /// between the two. Not all byte slices are valid strings, however: strings /// are required to be valid UTF-8. 
During this conversion, /// `from_utf8_lossy()` will replace any invalid UTF-8 sequences with /// [`U+FFFD REPLACEMENT CHARACTER`][U+FFFD], which looks like this: � /// /// [`u8`]: https://doc.rust-lang.org/nightly/std/primitive.u8.html /// [byteslice]: https://doc.rust-lang.org/nightly/std/primitive.slice.html /// [U+FFFD]: ../char/constant.REPLACEMENT_CHARACTER.html /// /// If you are sure that the byte slice is valid UTF-8, and you don't want /// to incur the overhead of the conversion, there is an unsafe version /// of this function, [`from_utf8_unchecked`], which has the same behavior /// but skips the checks. /// /// [`from_utf8_unchecked`]: struct.String.html#method.from_utf8_unchecked /// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{collections::String, Bump, vec}; /// /// let b = Bump::new(); /// /// // some bytes, in a vector /// let sparkle_heart = bumpalo::vec![in &b; 240, 159, 146, 150]; /// /// let sparkle_heart = String::from_utf8_lossy_in(&sparkle_heart, &b); /// /// assert_eq!("💖", sparkle_heart); /// ``` /// /// Incorrect bytes: /// /// ``` /// use bumpalo::{collections::String, Bump, vec}; /// /// let b = Bump::new(); /// /// // some invalid bytes /// let input = b"Hello \xF0\x90\x80World"; /// let output = String::from_utf8_lossy_in(input, &b); /// /// assert_eq!("Hello �World", output); /// ``` pub fn from_utf8_lossy_in(v: &[u8], bump: &'bump Bump) -> String<'bump> { let mut iter = lossy::Utf8Lossy::from_bytes(v).chunks(); let (first_valid, first_broken) = if let Some(chunk) = iter.next() { let lossy::Utf8LossyChunk { valid, broken } = chunk; if valid.len() == v.len() { debug_assert!(broken.is_empty()); unsafe { return String::from_utf8_unchecked(Vec::from_iter_in(v.iter().cloned(), bump)); } } (valid, broken) } else { return String::from_str_in("", bump); }; const REPLACEMENT: &str = "\u{FFFD}"; let mut res = String::with_capacity_in(v.len(), bump); res.push_str(first_valid); if !first_broken.is_empty() { 
res.push_str(REPLACEMENT); } for lossy::Utf8LossyChunk { valid, broken } in iter { res.push_str(valid); if !broken.is_empty() { res.push_str(REPLACEMENT); } } res } /// Decode a UTF-16 encoded vector `v` into a `String`, returning [`Err`] /// if `v` contains any invalid data. /// /// [`Err`]: https://doc.rust-lang.org/nightly/std/result/enum.Result.html#variant.Err /// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// // 𝄞music /// let v = &[0xD834, 0xDD1E, 0x006d, 0x0075, /// 0x0073, 0x0069, 0x0063]; /// assert_eq!(String::from_str_in("𝄞music", &b), /// String::from_utf16_in(v, &b).unwrap()); /// /// // 𝄞muic /// let v = &[0xD834, 0xDD1E, 0x006d, 0x0075, /// 0xD800, 0x0069, 0x0063]; /// assert!(String::from_utf16_in(v, &b).is_err()); /// ``` pub fn from_utf16_in(v: &[u16], bump: &'bump Bump) -> Result, FromUtf16Error> { let mut ret = String::with_capacity_in(v.len(), bump); for c in decode_utf16(v.iter().cloned()) { if let Ok(c) = c { ret.push(c); } else { return Err(FromUtf16Error(())); } } Ok(ret) } /// Construct a new `String<'bump>` from an iterator of `char`s. /// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let s = String::from_str_in("hello", &b); /// assert_eq!(s, "hello"); /// ``` pub fn from_str_in(s: &str, bump: &'bump Bump) -> String<'bump> { let mut t = String::with_capacity_in(s.len(), bump); t.push_str(s); t } /// Construct a new `String<'bump>` from an iterator of `char`s. /// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let s = String::from_str_in("hello", &b); /// assert_eq!(s, "hello"); /// ``` pub fn from_iter_in>(iter: I, bump: &'bump Bump) -> String<'bump> { let mut s = String::new_in(bump); for c in iter { s.push(c); } s } /// Creates a new `String` from a length, capacity, and pointer. 
/// /// # Safety /// /// This is highly unsafe, due to the number of invariants that aren't /// checked: /// /// * The memory at `ptr` needs to have been previously allocated by the /// same allocator the standard library uses. /// * `length` needs to be less than or equal to `capacity`. /// * `capacity` needs to be the correct value. /// /// Violating these may cause problems like corrupting the allocator's /// internal data structures. /// /// The ownership of `ptr` is effectively transferred to the /// `String` which may then deallocate, reallocate or change the /// contents of memory pointed to by the pointer at will. Ensure /// that nothing else uses the pointer after calling this /// function. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// use std::mem; /// /// let b = Bump::new(); /// /// unsafe { /// let s = String::from_str_in("hello", &b); /// let ptr = s.as_ptr(); /// let len = s.len(); /// let capacity = s.capacity(); /// /// mem::forget(s); /// /// let s = String::from_raw_parts_in(ptr as *mut _, len, capacity, &b); /// /// assert_eq!(String::from_str_in("hello", &b), s); /// } /// ``` #[inline] pub unsafe fn from_raw_parts_in( buf: *mut u8, length: usize, capacity: usize, bump: &'bump Bump, ) -> String<'bump> { String { vec: Vec::from_raw_parts_in(buf, length, capacity, bump), } } /// Converts a vector of bytes to a `String` without checking that the /// string contains valid UTF-8. /// /// See the safe version, [`from_utf8`], for more details. /// /// [`from_utf8`]: struct.String.html#method.from_utf8 /// /// # Safety /// /// This function is unsafe because it does not check that the bytes passed /// to it are valid UTF-8. If this constraint is violated, it may cause /// memory unsafety issues with future users of the `String`, as the rest of /// the standard library assumes that `String`s are valid UTF-8. 
/// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// // some bytes, in a vector /// let sparkle_heart = bumpalo::vec![in &b; 240, 159, 146, 150]; /// /// let sparkle_heart = unsafe { /// String::from_utf8_unchecked(sparkle_heart) /// }; /// /// assert_eq!("💖", sparkle_heart); /// ``` #[inline] pub unsafe fn from_utf8_unchecked(bytes: Vec<'bump, u8>) -> String<'bump> { String { vec: bytes } } /// Converts a `String` into a byte vector. /// /// This consumes the `String`, so we do not need to copy its contents. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let s = String::from_str_in("hello", &b); /// let bytes = s.into_bytes(); /// /// assert_eq!(&[104, 101, 108, 108, 111][..], &bytes[..]); /// ``` #[inline] pub fn into_bytes(self) -> Vec<'bump, u8> { self.vec } /// Convert this `String<'bump>` into a `&'bump str`. This is analagous to /// `std::string::String::into_boxed_str`. /// /// # Example /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let s = String::from_str_in("foo", &b); /// let t = s.into_bump_str(); /// assert_eq!("foo", t); /// ``` pub fn into_bump_str(self) -> &'bump str { let s = unsafe { let s = self.as_str(); mem::transmute(s) }; mem::forget(self); s } /// Extracts a string slice containing the entire `String`. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let s = String::from_str_in("foo", &b); /// /// assert_eq!("foo", s.as_str()); /// ``` #[inline] pub fn as_str(&self) -> &str { self } /// Converts a `String` into a mutable string slice. 
/// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let mut s = String::from_str_in("foobar", &b); /// let s_mut_str = s.as_mut_str(); /// /// s_mut_str.make_ascii_uppercase(); /// /// assert_eq!("FOOBAR", s_mut_str); /// ``` #[inline] pub fn as_mut_str(&mut self) -> &mut str { self } /// Appends a given string slice onto the end of this `String`. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let mut s = String::from_str_in("foo", &b); /// /// s.push_str("bar"); /// /// assert_eq!("foobar", s); /// ``` #[inline] pub fn push_str(&mut self, string: &str) { self.vec.extend_from_slice(string.as_bytes()) } /// Returns this `String`'s capacity, in bytes. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let s = String::with_capacity_in(10, &b); /// /// assert!(s.capacity() >= 10); /// ``` #[inline] pub fn capacity(&self) -> usize { self.vec.capacity() } /// Ensures that this `String`'s capacity is at least `additional` bytes /// larger than its length. /// /// The capacity may be increased by more than `additional` bytes if it /// chooses, to prevent frequent reallocations. /// /// If you do not want this "at least" behavior, see the [`reserve_exact`] /// method. /// /// # Panics /// /// Panics if the new capacity overflows [`usize`]. 
/// /// [`reserve_exact`]: struct.String.html#method.reserve_exact /// [`usize`]: https://doc.rust-lang.org/nightly/std/primitive.usize.html /// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let mut s = String::new_in(&b); /// /// s.reserve(10); /// /// assert!(s.capacity() >= 10); /// ``` /// /// This may not actually increase the capacity: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let mut s = String::with_capacity_in(10, &b); /// s.push('a'); /// s.push('b'); /// /// // s now has a length of 2 and a capacity of 10 /// assert_eq!(2, s.len()); /// assert_eq!(10, s.capacity()); /// /// // Since we already have an extra 8 capacity, calling this... /// s.reserve(8); /// /// // ... doesn't actually increase. /// assert_eq!(10, s.capacity()); /// ``` #[inline] pub fn reserve(&mut self, additional: usize) { self.vec.reserve(additional) } /// Ensures that this `String`'s capacity is `additional` bytes /// larger than its length. /// /// Consider using the [`reserve`] method unless you absolutely know /// better than the allocator. /// /// [`reserve`]: #method.reserve /// /// # Panics /// /// Panics if the new capacity overflows `usize`. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let mut s = String::new_in(&b); /// /// s.reserve_exact(10); /// /// assert!(s.capacity() >= 10); /// ``` /// /// This may not actually increase the capacity: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let mut s = String::with_capacity_in(10, &b); /// s.push('a'); /// s.push('b'); /// /// // s now has a length of 2 and a capacity of 10 /// assert_eq!(2, s.len()); /// assert_eq!(10, s.capacity()); /// /// // Since we already have an extra 8 capacity, calling this... /// s.reserve_exact(8); /// /// // ... 
doesn't actually increase. /// assert_eq!(10, s.capacity()); /// ``` #[inline] pub fn reserve_exact(&mut self, additional: usize) { self.vec.reserve_exact(additional) } /// Shrinks the capacity of this `String` to match its length. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let mut s = String::from_str_in("foo", &b); /// /// s.reserve(100); /// assert!(s.capacity() >= 100); /// /// s.shrink_to_fit(); /// assert_eq!(3, s.capacity()); /// ``` #[inline] pub fn shrink_to_fit(&mut self) { self.vec.shrink_to_fit() } /// Appends the given [`char`] to the end of this `String`. /// /// [`char`]: https://doc.rust-lang.org/nightly/std/primitive.char.html /// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let mut s = String::from_str_in("abc", &b); /// /// s.push('1'); /// s.push('2'); /// s.push('3'); /// /// assert_eq!("abc123", s); /// ``` #[inline] pub fn push(&mut self, ch: char) { match ch.len_utf8() { 1 => self.vec.push(ch as u8), _ => self .vec .extend_from_slice(ch.encode_utf8(&mut [0; 4]).as_bytes()), } } /// Returns a byte slice of this `String`'s contents. /// /// The inverse of this method is [`from_utf8`]. /// /// [`from_utf8`]: #method.from_utf8 /// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let s = String::from_str_in("hello", &b); /// /// assert_eq!(&[104, 101, 108, 108, 111], s.as_bytes()); /// ``` #[inline] pub fn as_bytes(&self) -> &[u8] { &self.vec } /// Shortens this `String` to the specified length. /// /// If `new_len` is greater than the string's current length, this has no /// effect. /// /// Note that this method has no effect on the allocated capacity /// of the string /// /// # Panics /// /// Panics if `new_len` does not lie on a [`char`] boundary. 
/// /// [`char`]: https://doc.rust-lang.org/nightly/std/primitive.char.html /// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let mut s = String::from_str_in("hello", &b); /// /// s.truncate(2); /// /// assert_eq!("he", s); /// ``` #[inline] pub fn truncate(&mut self, new_len: usize) { if new_len <= self.len() { assert!(self.is_char_boundary(new_len)); self.vec.truncate(new_len) } } /// Removes the last character from the string buffer and returns it. /// /// Returns [`None`] if this `String` is empty. /// /// [`None`]: https://doc.rust-lang.org/nightly/std/option/enum.Option.html#variant.None /// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let mut s = String::from_str_in("foo", &b); /// /// assert_eq!(s.pop(), Some('o')); /// assert_eq!(s.pop(), Some('o')); /// assert_eq!(s.pop(), Some('f')); /// /// assert_eq!(s.pop(), None); /// ``` #[inline] pub fn pop(&mut self) -> Option { let ch = self.chars().rev().next()?; let newlen = self.len() - ch.len_utf8(); unsafe { self.vec.set_len(newlen); } Some(ch) } /// Removes a [`char`] from this `String` at a byte position and returns it. /// /// This is an `O(n)` operation, as it requires copying every element in the /// buffer. /// /// # Panics /// /// Panics if `idx` is larger than or equal to the `String`'s length, /// or if it does not lie on a [`char`] boundary. 
/// /// [`char`]: https://doc.rust-lang.org/nightly/std/primitive.char.html /// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let mut s = String::from_str_in("foo", &b); /// /// assert_eq!(s.remove(0), 'f'); /// assert_eq!(s.remove(1), 'o'); /// assert_eq!(s.remove(0), 'o'); /// ``` #[inline] pub fn remove(&mut self, idx: usize) -> char { let ch = match self[idx..].chars().next() { Some(ch) => ch, None => panic!("cannot remove a char from the end of a string"), }; let next = idx + ch.len_utf8(); let len = self.len(); unsafe { ptr::copy( self.vec.as_ptr().add(next), self.vec.as_mut_ptr().add(idx), len - next, ); self.vec.set_len(len - (next - idx)); } ch } /// Retains only the characters specified by the predicate. /// /// In other words, remove all characters `c` such that `f(c)` returns `false`. /// This method operates in place and preserves the order of the retained /// characters. /// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let mut s = String::from_str_in("f_o_ob_ar", &b); /// /// s.retain(|c| c != '_'); /// /// assert_eq!(s, "foobar"); /// ``` #[inline] pub fn retain(&mut self, mut f: F) where F: FnMut(char) -> bool, { let len = self.len(); let mut del_bytes = 0; let mut idx = 0; while idx < len { let ch = unsafe { self.get_unchecked(idx..len).chars().next().unwrap() }; let ch_len = ch.len_utf8(); if !f(ch) { del_bytes += ch_len; } else if del_bytes > 0 { unsafe { ptr::copy( self.vec.as_ptr().add(idx), self.vec.as_mut_ptr().add(idx - del_bytes), ch_len, ); } } // Point idx to the next char idx += ch_len; } if del_bytes > 0 { unsafe { self.vec.set_len(len - del_bytes); } } } /// Inserts a character into this `String` at a byte position. /// /// This is an `O(n)` operation as it requires copying every element in the /// buffer. 
/// /// # Panics /// /// Panics if `idx` is larger than the `String`'s length, or if it does not /// lie on a [`char`] boundary. /// /// [`char`]: https://doc.rust-lang.org/nightly/std/primitive.char.html /// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let mut s = String::with_capacity_in(3, &b); /// /// s.insert(0, 'f'); /// s.insert(1, 'o'); /// s.insert(2, 'o'); /// /// assert_eq!("foo", s); /// ``` #[inline] pub fn insert(&mut self, idx: usize, ch: char) { assert!(self.is_char_boundary(idx)); let mut bits = [0; 4]; let bits = ch.encode_utf8(&mut bits).as_bytes(); unsafe { self.insert_bytes(idx, bits); } } unsafe fn insert_bytes(&mut self, idx: usize, bytes: &[u8]) { let len = self.len(); let amt = bytes.len(); self.vec.reserve(amt); ptr::copy( self.vec.as_ptr().add(idx), self.vec.as_mut_ptr().add(idx + amt), len - idx, ); ptr::copy(bytes.as_ptr(), self.vec.as_mut_ptr().add(idx), amt); self.vec.set_len(len + amt); } /// Inserts a string slice into this `String` at a byte position. /// /// This is an `O(n)` operation as it requires copying every element in the /// buffer. /// /// # Panics /// /// Panics if `idx` is larger than the `String`'s length, or if it does not /// lie on a [`char`] boundary. /// /// [`char`]: https://doc.rust-lang.org/nightly/std/primitive.char.html /// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let mut s = String::from_str_in("bar", &b); /// /// s.insert_str(0, "foo"); /// /// assert_eq!("foobar", s); /// ``` #[inline] pub fn insert_str(&mut self, idx: usize, string: &str) { assert!(self.is_char_boundary(idx)); unsafe { self.insert_bytes(idx, string.as_bytes()); } } /// Returns a mutable reference to the contents of this `String`. /// /// # Safety /// /// This function is unsafe because it does not check that the bytes passed /// to it are valid UTF-8. 
If this constraint is violated, it may cause /// memory unsafety issues with future users of the `String`, as the rest of /// the standard library assumes that `String`s are valid UTF-8. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let mut s = String::from_str_in("hello", &b); /// /// unsafe { /// let vec = s.as_mut_vec(); /// assert_eq!(&[104, 101, 108, 108, 111][..], &vec[..]); /// /// vec.reverse(); /// } /// assert_eq!(s, "olleh"); /// ``` #[inline] pub unsafe fn as_mut_vec(&mut self) -> &mut Vec<'bump, u8> { &mut self.vec } /// Returns the length of this `String`, in bytes. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let a = String::from_str_in("foo", &b); /// /// assert_eq!(a.len(), 3); /// ``` #[inline] pub fn len(&self) -> usize { self.vec.len() } /// Returns `true` if this `String` has a length of zero. /// /// Returns `false` otherwise. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let mut v = String::new_in(&b); /// assert!(v.is_empty()); /// /// v.push('a'); /// assert!(!v.is_empty()); /// ``` #[inline] pub fn is_empty(&self) -> bool { self.len() == 0 } /// Splits the string into two at the given index. /// /// Returns a newly allocated `String`. `self` contains bytes `[0, at)`, and /// the returned `String` contains bytes `[at, len)`. `at` must be on the /// boundary of a UTF-8 code point. /// /// Note that the capacity of `self` does not change. /// /// # Panics /// /// Panics if `at` is not on a `UTF-8` code point boundary, or if it is beyond the last /// code point of the string. 
/// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let mut hello = String::from_str_in("Hello, World!", &b); /// let world = hello.split_off(7); /// assert_eq!(hello, "Hello, "); /// assert_eq!(world, "World!"); /// ``` #[inline] pub fn split_off(&mut self, at: usize) -> String<'bump> { assert!(self.is_char_boundary(at)); let other = self.vec.split_off(at); unsafe { String::from_utf8_unchecked(other) } } /// Truncates this `String`, removing all contents. /// /// While this means the `String` will have a length of zero, it does not /// touch its capacity. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let mut s = String::from_str_in("foo", &b); /// /// s.clear(); /// /// assert!(s.is_empty()); /// assert_eq!(0, s.len()); /// assert_eq!(3, s.capacity()); /// ``` #[inline] pub fn clear(&mut self) { self.vec.clear() } /// Creates a draining iterator that removes the specified range in the `String` /// and yields the removed `chars`. /// /// Note: The element range is removed even if the iterator is not /// consumed until the end. /// /// # Panics /// /// Panics if the starting point or end point do not lie on a [`char`] /// boundary, or if they're out of bounds. 
/// /// [`char`]: https://doc.rust-lang.org/nightly/std/primitive.char.html /// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let mut s = String::from_str_in("α is alpha, β is beta", &b); /// let beta_offset = s.find('β').unwrap_or(s.len()); /// /// // Remove the range up until the β from the string /// let t = String::from_iter_in(s.drain(..beta_offset), &b); /// assert_eq!(t, "α is alpha, "); /// assert_eq!(s, "β is beta"); /// /// // A full range clears the string /// s.drain(..); /// assert_eq!(s, ""); /// ``` pub fn drain<'a, R>(&'a mut self, range: R) -> Drain<'a, 'bump> where R: RangeBounds, { // Memory safety // // The String version of Drain does not have the memory safety issues // of the vector version. The data is just plain bytes. // Because the range removal happens in Drop, if the Drain iterator is leaked, // the removal will not happen. let len = self.len(); let start = match range.start_bound() { Included(&n) => n, Excluded(&n) => n + 1, Unbounded => 0, }; let end = match range.end_bound() { Included(&n) => n + 1, Excluded(&n) => n, Unbounded => len, }; // Take out two simultaneous borrows. The &mut String won't be accessed // until iteration is over, in Drop. let self_ptr = self as *mut _; // slicing does the appropriate bounds checks let chars_iter = self[start..end].chars(); Drain { start, end, iter: chars_iter, string: self_ptr, } } /// Removes the specified range in the string, /// and replaces it with the given string. /// The given string doesn't need to be the same length as the range. /// /// # Panics /// /// Panics if the starting point or end point do not lie on a [`char`] /// boundary, or if they're out of bounds. 
/// /// [`char`]: https://doc.rust-lang.org/nightly/std/primitive.char.html /// [`Vec::splice`]: ../vec/struct.Vec.html#method.splice /// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let mut s = String::from_str_in("α is alpha, β is beta", &b); /// let beta_offset = s.find('β').unwrap_or(s.len()); /// /// // Replace the range up until the β from the string /// s.replace_range(..beta_offset, "Α is capital alpha; "); /// assert_eq!(s, "Α is capital alpha; β is beta"); /// ``` pub fn replace_range(&mut self, range: R, replace_with: &str) where R: RangeBounds, { // Memory safety // // Replace_range does not have the memory safety issues of a vector Splice. // of the vector version. The data is just plain bytes. match range.start_bound() { Included(&n) => assert!(self.is_char_boundary(n)), Excluded(&n) => assert!(self.is_char_boundary(n + 1)), Unbounded => {} }; match range.end_bound() { Included(&n) => assert!(self.is_char_boundary(n + 1)), Excluded(&n) => assert!(self.is_char_boundary(n)), Unbounded => {} }; unsafe { self.as_mut_vec() }.splice(range, replace_with.bytes()); } } impl<'bump> FromUtf8Error<'bump> { /// Returns a slice of [`u8`]s bytes that were attempted to convert to a `String`. /// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// // some invalid bytes, in a vector /// let bytes = bumpalo::vec![in &b; 0, 159]; /// /// let value = String::from_utf8(bytes); /// /// assert_eq!(&[0, 159], value.unwrap_err().as_bytes()); /// ``` pub fn as_bytes(&self) -> &[u8] { &self.bytes[..] } /// Returns the bytes that were attempted to convert to a `String`. /// /// This method is carefully constructed to avoid allocation. It will /// consume the error, moving out the bytes, so that a copy of the bytes /// does not need to be made. 
/// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// // some invalid bytes, in a vector /// let bytes = bumpalo::vec![in &b; 0, 159]; /// /// let value = String::from_utf8(bytes); /// /// assert_eq!(bumpalo::vec![in &b; 0, 159], value.unwrap_err().into_bytes()); /// ``` pub fn into_bytes(self) -> Vec<'bump, u8> { self.bytes } /// Fetch a `Utf8Error` to get more details about the conversion failure. /// /// The [`Utf8Error`] type provided by [`std::str`] represents an error that may /// occur when converting a slice of [`u8`]s to a [`&str`]. In this sense, it's /// an analogue to `FromUtf8Error`. See its documentation for more details /// on using it. /// /// [`Utf8Error`]: https://doc.rust-lang.org/nightly/std/str/struct.Utf8Error.html /// [`std::str`]: https://doc.rust-lang.org/nightly/std/str/index.html /// [`u8`]: https://doc.rust-lang.org/nightly/std/primitive.u8.html /// [`&str`]: https://doc.rust-lang.org/nightly/std/primitive.str.html /// /// # Examples /// /// Basic usage: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// // some invalid bytes, in a vector /// let bytes = bumpalo::vec![in &b; 0, 159]; /// /// let error = String::from_utf8(bytes).unwrap_err().utf8_error(); /// /// // the first byte is invalid here /// assert_eq!(1, error.valid_up_to()); /// ``` pub fn utf8_error(&self) -> Utf8Error { self.error } } impl<'bump> fmt::Display for FromUtf8Error<'bump> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(&self.error, f) } } impl fmt::Display for FromUtf16Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt("invalid utf-16: lone surrogate found", f) } } impl<'bump> Clone for String<'bump> { fn clone(&self) -> Self { String { vec: self.vec.clone(), } } fn clone_from(&mut self, source: &Self) { self.vec.clone_from(&source.vec); } } impl<'bump> Extend for String<'bump> { fn 
extend>(&mut self, iter: I) { let iterator = iter.into_iter(); let (lower_bound, _) = iterator.size_hint(); self.reserve(lower_bound); for ch in iterator { self.push(ch) } } } impl<'a, 'bump> Extend<&'a char> for String<'bump> { fn extend>(&mut self, iter: I) { self.extend(iter.into_iter().cloned()); } } impl<'a, 'bump> Extend<&'a str> for String<'bump> { fn extend>(&mut self, iter: I) { for s in iter { self.push_str(s) } } } impl<'bump> Extend> for String<'bump> { fn extend>>(&mut self, iter: I) { for s in iter { self.push_str(&s) } } } impl<'bump> Extend for String<'bump> { fn extend>(&mut self, iter: I) { for s in iter { self.push_str(&s) } } } impl<'a, 'bump> Extend> for String<'bump> { fn extend>>(&mut self, iter: I) { for s in iter { self.push_str(&s) } } } impl<'bump> PartialEq for String<'bump> { #[inline] fn eq(&self, other: &String) -> bool { PartialEq::eq(&self[..], &other[..]) } } macro_rules! impl_eq { ($lhs:ty, $rhs: ty) => { impl<'a, 'bump> PartialEq<$rhs> for $lhs { #[inline] fn eq(&self, other: &$rhs) -> bool { PartialEq::eq(&self[..], &other[..]) } } impl<'a, 'b, 'bump> PartialEq<$lhs> for $rhs { #[inline] fn eq(&self, other: &$lhs) -> bool { PartialEq::eq(&self[..], &other[..]) } } }; } impl_eq! { String<'bump>, str } impl_eq! { String<'bump>, &'a str } impl_eq! { Cow<'a, str>, String<'bump> } impl_eq! { core_alloc::string::String, String<'bump> } impl<'bump> fmt::Display for String<'bump> { #[inline] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(&**self, f) } } impl<'bump> fmt::Debug for String<'bump> { #[inline] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Debug::fmt(&**self, f) } } impl<'bump> hash::Hash for String<'bump> { #[inline] fn hash(&self, hasher: &mut H) { (**self).hash(hasher) } } /// Implements the `+` operator for concatenating two strings. /// /// This consumes the `String<'bump>` on the left-hand side and re-uses its buffer (growing it if /// necessary). 
This is done to avoid allocating a new `String<'bump>` and copying the entire contents on /// every operation, which would lead to `O(n^2)` running time when building an `n`-byte string by /// repeated concatenation. /// /// The string on the right-hand side is only borrowed; its contents are copied into the returned /// `String<'bump>`. /// /// # Examples /// /// Concatenating two `String<'bump>`s takes the first by value and borrows the second: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let bump = Bump::new(); /// /// let a = String::from_str_in("hello", &bump); /// let b = String::from_str_in(" world", &bump); /// let c = a + &b; /// // `a` is moved and can no longer be used here. /// ``` /// /// If you want to keep using the first `String`, you can clone it and append to the clone instead: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let bump = Bump::new(); /// /// let a = String::from_str_in("hello", &bump); /// let b = String::from_str_in(" world", &bump); /// let c = a.clone() + &b; /// // `a` is still valid here. /// ``` /// /// Concatenating `&str` slices can be done by converting the first to a `String`: /// /// ``` /// use bumpalo::{Bump, collections::String}; /// /// let b = Bump::new(); /// /// let a = "hello"; /// let b = " world"; /// let c = a.to_string() + b; /// ``` impl<'a, 'bump> Add<&'a str> for String<'bump> { type Output = String<'bump>; #[inline] fn add(mut self, other: &str) -> String<'bump> { self.push_str(other); self } } /// Implements the `+=` operator for appending to a `String<'bump>`. /// /// This has the same behavior as the [`push_str`][String::push_str] method. 
impl<'a, 'bump> AddAssign<&'a str> for String<'bump> { #[inline] fn add_assign(&mut self, other: &str) { self.push_str(other); } } impl<'bump> ops::Index> for String<'bump> { type Output = str; #[inline] fn index(&self, index: ops::Range) -> &str { &self[..][index] } } impl<'bump> ops::Index> for String<'bump> { type Output = str; #[inline] fn index(&self, index: ops::RangeTo) -> &str { &self[..][index] } } impl<'bump> ops::Index> for String<'bump> { type Output = str; #[inline] fn index(&self, index: ops::RangeFrom) -> &str { &self[..][index] } } impl<'bump> ops::Index for String<'bump> { type Output = str; #[inline] fn index(&self, _index: ops::RangeFull) -> &str { unsafe { str::from_utf8_unchecked(&self.vec) } } } impl<'bump> ops::Index> for String<'bump> { type Output = str; #[inline] fn index(&self, index: ops::RangeInclusive) -> &str { Index::index(&**self, index) } } impl<'bump> ops::Index> for String<'bump> { type Output = str; #[inline] fn index(&self, index: ops::RangeToInclusive) -> &str { Index::index(&**self, index) } } impl<'bump> ops::IndexMut> for String<'bump> { #[inline] fn index_mut(&mut self, index: ops::Range) -> &mut str { &mut self[..][index] } } impl<'bump> ops::IndexMut> for String<'bump> { #[inline] fn index_mut(&mut self, index: ops::RangeTo) -> &mut str { &mut self[..][index] } } impl<'bump> ops::IndexMut> for String<'bump> { #[inline] fn index_mut(&mut self, index: ops::RangeFrom) -> &mut str { &mut self[..][index] } } impl<'bump> ops::IndexMut for String<'bump> { #[inline] fn index_mut(&mut self, _index: ops::RangeFull) -> &mut str { unsafe { str::from_utf8_unchecked_mut(&mut *self.vec) } } } impl<'bump> ops::IndexMut> for String<'bump> { #[inline] fn index_mut(&mut self, index: ops::RangeInclusive) -> &mut str { IndexMut::index_mut(&mut **self, index) } } impl<'bump> ops::IndexMut> for String<'bump> { #[inline] fn index_mut(&mut self, index: ops::RangeToInclusive) -> &mut str { IndexMut::index_mut(&mut **self, index) } } impl<'bump> 
ops::Deref for String<'bump> { type Target = str; #[inline] fn deref(&self) -> &str { unsafe { str::from_utf8_unchecked(&self.vec) } } } impl<'bump> ops::DerefMut for String<'bump> { #[inline] fn deref_mut(&mut self) -> &mut str { unsafe { str::from_utf8_unchecked_mut(&mut *self.vec) } } } impl<'bump> AsRef for String<'bump> { #[inline] fn as_ref(&self) -> &str { self } } impl<'bump> AsRef<[u8]> for String<'bump> { #[inline] fn as_ref(&self) -> &[u8] { self.as_bytes() } } impl<'bump> fmt::Write for String<'bump> { #[inline] fn write_str(&mut self, s: &str) -> fmt::Result { self.push_str(s); Ok(()) } #[inline] fn write_char(&mut self, c: char) -> fmt::Result { self.push(c); Ok(()) } } impl<'bump> Borrow for String<'bump> { #[inline] fn borrow(&self) -> &str { &self[..] } } impl<'bump> BorrowMut for String<'bump> { #[inline] fn borrow_mut(&mut self) -> &mut str { &mut self[..] } } /// A draining iterator for `String`. /// /// This struct is created by the [`drain`] method on [`String`]. See its /// documentation for more. /// /// [`drain`]: struct.String.html#method.drain /// [`String`]: struct.String.html pub struct Drain<'a, 'bump> { /// Will be used as &'a mut String in the destructor string: *mut String<'bump>, /// Start of part to remove start: usize, /// End of part to remove end: usize, /// Current remaining range to remove iter: Chars<'a>, } impl<'a, 'bump> fmt::Debug for Drain<'a, 'bump> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.pad("Drain { .. }") } } unsafe impl<'a, 'bump> Sync for Drain<'a, 'bump> {} unsafe impl<'a, 'bump> Send for Drain<'a, 'bump> {} impl<'a, 'bump> Drop for Drain<'a, 'bump> { fn drop(&mut self) { unsafe { // Use Vec::drain. "Reaffirm" the bounds checks to avoid // panic code being inserted again. 
let self_vec = (*self.string).as_mut_vec(); if self.start <= self.end && self.end <= self_vec.len() { self_vec.drain(self.start..self.end); } } } } impl<'a, 'bump> Iterator for Drain<'a, 'bump> { type Item = char; #[inline] fn next(&mut self) -> Option { self.iter.next() } fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } } impl<'a, 'bump> DoubleEndedIterator for Drain<'a, 'bump> { #[inline] fn next_back(&mut self) -> Option { self.iter.next_back() } } impl<'a, 'bump> FusedIterator for Drain<'a, 'bump> {} bumpalo-3.7.0/src/collections/vec.rs000064400000000000000000002252450000000000000155110ustar 00000000000000// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 or the MIT license // , at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! A contiguous growable array type with heap-allocated contents, written //! `Vec<'bump, T>`. //! //! Vectors have `O(1)` indexing, amortized `O(1)` push (to the end) and //! `O(1)` pop (from the end). //! //! # Examples //! //! You can explicitly create a [`Vec<'bump, T>`] with [`new`]: //! //! ``` //! use bumpalo::{Bump, collections::Vec}; //! //! let b = Bump::new(); //! let v: Vec = Vec::new_in(&b); //! ``` //! //! ...or by using the [`vec!`] macro: //! //! ``` //! use bumpalo::{Bump, collections::Vec}; //! //! let b = Bump::new(); //! //! let v: Vec = bumpalo::vec![in &b]; //! //! let v = bumpalo::vec![in &b; 1, 2, 3, 4, 5]; //! //! let v = bumpalo::vec![in &b; 0; 10]; // ten zeroes //! ``` //! //! You can [`push`] values onto the end of a vector (which will grow the vector //! as needed): //! //! ``` //! use bumpalo::{Bump, collections::Vec}; //! //! let b = Bump::new(); //! //! let mut v = bumpalo::vec![in &b; 1, 2]; //! //! v.push(3); //! ``` //! //! 
Popping values works in much the same way: //! //! ``` //! use bumpalo::{Bump, collections::Vec}; //! //! let b = Bump::new(); //! //! let mut v = bumpalo::vec![in &b; 1, 2]; //! //! let two = v.pop(); //! ``` //! //! Vectors also support indexing (through the [`Index`] and [`IndexMut`] traits): //! //! ``` //! use bumpalo::{Bump, collections::Vec}; //! //! let b = Bump::new(); //! //! let mut v = bumpalo::vec![in &b; 1, 2, 3]; //! let three = v[2]; //! v[1] = v[1] + 5; //! ``` //! //! [`Vec<'bump, T>`]: ./struct.Vec.html //! [`new`]: ./struct.Vec.html#method.new //! [`push`]: ./struct.Vec.html#method.push //! [`Index`]: https://doc.rust-lang.org/nightly/std/ops/trait.Index.html //! [`IndexMut`]: ../../std/ops/trait.IndexMut.html //! [`vec!`]: ../../macro.vec.html use super::raw_vec::RawVec; use crate::collections::CollectionAllocErr; use crate::Bump; use core::borrow::{Borrow, BorrowMut}; use core::cmp::Ordering; use core::fmt; use core::hash::{self, Hash}; use core::iter::FusedIterator; use core::marker::PhantomData; use core::mem; use core::ops; use core::ops::Bound::{Excluded, Included, Unbounded}; use core::ops::{Index, IndexMut, RangeBounds}; use core::ptr; use core::ptr::NonNull; use core::slice; unsafe fn arith_offset(p: *const T, offset: isize) -> *const T { p.offset(offset) } fn partition_dedup_by(s: &mut [T], mut same_bucket: F) -> (&mut [T], &mut [T]) where F: FnMut(&mut T, &mut T) -> bool, { // Although we have a mutable reference to `s`, we cannot make // *arbitrary* changes. The `same_bucket` calls could panic, so we // must ensure that the slice is in a valid state at all times. // // The way that we handle this is by using swaps; we iterate // over all the elements, swapping as we go so that at the end // the elements we wish to keep are in the front, and those we // wish to reject are at the back. We can then split the slice. // This operation is still O(n). 
// // Example: We start in this state, where `r` represents "next // read" and `w` represents "next_write`. // // r // +---+---+---+---+---+---+ // | 0 | 1 | 1 | 2 | 3 | 3 | // +---+---+---+---+---+---+ // w // // Comparing s[r] against s[w-1], this is not a duplicate, so // we swap s[r] and s[w] (no effect as r==w) and then increment both // r and w, leaving us with: // // r // +---+---+---+---+---+---+ // | 0 | 1 | 1 | 2 | 3 | 3 | // +---+---+---+---+---+---+ // w // // Comparing s[r] against s[w-1], this value is a duplicate, // so we increment `r` but leave everything else unchanged: // // r // +---+---+---+---+---+---+ // | 0 | 1 | 1 | 2 | 3 | 3 | // +---+---+---+---+---+---+ // w // // Comparing s[r] against s[w-1], this is not a duplicate, // so swap s[r] and s[w] and advance r and w: // // r // +---+---+---+---+---+---+ // | 0 | 1 | 2 | 1 | 3 | 3 | // +---+---+---+---+---+---+ // w // // Not a duplicate, repeat: // // r // +---+---+---+---+---+---+ // | 0 | 1 | 2 | 3 | 1 | 3 | // +---+---+---+---+---+---+ // w // // Duplicate, advance r. End of slice. Split at w. let len = s.len(); if len <= 1 { return (s, &mut []); } let ptr = s.as_mut_ptr(); let mut next_read: usize = 1; let mut next_write: usize = 1; unsafe { // Avoid bounds checks by using raw pointers. while next_read < len { let ptr_read = ptr.add(next_read); let prev_ptr_write = ptr.add(next_write - 1); if !same_bucket(&mut *ptr_read, &mut *prev_ptr_write) { if next_read != next_write { let ptr_write = prev_ptr_write.offset(1); mem::swap(&mut *ptr_read, &mut *ptr_write); } next_write += 1; } next_read += 1; } } s.split_at_mut(next_write) } unsafe fn offset_from(p: *const T, origin: *const T) -> isize where T: Sized, { let pointee_size = mem::size_of::(); assert!(0 < pointee_size && pointee_size <= isize::max_value() as usize); // This is the same sequence that Clang emits for pointer subtraction. 
// It can be neither `nsw` nor `nuw` because the input is treated as // unsigned but then the output is treated as signed, so neither works. let d = isize::wrapping_sub(p as _, origin as _); d / (pointee_size as isize) } /// Creates a [`Vec`] containing the arguments. /// /// `vec!` allows `Vec`s to be defined with the same syntax as array expressions. /// There are two forms of this macro: /// /// - Create a [`Vec`] containing a given list of elements: /// /// ``` /// use bumpalo::{Bump, vec}; /// /// let b = Bump::new(); /// let v = bumpalo::vec![in &b; 1, 2, 3]; /// assert_eq!(v[0], 1); /// assert_eq!(v[1], 2); /// assert_eq!(v[2], 3); /// ``` /// /// - Create a [`Vec`] from a given element and size: /// /// ``` /// use bumpalo::{Bump, vec}; /// /// let b = Bump::new(); /// let v = bumpalo::vec![in &b; 1; 3]; /// assert_eq!(v, [1, 1, 1]); /// ``` /// /// Note that unlike array expressions this syntax supports all elements /// which implement [`Clone`] and the number of elements doesn't have to be /// a constant. /// /// This will use `clone` to duplicate an expression, so one should be careful /// using this with types having a nonstandard `Clone` implementation. For /// example, `bumpalo::vec![in ≎ Rc::new(1); 5]` will create a vector of five references /// to the same boxed integer value, not five references pointing to independently /// boxed integers. /// /// [`Vec`]: ../collections/vec/struct.Vec.html /// [`Clone`]: https://doc.rust-lang.org/nightly/std/clone/trait.Clone.html #[macro_export] macro_rules! 
vec { (in $bump:expr; $elem:expr; $n:expr) => {{ let n = $n; let mut v = $crate::collections::Vec::with_capacity_in(n, $bump); if n > 0 { let elem = $elem; for _ in 0..n - 1 { v.push(elem.clone()); } v.push(elem); } v }}; (in $bump:expr) => { $crate::collections::Vec::new_in($bump) }; (in $bump:expr; $($x:expr),*) => {{ let mut v = $crate::collections::Vec::new_in($bump); $( v.push($x); )* v }}; (in $bump:expr; $($x:expr,)*) => (bumpalo::vec![in $bump; $($x),*]) } /// A contiguous growable array type, written `Vec<'bump, T>` but pronounced 'vector'. /// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let mut vec = Vec::new_in(&b); /// vec.push(1); /// vec.push(2); /// /// assert_eq!(vec.len(), 2); /// assert_eq!(vec[0], 1); /// /// assert_eq!(vec.pop(), Some(2)); /// assert_eq!(vec.len(), 1); /// /// vec[0] = 7; /// assert_eq!(vec[0], 7); /// /// vec.extend([1, 2, 3].iter().cloned()); /// /// for x in &vec { /// println!("{}", x); /// } /// assert_eq!(vec, [7, 1, 2, 3]); /// ``` /// /// The [`vec!`] macro is provided to make initialization more convenient: /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let mut vec = bumpalo::vec![in &b; 1, 2, 3]; /// vec.push(4); /// assert_eq!(vec, [1, 2, 3, 4]); /// ``` /// /// It can also initialize each element of a `Vec<'bump, T>` with a given value. 
/// This may be more efficient than performing allocation and initialization /// in separate steps, especially when initializing a vector of zeros: /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let vec = bumpalo::vec![in &b; 0; 5]; /// assert_eq!(vec, [0, 0, 0, 0, 0]); /// /// // The following is equivalent, but potentially slower: /// let mut vec1 = Vec::with_capacity_in(5, &b); /// vec1.resize(5, 0); /// ``` /// /// Use a `Vec<'bump, T>` as an efficient stack: /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let mut stack = Vec::new_in(&b); /// /// stack.push(1); /// stack.push(2); /// stack.push(3); /// /// while let Some(top) = stack.pop() { /// // Prints 3, 2, 1 /// println!("{}", top); /// } /// ``` /// /// # Indexing /// /// The `Vec` type allows to access values by index, because it implements the /// [`Index`] trait. An example will be more explicit: /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let v = bumpalo::vec![in &b; 0, 2, 4, 6]; /// println!("{}", v[1]); // it will display '2' /// ``` /// /// However be careful: if you try to access an index which isn't in the `Vec`, /// your software will panic! You cannot do this: /// /// ```should_panic /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let v = bumpalo::vec![in &b; 0, 2, 4, 6]; /// println!("{}", v[6]); // it will panic! /// ``` /// /// In conclusion: always check if the index you want to get really exists /// before doing it. /// /// # Slicing /// /// A `Vec` can be mutable. Slices, on the other hand, are read-only objects. /// To get a slice, use `&`. Example: /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// fn read_slice(slice: &[usize]) { /// // ... /// } /// /// let b = Bump::new(); /// /// let v = bumpalo::vec![in &b; 0, 1]; /// read_slice(&v); /// /// // ... and that's all! 
/// // you can also do it like this: /// let x : &[usize] = &v; /// ``` /// /// In Rust, it's more common to pass slices as arguments rather than vectors /// when you just want to provide a read access. The same goes for [`String`] and /// [`&str`]. /// /// # Capacity and reallocation /// /// The capacity of a vector is the amount of space allocated for any future /// elements that will be added onto the vector. This is not to be confused with /// the *length* of a vector, which specifies the number of actual elements /// within the vector. If a vector's length exceeds its capacity, its capacity /// will automatically be increased, but its elements will have to be /// reallocated. /// /// For example, a vector with capacity 10 and length 0 would be an empty vector /// with space for 10 more elements. Pushing 10 or fewer elements onto the /// vector will not change its capacity or cause reallocation to occur. However, /// if the vector's length is increased to 11, it will have to reallocate, which /// can be slow. For this reason, it is recommended to use [`Vec::with_capacity_in`] /// whenever possible to specify how big the vector is expected to get. /// /// # Guarantees /// /// Due to its incredibly fundamental nature, `Vec` makes a lot of guarantees /// about its design. This ensures that it's as low-overhead as possible in /// the general case, and can be correctly manipulated in primitive ways /// by unsafe code. Note that these guarantees refer to an unqualified `Vec<'bump, T>`. /// If additional type parameters are added (e.g. to support custom allocators), /// overriding their defaults may change the behavior. /// /// Most fundamentally, `Vec` is and always will be a (pointer, capacity, length) /// triplet. No more, no less. The order of these fields is completely /// unspecified, and you should use the appropriate methods to modify these. /// The pointer will never be null, so this type is null-pointer-optimized. 
///
/// However, the pointer may not actually point to allocated memory. In particular,
/// if you construct a `Vec` with capacity 0 via [`Vec::new_in`], [`bumpalo::vec![in bump]`][`vec!`],
/// [`Vec::with_capacity_in(0)`][`Vec::with_capacity_in`], or by calling [`shrink_to_fit`]
/// on an empty Vec, it will not allocate memory. Similarly, if you store zero-sized
/// types inside a `Vec`, it will not allocate space for them. *Note that in this case
/// the `Vec` may not report a [`capacity`] of 0*. `Vec` will allocate if and only
/// if [`mem::size_of::<T>`]`() * capacity() > 0`. In general, `Vec`'s allocation
/// details are very subtle — if you intend to allocate memory using a `Vec`
/// and use it for something else (either to pass to unsafe code, or to build your
/// own memory-backed collection), be sure to deallocate this memory by using
/// `from_raw_parts` to recover the `Vec` and then dropping it.
///
/// If a `Vec` *has* allocated memory, then the memory it points to is on the heap
/// (as defined by the allocator Rust is configured to use by default), and its
/// pointer points to [`len`] initialized, contiguous elements in order (what
/// you would see if you coerced it to a slice), followed by [`capacity`]` -
/// `[`len`] logically uninitialized, contiguous elements.
///
/// `Vec` will never perform a "small optimization" where elements are actually
/// stored on the stack for two reasons:
///
/// * It would make it more difficult for unsafe code to correctly manipulate
///   a `Vec`. The contents of a `Vec` wouldn't have a stable address if it were
///   only moved, and it would be more difficult to determine if a `Vec` had
///   actually allocated memory.
///
/// * It would penalize the general case, incurring an additional branch
///   on every access.
///
/// `Vec` will never automatically shrink itself, even if completely empty. This
/// ensures no unnecessary allocations or deallocations occur.
Emptying a `Vec` /// and then filling it back up to the same [`len`] should incur no calls to /// the allocator. If you wish to free up unused memory, use /// [`shrink_to_fit`][`shrink_to_fit`]. /// /// [`push`] and [`insert`] will never (re)allocate if the reported capacity is /// sufficient. [`push`] and [`insert`] *will* (re)allocate if /// [`len`]` == `[`capacity`]. That is, the reported capacity is completely /// accurate, and can be relied on. It can even be used to manually free the memory /// allocated by a `Vec` if desired. Bulk insertion methods *may* reallocate, even /// when not necessary. /// /// `Vec` does not guarantee any particular growth strategy when reallocating /// when full, nor when [`reserve`] is called. The current strategy is basic /// and it may prove desirable to use a non-constant growth factor. Whatever /// strategy is used will of course guarantee `O(1)` amortized [`push`]. /// /// `bumpalo::vec![in bump; x; n]`, `bumpalo::vec![in bump; a, b, c, d]`, and /// [`Vec::with_capacity_in(n)`][`Vec::with_capacity_in`], will all produce a /// `Vec` with exactly the requested capacity. If [`len`]` == `[`capacity`], (as /// is the case for the [`vec!`] macro), then a `Vec<'bump, T>` can be converted /// to and from a [`Box<[T]>`][owned slice] without reallocating or moving the /// elements. /// /// `Vec` will not specifically overwrite any data that is removed from it, /// but also won't specifically preserve it. Its uninitialized memory is /// scratch space that it may use however it wants. It will generally just do /// whatever is most efficient or otherwise easy to implement. Do not rely on /// removed data to be erased for security purposes. Even if you drop a `Vec`, its /// buffer may simply be reused by another `Vec`. Even if you zero a `Vec`'s memory /// first, that may not actually happen because the optimizer does not consider /// this a side-effect that must be preserved. 
There is one case which we will
/// not break, however: using `unsafe` code to write to the excess capacity,
/// and then increasing the length to match, is always valid.
///
/// `Vec` does not currently guarantee the order in which elements are dropped.
/// The order has changed in the past and may change again.
///
/// [`vec!`]: ../../macro.vec.html
/// [`Index`]: https://doc.rust-lang.org/nightly/std/ops/trait.Index.html
/// [`String`]: https://doc.rust-lang.org/nightly/std/string/struct.String.html
/// [`&str`]: https://doc.rust-lang.org/nightly/std/primitive.str.html
/// [`Vec::with_capacity_in`]: ./struct.Vec.html#method.with_capacity_in
/// [`Vec::new_in`]: ./struct.Vec.html#method.new_in
/// [`shrink_to_fit`]: ./struct.Vec.html#method.shrink_to_fit
/// [`capacity`]: ./struct.Vec.html#method.capacity
/// [`mem::size_of::<T>`]: https://doc.rust-lang.org/nightly/std/mem/fn.size_of.html
/// [`len`]: ./struct.Vec.html#method.len
/// [`push`]: ./struct.Vec.html#method.push
/// [`insert`]: ./struct.Vec.html#method.insert
/// [`reserve`]: ./struct.Vec.html#method.reserve
/// [owned slice]: https://doc.rust-lang.org/nightly/std/boxed/struct.Box.html
pub struct Vec<'bump, T: 'bump> {
    // Growable backing buffer allocated out of the bump arena.
    buf: RawVec<'bump, T>,
    // Number of initialized elements; always <= buf.cap().
    len: usize,
}

////////////////////////////////////////////////////////////////////////////////
// Inherent methods
////////////////////////////////////////////////////////////////////////////////

impl<'bump, T: 'bump> Vec<'bump, T> {
    /// Constructs a new, empty `Vec<'bump, T>`.
    ///
    /// The vector will not allocate until elements are pushed onto it.
    ///
    /// # Examples
    ///
    /// ```
    /// # #![allow(unused_mut)]
    /// use bumpalo::{Bump, collections::Vec};
    ///
    /// let b = Bump::new();
    /// let mut vec: Vec<i32> = Vec::new_in(&b);
    /// ```
    #[inline]
    pub fn new_in(bump: &'bump Bump) -> Vec<'bump, T> {
        Vec {
            buf: RawVec::new_in(bump),
            len: 0,
        }
    }

    /// Constructs a new, empty `Vec<'bump, T>` with the specified capacity.
/// /// The vector will be able to hold exactly `capacity` elements without /// reallocating. If `capacity` is 0, the vector will not allocate. /// /// It is important to note that although the returned vector has the /// *capacity* specified, the vector will have a zero *length*. For an /// explanation of the difference between length and capacity, see /// *[Capacity and reallocation]*. /// /// [Capacity and reallocation]: #capacity-and-reallocation /// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let mut vec = Vec::with_capacity_in(10, &b); /// /// // The vector contains no items, even though it has capacity for more /// assert_eq!(vec.len(), 0); /// /// // These are all done without reallocating... /// for i in 0..10 { /// vec.push(i); /// } /// /// // ...but this may make the vector reallocate /// vec.push(11); /// ``` #[inline] pub fn with_capacity_in(capacity: usize, bump: &'bump Bump) -> Vec<'bump, T> { Vec { buf: RawVec::with_capacity_in(capacity, bump), len: 0, } } /// Construct a new `Vec` from the given iterator's items. /// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// use std::iter; /// /// let b = Bump::new(); /// let v = Vec::from_iter_in(iter::repeat(7).take(3), &b); /// assert_eq!(v, [7, 7, 7]); /// ``` pub fn from_iter_in>(iter: I, bump: &'bump Bump) -> Vec<'bump, T> { let mut v = Vec::new_in(bump); v.extend(iter); v } /// Creates a `Vec<'bump, T>` directly from the raw components of another vector. /// /// # Safety /// /// This is highly unsafe, due to the number of invariants that aren't /// checked: /// /// * `ptr` needs to have been previously allocated via [`String`]/`Vec<'bump, T>` /// (at least, it's highly likely to be incorrect if it wasn't). /// * `ptr`'s `T` needs to have the same size and alignment as it was allocated with. /// * `length` needs to be less than or equal to `capacity`. 
/// * `capacity` needs to be the capacity that the pointer was allocated with. /// /// Violating these may cause problems like corrupting the allocator's /// internal data structures. For example it is **not** safe /// to build a `Vec` from a pointer to a C `char` array and a `size_t`. /// /// The ownership of `ptr` is effectively transferred to the /// `Vec<'bump, T>` which may then deallocate, reallocate or change the /// contents of memory pointed to by the pointer at will. Ensure /// that nothing else uses the pointer after calling this /// function. /// /// [`String`]: https://doc.rust-lang.org/nightly/std/string/struct.String.html /// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// use std::ptr; /// use std::mem; /// /// let b = Bump::new(); /// /// let mut v = bumpalo::vec![in &b; 1, 2, 3]; /// /// // Pull out the various important pieces of information about `v` /// let p = v.as_mut_ptr(); /// let len = v.len(); /// let cap = v.capacity(); /// /// unsafe { /// // Cast `v` into the void: no destructor run, so we are in /// // complete control of the allocation to which `p` points. /// mem::forget(v); /// /// // Overwrite memory with 4, 5, 6 /// for i in 0..len as isize { /// ptr::write(p.offset(i), 4 + i); /// } /// /// // Put everything back together into a Vec /// let rebuilt = Vec::from_raw_parts_in(p, len, cap, &b); /// assert_eq!(rebuilt, [4, 5, 6]); /// } /// ``` pub unsafe fn from_raw_parts_in( ptr: *mut T, length: usize, capacity: usize, bump: &'bump Bump, ) -> Vec<'bump, T> { Vec { buf: RawVec::from_raw_parts_in(ptr, capacity, bump), len: length, } } /// Returns the number of elements the vector can hold without /// reallocating. 
    ///
    /// # Examples
    ///
    /// ```
    /// use bumpalo::{Bump, collections::Vec};
    ///
    /// let b = Bump::new();
    /// let vec: Vec<i32> = Vec::with_capacity_in(10, &b);
    /// assert_eq!(vec.capacity(), 10);
    /// ```
    #[inline]
    pub fn capacity(&self) -> usize {
        self.buf.cap()
    }

    /// Reserves capacity for at least `additional` more elements to be inserted
    /// in the given `Vec<'bump, T>`. The collection may reserve more space to avoid
    /// frequent reallocations. After calling `reserve`, capacity will be
    /// greater than or equal to `self.len() + additional`. Does nothing if
    /// capacity is already sufficient.
    ///
    /// # Panics
    ///
    /// Panics if the new capacity overflows `usize`.
    ///
    /// # Examples
    ///
    /// ```
    /// use bumpalo::{Bump, collections::Vec};
    ///
    /// let b = Bump::new();
    /// let mut vec = bumpalo::vec![in &b; 1];
    /// vec.reserve(10);
    /// assert!(vec.capacity() >= 11);
    /// ```
    pub fn reserve(&mut self, additional: usize) {
        self.buf.reserve(self.len, additional);
    }

    /// Reserves the minimum capacity for exactly `additional` more elements to
    /// be inserted in the given `Vec<'bump, T>`. After calling `reserve_exact`,
    /// capacity will be greater than or equal to `self.len() + additional`.
    /// Does nothing if the capacity is already sufficient.
    ///
    /// Note that the allocator may give the collection more space than it
    /// requests. Therefore capacity can not be relied upon to be precisely
    /// minimal. Prefer `reserve` if future insertions are expected.
    ///
    /// # Panics
    ///
    /// Panics if the new capacity overflows `usize`.
    ///
    /// # Examples
    ///
    /// ```
    /// use bumpalo::{Bump, collections::Vec};
    ///
    /// let b = Bump::new();
    /// let mut vec = bumpalo::vec![in &b; 1];
    /// vec.reserve_exact(10);
    /// assert!(vec.capacity() >= 11);
    /// ```
    pub fn reserve_exact(&mut self, additional: usize) {
        self.buf.reserve_exact(self.len, additional);
    }

    /// Attempts to reserve capacity for at least `additional` more elements to be inserted
    /// in the given `Vec<'bump, T>`. The collection may reserve more space to avoid
    /// frequent reallocations. After calling `try_reserve`, capacity will be
    /// greater than or equal to `self.len() + additional`. Does nothing if
    /// capacity is already sufficient.
    ///
    /// # Panics
    ///
    /// Panics if the new capacity overflows `usize`.
    ///
    /// # Examples
    ///
    /// ```
    /// use bumpalo::{Bump, collections::Vec};
    ///
    /// let b = Bump::new();
    /// let mut vec = bumpalo::vec![in &b; 1];
    /// vec.try_reserve(10).unwrap();
    /// assert!(vec.capacity() >= 11);
    /// ```
    pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
        self.buf.try_reserve(self.len, additional)
    }

    /// Attempts to reserve the minimum capacity for exactly `additional` more elements to
    /// be inserted in the given `Vec<'bump, T>`. After calling `try_reserve_exact`,
    /// capacity will be greater than or equal to `self.len() + additional`.
    /// Does nothing if the capacity is already sufficient.
    ///
    /// Note that the allocator may give the collection more space than it
    /// requests. Therefore capacity can not be relied upon to be precisely
    /// minimal. Prefer `try_reserve` if future insertions are expected.
    ///
    /// # Panics
    ///
    /// Panics if the new capacity overflows `usize`.
    ///
    /// # Examples
    ///
    /// ```
    /// use bumpalo::{Bump, collections::Vec};
    ///
    /// let b = Bump::new();
    /// let mut vec = bumpalo::vec![in &b; 1];
    /// vec.try_reserve_exact(10).unwrap();
    /// assert!(vec.capacity() >= 11);
    /// ```
    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
        self.buf.try_reserve_exact(self.len, additional)
    }

    /// Shrinks the capacity of the vector as much as possible.
    ///
    /// It will drop down as close as possible to the length but the allocator
    /// may still inform the vector that there is space for a few more elements.
    ///
    /// # Examples
    ///
    /// ```
    /// use bumpalo::{Bump, collections::Vec};
    ///
    /// let b = Bump::new();
    ///
    /// let mut vec = Vec::with_capacity_in(10, &b);
    /// vec.extend([1, 2, 3].iter().cloned());
    /// assert_eq!(vec.capacity(), 10);
    /// vec.shrink_to_fit();
    /// assert!(vec.capacity() >= 3);
    /// ```
    pub fn shrink_to_fit(&mut self) {
        if self.capacity() != self.len {
            self.buf.shrink_to_fit(self.len);
        }
    }

    /// Converts the vector into `&'bump [T]`.
    ///
    /// # Examples
    ///
    /// ```
    /// use bumpalo::{Bump, collections::Vec};
    ///
    /// let b = Bump::new();
    /// let v = bumpalo::vec![in &b; 1, 2, 3];
    ///
    /// let slice = v.into_bump_slice();
    /// assert_eq!(slice, [1, 2, 3]);
    /// ```
    pub fn into_bump_slice(self) -> &'bump [T] {
        unsafe {
            // Capture pointer/length, then forget `self` so its destructor
            // doesn't run; the bump arena keeps the memory alive for 'bump.
            let ptr = self.as_ptr();
            let len = self.len();
            mem::forget(self);
            slice::from_raw_parts(ptr, len)
        }
    }

    /// Converts the vector into `&'bump mut [T]`.
    ///
    /// # Examples
    ///
    /// ```
    /// use bumpalo::{Bump, collections::Vec};
    ///
    /// let b = Bump::new();
    /// let v = bumpalo::vec![in &b; 1, 2, 3];
    ///
    /// let mut slice = v.into_bump_slice_mut();
    ///
    /// slice[0] = 3;
    /// slice[2] = 1;
    ///
    /// assert_eq!(slice, [3, 2, 1]);
    /// ```
    pub fn into_bump_slice_mut(mut self) -> &'bump mut [T] {
        let ptr = self.as_mut_ptr();
        let len = self.len();
        // As above: suppress the destructor; the arena owns the allocation.
        mem::forget(self);

        unsafe { slice::from_raw_parts_mut(ptr, len) }
    }

    /// Shortens the vector, keeping the first `len` elements and dropping
    /// the rest.
    ///
    /// If `len` is greater than the vector's current length, this has no
    /// effect.
    ///
    /// The [`drain`] method can emulate `truncate`, but causes the excess
    /// elements to be returned instead of dropped.
    ///
    /// Note that this method has no effect on the allocated capacity
    /// of the vector.
    ///
    /// # Examples
    ///
    /// Truncating a five element vector to two elements:
    ///
    /// ```
    /// use bumpalo::{Bump, collections::Vec};
    ///
    /// let b = Bump::new();
    ///
    /// let mut vec = bumpalo::vec![in &b; 1, 2, 3, 4, 5];
    /// vec.truncate(2);
    /// assert_eq!(vec, [1, 2]);
    /// ```
    ///
    /// No truncation occurs when `len` is greater than the vector's current
    /// length:
    ///
    /// ```
    /// use bumpalo::{Bump, collections::Vec};
    ///
    /// let b = Bump::new();
    ///
    /// let mut vec = bumpalo::vec![in &b; 1, 2, 3];
    /// vec.truncate(8);
    /// assert_eq!(vec, [1, 2, 3]);
    /// ```
    ///
    /// Truncating when `len == 0` is equivalent to calling the [`clear`]
    /// method.
    ///
    /// ```
    /// use bumpalo::{Bump, collections::Vec};
    ///
    /// let b = Bump::new();
    ///
    /// let mut vec = bumpalo::vec![in &b; 1, 2, 3];
    /// vec.truncate(0);
    /// assert_eq!(vec, []);
    /// ```
    ///
    /// [`clear`]: #method.clear
    /// [`drain`]: #method.drain
    pub fn truncate(&mut self, len: usize) {
        let current_len = self.len;
        unsafe {
            let mut ptr = self.as_mut_ptr().add(self.len);
            // Set the final length at the end, keeping in mind that
            // dropping an element might panic. Works around a missed
            // optimization, as seen in the following issue:
            // https://github.com/rust-lang/rust/issues/51802
            let mut local_len = SetLenOnDrop::new(&mut self.len);

            // drop any extra elements, walking back from the old end
            for _ in len..current_len {
                local_len.decrement_len(1);
                ptr = ptr.offset(-1);
                ptr::drop_in_place(ptr);
            }
        }
    }

    /// Extracts a slice containing the entire vector.
    ///
    /// Equivalent to `&s[..]`.
    ///
    /// # Examples
    ///
    /// ```
    /// use bumpalo::{Bump, collections::Vec};
    /// use std::io::{self, Write};
    ///
    /// let b = Bump::new();
    ///
    /// let buffer = bumpalo::vec![in &b; 1, 2, 3, 5, 8];
    /// io::sink().write(buffer.as_slice()).unwrap();
    /// ```
    #[inline]
    pub fn as_slice(&self) -> &[T] {
        self
    }

    /// Extracts a mutable slice of the entire vector.
    ///
    /// Equivalent to `&mut s[..]`.
/// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// use std::io::{self, Read}; /// /// let b = Bump::new(); /// let mut buffer = bumpalo::vec![in &b; 0; 3]; /// io::repeat(0b101).read_exact(buffer.as_mut_slice()).unwrap(); /// ``` #[inline] pub fn as_mut_slice(&mut self) -> &mut [T] { self } /// Sets the length of a vector. /// /// This will explicitly set the size of the vector, without actually /// modifying its buffers, so it is up to the caller to ensure that the /// vector is actually the specified size. /// /// # Safety /// /// - `new_len` must be less than or equal to [`capacity()`]. /// - The elements at `old_len..new_len` must be initialized. /// /// [`capacity()`]: struct.Vec.html#method.capacity /// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// use std::ptr; /// /// let b = Bump::new(); /// /// let mut vec = bumpalo::vec![in &b; 'r', 'u', 's', 't']; /// /// unsafe { /// ptr::drop_in_place(&mut vec[3]); /// vec.set_len(3); /// } /// assert_eq!(vec, ['r', 'u', 's']); /// ``` /// /// In this example, there is a memory leak since the memory locations /// owned by the inner vectors were not freed prior to the `set_len` call: /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let mut vec = bumpalo::vec![in &b; /// bumpalo::vec![in &b; 1, 0, 0], /// bumpalo::vec![in &b; 0, 1, 0], /// bumpalo::vec![in &b; 0, 0, 1]]; /// unsafe { /// vec.set_len(0); /// } /// ``` /// /// In this example, the vector gets expanded from zero to four items /// without any memory allocations occurring, resulting in vector /// values of unallocated memory: /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let mut vec: Vec = Vec::new_in(&b); /// /// unsafe { /// vec.set_len(4); /// } /// ``` #[inline] pub unsafe fn set_len(&mut self, new_len: usize) { self.len = new_len; } /// Removes an element from the vector and returns it. 
/// /// The removed element is replaced by the last element of the vector. /// /// This does not preserve ordering, but is O(1). /// /// # Panics /// /// Panics if `index` is out of bounds. /// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let mut v = bumpalo::vec![in &b; "foo", "bar", "baz", "qux"]; /// /// assert_eq!(v.swap_remove(1), "bar"); /// assert_eq!(v, ["foo", "qux", "baz"]); /// /// assert_eq!(v.swap_remove(0), "foo"); /// assert_eq!(v, ["baz", "qux"]); /// ``` #[inline] pub fn swap_remove(&mut self, index: usize) -> T { unsafe { // We replace self[index] with the last element. Note that if the // bounds check on hole succeeds there must be a last element (which // can be self[index] itself). let hole: *mut T = &mut self[index]; let last = ptr::read(self.get_unchecked(self.len - 1)); self.len -= 1; ptr::replace(hole, last) } } /// Inserts an element at position `index` within the vector, shifting all /// elements after it to the right. /// /// # Panics /// /// Panics if `index > len`. /// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let mut vec = bumpalo::vec![in &b; 1, 2, 3]; /// vec.insert(1, 4); /// assert_eq!(vec, [1, 4, 2, 3]); /// vec.insert(4, 5); /// assert_eq!(vec, [1, 4, 2, 3, 5]); /// ``` pub fn insert(&mut self, index: usize, element: T) { let len = self.len(); assert!(index <= len); // space for the new element if len == self.buf.cap() { self.reserve(1); } unsafe { // infallible // The spot to put the new value { let p = self.as_mut_ptr().add(index); // Shift everything over to make space. (Duplicating the // `index`th element into two consecutive places.) ptr::copy(p, p.offset(1), len - index); // Write it in, overwriting the first copy of the `index`th // element. 
ptr::write(p, element); } self.set_len(len + 1); } } /// Removes and returns the element at position `index` within the vector, /// shifting all elements after it to the left. /// /// # Panics /// /// Panics if `index` is out of bounds. /// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let mut v = bumpalo::vec![in &b; 1, 2, 3]; /// assert_eq!(v.remove(1), 2); /// assert_eq!(v, [1, 3]); /// ``` pub fn remove(&mut self, index: usize) -> T { let len = self.len(); assert!(index < len); unsafe { // infallible let ret; { // the place we are taking from. let ptr = self.as_mut_ptr().add(index); // copy it out, unsafely having a copy of the value on // the stack and in the vector at the same time. ret = ptr::read(ptr); // Shift everything down to fill in that spot. ptr::copy(ptr.offset(1), ptr, len - index - 1); } self.set_len(len - 1); ret } } /// Retains only the elements specified by the predicate. /// /// In other words, remove all elements `e` such that `f(&e)` returns `false`. /// This method operates in place and preserves the order of the retained /// elements. /// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let mut vec = bumpalo::vec![in &b; 1, 2, 3, 4]; /// vec.retain(|&x| x%2 == 0); /// assert_eq!(vec, [2, 4]); /// ``` pub fn retain(&mut self, mut f: F) where F: FnMut(&T) -> bool, { self.drain_filter(|x| !f(x)); } fn drain_filter<'a, F>(&'a mut self, filter: F) -> DrainFilter<'a, 'bump, T, F> where F: FnMut(&mut T) -> bool, { let old_len = self.len(); // Guard against us getting leaked (leak amplification) unsafe { self.set_len(0); } DrainFilter { vec: self, idx: 0, del: 0, old_len, pred: filter, } } /// Removes all but the first of consecutive elements in the vector that resolve to the same /// key. /// /// If the vector is sorted, this removes all duplicates. 
/// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let mut vec = bumpalo::vec![in &b; 10, 20, 21, 30, 20]; /// /// vec.dedup_by_key(|i| *i / 10); /// /// assert_eq!(vec, [10, 20, 30, 20]); /// ``` #[inline] pub fn dedup_by_key(&mut self, mut key: F) where F: FnMut(&mut T) -> K, K: PartialEq, { self.dedup_by(|a, b| key(a) == key(b)) } /// Removes all but the first of consecutive elements in the vector satisfying a given equality /// relation. /// /// The `same_bucket` function is passed references to two elements from the vector and /// must determine if the elements compare equal. The elements are passed in opposite order /// from their order in the slice, so if `same_bucket(a, b)` returns `true`, `a` is removed. /// /// If the vector is sorted, this removes all duplicates. /// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let mut vec = bumpalo::vec![in &b; "foo", "bar", "Bar", "baz", "bar"]; /// /// vec.dedup_by(|a, b| a.eq_ignore_ascii_case(b)); /// /// assert_eq!(vec, ["foo", "bar", "baz", "bar"]); /// ``` pub fn dedup_by(&mut self, same_bucket: F) where F: FnMut(&mut T, &mut T) -> bool, { let len = { let (dedup, _) = partition_dedup_by(self.as_mut_slice(), same_bucket); dedup.len() }; self.truncate(len); } /// Appends an element to the back of a collection. /// /// # Panics /// /// Panics if the number of elements in the vector overflows a `usize`. /// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let mut vec = bumpalo::vec![in &b; 1, 2]; /// vec.push(3); /// assert_eq!(vec, [1, 2, 3]); /// ``` #[inline] pub fn push(&mut self, value: T) { // This will panic or abort if we would allocate > isize::MAX bytes // or if the length increment would overflow for zero-sized types. 
if self.len == self.buf.cap() { self.reserve(1); } unsafe { let end = self.as_mut_ptr().add(self.len); ptr::write(end, value); self.len += 1; } } /// Removes the last element from a vector and returns it, or [`None`] if it /// is empty. /// /// [`None`]: https://doc.rust-lang.org/nightly/std/option/enum.Option.html#variant.None /// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let mut vec = bumpalo::vec![in &b; 1, 2, 3]; /// assert_eq!(vec.pop(), Some(3)); /// assert_eq!(vec, [1, 2]); /// ``` #[inline] pub fn pop(&mut self) -> Option { if self.len == 0 { None } else { unsafe { self.len -= 1; Some(ptr::read(self.get_unchecked(self.len()))) } } } /// Moves all the elements of `other` into `Self`, leaving `other` empty. /// /// # Panics /// /// Panics if the number of elements in the vector overflows a `usize`. /// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let mut vec = bumpalo::vec![in &b; 1, 2, 3]; /// let mut vec2 = bumpalo::vec![in &b; 4, 5, 6]; /// vec.append(&mut vec2); /// assert_eq!(vec, [1, 2, 3, 4, 5, 6]); /// assert_eq!(vec2, []); /// ``` #[inline] pub fn append(&mut self, other: &mut Self) { unsafe { self.append_elements(other.as_slice() as _); other.set_len(0); } } /// Appends elements to `Self` from other buffer. #[inline] unsafe fn append_elements(&mut self, other: *const [T]) { let count = (*other).len(); self.reserve(count); let len = self.len(); ptr::copy_nonoverlapping(other as *const T, self.get_unchecked_mut(len), count); self.len += count; } /// Creates a draining iterator that removes the specified range in the vector /// and yields the removed items. /// /// Note 1: The element range is removed even if the iterator is only /// partially consumed or not consumed at all. /// /// Note 2: It is unspecified how many elements are removed from the vector /// if the `Drain` value is leaked. 
/// /// # Panics /// /// Panics if the starting point is greater than the end point or if /// the end point is greater than the length of the vector. /// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let mut v = bumpalo::vec![in &b; 1, 2, 3]; /// /// let mut u: Vec<_> = Vec::new_in(&b); /// u.extend(v.drain(1..)); /// /// assert_eq!(v, &[1]); /// assert_eq!(u, &[2, 3]); /// /// // A full range clears the vector /// v.drain(..); /// assert_eq!(v, &[]); /// ``` pub fn drain(&mut self, range: R) -> Drain where R: RangeBounds, { // Memory safety // // When the Drain is first created, it shortens the length of // the source vector to make sure no uninitialized or moved-from elements // are accessible at all if the Drain's destructor never gets to run. // // Drain will ptr::read out the values to remove. // When finished, remaining tail of the vec is copied back to cover // the hole, and the vector length is restored to the new length. // let len = self.len(); let start = match range.start_bound() { Included(&n) => n, Excluded(&n) => n + 1, Unbounded => 0, }; let end = match range.end_bound() { Included(&n) => n + 1, Excluded(&n) => n, Unbounded => len, }; assert!(start <= end); assert!(end <= len); unsafe { // set self.vec length's to start, to be safe in case Drain is leaked self.set_len(start); // Use the borrow in the IterMut to indicate borrowing behavior of the // whole Drain iterator (like &mut T). let range_slice = slice::from_raw_parts_mut(self.as_mut_ptr().add(start), end - start); Drain { tail_start: end, tail_len: len - end, iter: range_slice.iter(), vec: NonNull::from(self), } } } /// Clears the vector, removing all values. /// /// Note that this method has no effect on the allocated capacity /// of the vector. 
/// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let mut v = bumpalo::vec![in &b; 1, 2, 3]; /// /// v.clear(); /// /// assert!(v.is_empty()); /// ``` #[inline] pub fn clear(&mut self) { self.truncate(0) } /// Returns the number of elements in the vector, also referred to /// as its 'length'. /// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let a = bumpalo::vec![in &b; 1, 2, 3]; /// assert_eq!(a.len(), 3); /// ``` #[inline] pub fn len(&self) -> usize { self.len } /// Returns `true` if the vector contains no elements. /// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let mut v = Vec::new_in(&b); /// assert!(v.is_empty()); /// /// v.push(1); /// assert!(!v.is_empty()); /// ``` pub fn is_empty(&self) -> bool { self.len() == 0 } /// Splits the collection into two at the given index. /// /// Returns a newly allocated `Self`. `self` contains elements `[0, at)`, /// and the returned `Self` contains elements `[at, len)`. /// /// Note that the capacity of `self` does not change. /// /// # Panics /// /// Panics if `at > len`. /// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let mut vec = bumpalo::vec![in &b; 1,2,3]; /// let vec2 = vec.split_off(1); /// assert_eq!(vec, [1]); /// assert_eq!(vec2, [2, 3]); /// ``` #[inline] pub fn split_off(&mut self, at: usize) -> Self { assert!(at <= self.len(), "`at` out of bounds"); let other_len = self.len - at; let mut other = Vec::with_capacity_in(other_len, self.buf.bump()); // Unsafely `set_len` and copy items to `other`. unsafe { self.set_len(at); other.set_len(other_len); ptr::copy_nonoverlapping(self.as_ptr().add(at), other.as_mut_ptr(), other.len()); } other } } #[cfg(feature = "boxed")] impl<'bump, T> Vec<'bump, T> { /// Converts the vector into [`Box<[T]>`][owned slice]. 
/// /// Note that this will drop any excess capacity. /// /// [owned slice]: ../boxed/struct.Box.html /// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::Vec, vec}; /// /// let b = Bump::new(); /// /// let v = vec![in &b; 1, 2, 3]; /// /// let slice = v.into_boxed_slice(); /// ``` pub fn into_boxed_slice(mut self) -> crate::boxed::Box<'bump, [T]> { use crate::boxed::Box; // Unlike `alloc::vec::Vec` shrinking here isn't necessary as `bumpalo::boxed::Box` doesn't own memory. unsafe { let slice = slice::from_raw_parts_mut(self.as_mut_ptr(), self.len); let output: Box<'bump, [T]> = Box::from_raw(slice); mem::forget(self); output } } } impl<'bump, T: 'bump + Clone> Vec<'bump, T> { /// Resizes the `Vec` in-place so that `len` is equal to `new_len`. /// /// If `new_len` is greater than `len`, the `Vec` is extended by the /// difference, with each additional slot filled with `value`. /// If `new_len` is less than `len`, the `Vec` is simply truncated. /// /// This method requires [`Clone`] to be able clone the passed value. If /// you need more flexibility (or want to rely on [`Default`] instead of /// [`Clone`]), use [`resize_with`]. /// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let mut vec = bumpalo::vec![in &b; "hello"]; /// vec.resize(3, "world"); /// assert_eq!(vec, ["hello", "world", "world"]); /// /// let mut vec = bumpalo::vec![in &b; 1, 2, 3, 4]; /// vec.resize(2, 0); /// assert_eq!(vec, [1, 2]); /// ``` /// /// [`Clone`]: https://doc.rust-lang.org/nightly/std/clone/trait.Clone.html /// [`Default`]: https://doc.rust-lang.org/nightly/std/default/trait.Default.html /// [`resize_with`]: #method.resize_with pub fn resize(&mut self, new_len: usize, value: T) { let len = self.len(); if new_len > len { self.extend_with(new_len - len, ExtendElement(value)) } else { self.truncate(new_len); } } /// Clones and appends all elements in a slice to the `Vec`. 
/// /// Iterates over the slice `other`, clones each element, and then appends /// it to this `Vec`. The `other` vector is traversed in-order. /// /// Note that this function is same as [`extend`] except that it is /// specialized to work with slices instead. If and when Rust gets /// specialization this function will likely be deprecated (but still /// available). /// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let mut vec = bumpalo::vec![in &b; 1]; /// vec.extend_from_slice(&[2, 3, 4]); /// assert_eq!(vec, [1, 2, 3, 4]); /// ``` /// /// [`extend`]: #method.extend pub fn extend_from_slice(&mut self, other: &[T]) { self.extend(other.iter().cloned()) } } // This code generalises `extend_with_{element,default}`. trait ExtendWith { fn next(&mut self) -> T; fn last(self) -> T; } struct ExtendElement(T); impl ExtendWith for ExtendElement { fn next(&mut self) -> T { self.0.clone() } fn last(self) -> T { self.0 } } impl<'bump, T: 'bump> Vec<'bump, T> { /// Extend the vector by `n` values, using the given generator. fn extend_with>(&mut self, n: usize, mut value: E) { self.reserve(n); unsafe { let mut ptr = self.as_mut_ptr().add(self.len()); // Use SetLenOnDrop to work around bug where compiler // may not realize the store through `ptr` through self.set_len() // don't alias. let mut local_len = SetLenOnDrop::new(&mut self.len); // Write all elements except the last one for _ in 1..n { ptr::write(ptr, value.next()); ptr = ptr.offset(1); // Increment the length in every step in case next() panics local_len.increment_len(1); } if n > 0 { // We can write the last element directly without cloning needlessly ptr::write(ptr, value.last()); local_len.increment_len(1); } // len set by scope guard } } } // Set the length of the vec when the `SetLenOnDrop` value goes out of scope. 
// // The idea is: The length field in SetLenOnDrop is a local variable // that the optimizer will see does not alias with any stores through the Vec's data // pointer. This is a workaround for alias analysis issue #32155 struct SetLenOnDrop<'a> { len: &'a mut usize, local_len: usize, } impl<'a> SetLenOnDrop<'a> { #[inline] fn new(len: &'a mut usize) -> Self { SetLenOnDrop { local_len: *len, len, } } #[inline] fn increment_len(&mut self, increment: usize) { self.local_len += increment; } #[inline] fn decrement_len(&mut self, decrement: usize) { self.local_len -= decrement; } } impl<'a> Drop for SetLenOnDrop<'a> { #[inline] fn drop(&mut self) { *self.len = self.local_len; } } impl<'bump, T: 'bump + PartialEq> Vec<'bump, T> { /// Removes consecutive repeated elements in the vector according to the /// [`PartialEq`] trait implementation. /// /// If the vector is sorted, this removes all duplicates. /// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let mut vec = bumpalo::vec![in &b; 1, 2, 2, 3, 2]; /// /// vec.dedup(); /// /// assert_eq!(vec, [1, 2, 3, 2]); /// ``` #[inline] pub fn dedup(&mut self) { self.dedup_by(|a, b| a == b) } } //////////////////////////////////////////////////////////////////////////////// // Common trait implementations for Vec //////////////////////////////////////////////////////////////////////////////// impl<'bump, T: 'bump + Clone> Clone for Vec<'bump, T> { #[cfg(not(test))] fn clone(&self) -> Vec<'bump, T> { let mut v = Vec::with_capacity_in(self.len(), self.buf.bump()); v.extend(self.iter().cloned()); v } // HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is // required for this method definition, is not available. 
Instead use the // `slice::to_vec` function which is only available with cfg(test) // NB see the slice::hack module in slice.rs for more information #[cfg(test)] fn clone(&self) -> Vec<'bump, T> { let mut v = Vec::new_in(self.buf.bump()); v.extend(self.iter().cloned()); v } } impl<'bump, T: 'bump + Hash> Hash for Vec<'bump, T> { #[inline] fn hash(&self, state: &mut H) { Hash::hash(&**self, state) } } impl<'bump, T, I> Index for Vec<'bump, T> where I: ::core::slice::SliceIndex<[T]>, { type Output = I::Output; #[inline] fn index(&self, index: I) -> &Self::Output { Index::index(&**self, index) } } impl<'bump, T, I> IndexMut for Vec<'bump, T> where I: ::core::slice::SliceIndex<[T]>, { #[inline] fn index_mut(&mut self, index: I) -> &mut Self::Output { IndexMut::index_mut(&mut **self, index) } } impl<'bump, T: 'bump> ops::Deref for Vec<'bump, T> { type Target = [T]; fn deref(&self) -> &[T] { unsafe { let p = self.buf.ptr(); // assume(!p.is_null()); slice::from_raw_parts(p, self.len) } } } impl<'bump, T: 'bump> ops::DerefMut for Vec<'bump, T> { fn deref_mut(&mut self) -> &mut [T] { unsafe { let ptr = self.buf.ptr(); // assume(!ptr.is_null()); slice::from_raw_parts_mut(ptr, self.len) } } } impl<'bump, T: 'bump> IntoIterator for Vec<'bump, T> { type Item = T; type IntoIter = IntoIter; /// Creates a consuming iterator, that is, one that moves each value out of /// the vector (from start to end). The vector cannot be used after calling /// this. 
/// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let v = bumpalo::vec![in &b; "a".to_string(), "b".to_string()]; /// for s in v.into_iter() { /// // s has type String, not &String /// println!("{}", s); /// } /// ``` #[inline] fn into_iter(mut self) -> IntoIter { unsafe { let begin = self.as_mut_ptr(); // assume(!begin.is_null()); let end = if mem::size_of::() == 0 { arith_offset(begin as *const i8, self.len() as isize) as *const T } else { begin.add(self.len()) as *const T }; mem::forget(self); IntoIter { phantom: PhantomData, ptr: begin, end, } } } } impl<'a, 'bump, T> IntoIterator for &'a Vec<'bump, T> { type Item = &'a T; type IntoIter = slice::Iter<'a, T>; fn into_iter(self) -> slice::Iter<'a, T> { self.iter() } } impl<'a, 'bump, T> IntoIterator for &'a mut Vec<'bump, T> { type Item = &'a mut T; type IntoIter = slice::IterMut<'a, T>; fn into_iter(self) -> slice::IterMut<'a, T> { self.iter_mut() } } impl<'bump, T: 'bump> Extend for Vec<'bump, T> { #[inline] fn extend>(&mut self, iter: I) { let iter = iter.into_iter(); self.reserve(iter.size_hint().0); for t in iter { self.push(t); } } } impl<'bump, T: 'bump> Vec<'bump, T> { /// Creates a splicing iterator that replaces the specified range in the vector /// with the given `replace_with` iterator and yields the removed items. /// `replace_with` does not need to be the same length as `range`. /// /// Note 1: The element range is removed even if the iterator is not /// consumed until the end. /// /// Note 2: It is unspecified how many elements are removed from the vector, /// if the `Splice` value is leaked. /// /// Note 3: The input iterator `replace_with` is only consumed /// when the `Splice` value is dropped. /// /// Note 4: This is optimal if: /// /// * The tail (elements in the vector after `range`) is empty, /// * or `replace_with` yields fewer elements than `range`’s length /// * or the lower bound of its `size_hint()` is exact. 
/// /// Otherwise, a temporary vector is allocated and the tail is moved twice. /// /// # Panics /// /// Panics if the starting point is greater than the end point or if /// the end point is greater than the length of the vector. /// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let mut v = bumpalo::vec![in &b; 1, 2, 3]; /// let new = [7, 8]; /// let u: Vec<_> = Vec::from_iter_in(v.splice(..2, new.iter().cloned()), &b); /// assert_eq!(v, &[7, 8, 3]); /// assert_eq!(u, &[1, 2]); /// ``` #[inline] pub fn splice(&mut self, range: R, replace_with: I) -> Splice where R: RangeBounds, I: IntoIterator, { Splice { drain: self.drain(range), replace_with: replace_with.into_iter(), } } } /// Extend implementation that copies elements out of references before pushing them onto the Vec. /// /// This implementation is specialized for slice iterators, where it uses [`copy_from_slice`] to /// append the entire slice at once. /// /// [`copy_from_slice`]: https://doc.rust-lang.org/nightly/std/primitive.slice.html#method.copy_from_slice impl<'a, 'bump, T: 'a + Copy> Extend<&'a T> for Vec<'bump, T> { fn extend>(&mut self, iter: I) { self.extend(iter.into_iter().cloned()) } } macro_rules! __impl_slice_eq1 { ($Lhs: ty, $Rhs: ty) => { __impl_slice_eq1! { $Lhs, $Rhs, Sized } }; ($Lhs: ty, $Rhs: ty, $Bound: ident) => { impl<'a, 'b, A: $Bound, B> PartialEq<$Rhs> for $Lhs where A: PartialEq, { #[inline] fn eq(&self, other: &$Rhs) -> bool { self[..] == other[..] } } }; } __impl_slice_eq1! { Vec<'a, A>, Vec<'b, B> } __impl_slice_eq1! { Vec<'a, A>, &'b [B] } __impl_slice_eq1! { Vec<'a, A>, &'b mut [B] } // __impl_slice_eq1! { Cow<'a, [A]>, Vec<'b, B>, Clone } macro_rules! array_impls { ($($N: expr)+) => { $( // NOTE: some less important impls are omitted to reduce code bloat __impl_slice_eq1! { Vec<'a, A>, [B; $N] } __impl_slice_eq1! { Vec<'a, A>, &'b [B; $N] } // __impl_slice_eq1! { Vec, &'b mut [B; $N] } // __impl_slice_eq1! 
{ Cow<'a, [A]>, [B; $N], Clone } // __impl_slice_eq1! { Cow<'a, [A]>, &'b [B; $N], Clone } // __impl_slice_eq1! { Cow<'a, [A]>, &'b mut [B; $N], Clone } )+ } } array_impls! { 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 } /// Implements comparison of vectors, lexicographically. impl<'bump, T: 'bump + PartialOrd> PartialOrd for Vec<'bump, T> { #[inline] fn partial_cmp(&self, other: &Vec<'bump, T>) -> Option { PartialOrd::partial_cmp(&**self, &**other) } } impl<'bump, T: 'bump + Eq> Eq for Vec<'bump, T> {} /// Implements ordering of vectors, lexicographically. impl<'bump, T: 'bump + Ord> Ord for Vec<'bump, T> { #[inline] fn cmp(&self, other: &Vec<'bump, T>) -> Ordering { Ord::cmp(&**self, &**other) } } impl<'bump, T: 'bump + fmt::Debug> fmt::Debug for Vec<'bump, T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Debug::fmt(&**self, f) } } impl<'bump, T: 'bump> AsRef> for Vec<'bump, T> { fn as_ref(&self) -> &Vec<'bump, T> { self } } impl<'bump, T: 'bump> AsMut> for Vec<'bump, T> { fn as_mut(&mut self) -> &mut Vec<'bump, T> { self } } impl<'bump, T: 'bump> AsRef<[T]> for Vec<'bump, T> { fn as_ref(&self) -> &[T] { self } } impl<'bump, T: 'bump> AsMut<[T]> for Vec<'bump, T> { fn as_mut(&mut self) -> &mut [T] { self } } #[cfg(feature = "boxed")] impl<'bump, T: 'bump> From> for crate::boxed::Box<'bump, [T]> { fn from(v: Vec<'bump, T>) -> crate::boxed::Box<'bump, [T]> { v.into_boxed_slice() } } impl<'bump, T: 'bump> Borrow<[T]> for Vec<'bump, T> { #[inline] fn borrow(&self) -> &[T] { &self[..] } } impl<'bump, T: 'bump> BorrowMut<[T]> for Vec<'bump, T> { #[inline] fn borrow_mut(&mut self) -> &mut [T] { &mut self[..] 
} } //////////////////////////////////////////////////////////////////////////////// // Clone-on-write //////////////////////////////////////////////////////////////////////////////// // impl<'a, 'bump, T: Clone> From> for Cow<'a, [T]> { // fn from(v: Vec<'bump, T>) -> Cow<'a, [T]> { // Cow::Owned(v) // } // } // impl<'a, 'bump, T: Clone> From<&'a Vec<'bump, T>> for Cow<'a, [T]> { // fn from(v: &'a Vec<'bump, T>) -> Cow<'a, [T]> { // Cow::Borrowed(v.as_slice()) // } // } //////////////////////////////////////////////////////////////////////////////// // Iterators //////////////////////////////////////////////////////////////////////////////// /// An iterator that moves out of a vector. /// /// This `struct` is created by the `into_iter` method on [`Vec`][`Vec`] (provided /// by the [`IntoIterator`] trait). /// /// [`Vec`]: struct.Vec.html /// [`IntoIterator`]: https://doc.rust-lang.org/nightly/std/iter/trait.IntoIterator.html pub struct IntoIter { phantom: PhantomData, ptr: *const T, end: *const T, } impl fmt::Debug for IntoIter { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_tuple("IntoIter").field(&self.as_slice()).finish() } } impl<'bump, T: 'bump> IntoIter { /// Returns the remaining items of this iterator as a slice. /// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let vec = bumpalo::vec![in &b; 'a', 'b', 'c']; /// let mut into_iter = vec.into_iter(); /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']); /// let _ = into_iter.next().unwrap(); /// assert_eq!(into_iter.as_slice(), &['b', 'c']); /// ``` pub fn as_slice(&self) -> &[T] { unsafe { slice::from_raw_parts(self.ptr, self.len()) } } /// Returns the remaining items of this iterator as a mutable slice. 
/// /// # Examples /// /// ``` /// use bumpalo::{Bump, collections::Vec}; /// /// let b = Bump::new(); /// /// let vec = bumpalo::vec![in &b; 'a', 'b', 'c']; /// let mut into_iter = vec.into_iter(); /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']); /// into_iter.as_mut_slice()[2] = 'z'; /// assert_eq!(into_iter.next().unwrap(), 'a'); /// assert_eq!(into_iter.next().unwrap(), 'b'); /// assert_eq!(into_iter.next().unwrap(), 'z'); /// ``` pub fn as_mut_slice(&mut self) -> &mut [T] { unsafe { slice::from_raw_parts_mut(self.ptr as *mut T, self.len()) } } } unsafe impl Send for IntoIter {} unsafe impl Sync for IntoIter {} impl<'bump, T: 'bump> Iterator for IntoIter { type Item = T; #[inline] fn next(&mut self) -> Option { unsafe { if self.ptr as *const _ == self.end { None } else if mem::size_of::() == 0 { // purposefully don't use 'ptr.offset' because for // vectors with 0-size elements this would return the // same pointer. self.ptr = arith_offset(self.ptr as *const i8, 1) as *mut T; // Make up a value of this ZST. Some(mem::zeroed()) } else { let old = self.ptr; self.ptr = self.ptr.offset(1); Some(ptr::read(old)) } } } #[inline] fn size_hint(&self) -> (usize, Option) { let exact = if mem::size_of::() == 0 { (self.end as usize).wrapping_sub(self.ptr as usize) } else { unsafe { offset_from(self.end, self.ptr) as usize } }; (exact, Some(exact)) } #[inline] fn count(self) -> usize { self.len() } } impl<'bump, T: 'bump> DoubleEndedIterator for IntoIter { #[inline] fn next_back(&mut self) -> Option { unsafe { if self.end == self.ptr { None } else if mem::size_of::() == 0 { // See above for why 'ptr.offset' isn't used self.end = arith_offset(self.end as *const i8, -1) as *mut T; // Make up a value of this ZST. Some(mem::zeroed()) } else { self.end = self.end.offset(-1); Some(ptr::read(self.end)) } } } } impl<'bump, T: 'bump> ExactSizeIterator for IntoIter {} impl<'bump, T: 'bump> FusedIterator for IntoIter {} /// A draining iterator for `Vec<'bump, T>`. 
/// This `struct` is created by the [`drain`] method on [`Vec`].
///
/// [`drain`]: struct.Vec.html#method.drain
/// [`Vec`]: struct.Vec.html
pub struct Drain<'a, 'bump, T: 'a + 'bump> {
    /// Index of tail to preserve
    tail_start: usize,
    /// Length of tail
    tail_len: usize,
    /// Current remaining range to remove
    iter: slice::Iter<'a, T>,
    vec: NonNull<Vec<'bump, T>>,
}

impl<'a, 'bump, T: 'a + 'bump + fmt::Debug> fmt::Debug for Drain<'a, 'bump, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("Drain").field(&self.iter.as_slice()).finish()
    }
}

unsafe impl<'a, 'bump, T: Sync> Sync for Drain<'a, 'bump, T> {}
unsafe impl<'a, 'bump, T: Send> Send for Drain<'a, 'bump, T> {}

impl<'a, 'bump, T> Iterator for Drain<'a, 'bump, T> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        self.iter
            .next()
            .map(|elt| unsafe { ptr::read(elt as *const _) })
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}

impl<'a, 'bump, T> DoubleEndedIterator for Drain<'a, 'bump, T> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        self.iter
            .next_back()
            .map(|elt| unsafe { ptr::read(elt as *const _) })
    }
}

impl<'a, 'bump, T> Drop for Drain<'a, 'bump, T> {
    fn drop(&mut self) {
        // exhaust self first
        self.for_each(drop);

        if self.tail_len > 0 {
            unsafe {
                let source_vec = self.vec.as_mut();
                // memmove back untouched tail, update to new length
                let start = source_vec.len();
                let tail = self.tail_start;
                if tail != start {
                    let src = source_vec.as_ptr().add(tail);
                    let dst = source_vec.as_mut_ptr().add(start);
                    ptr::copy(src, dst, self.tail_len);
                }
                source_vec.set_len(start + self.tail_len);
            }
        }
    }
}

impl<'a, 'bump, T> ExactSizeIterator for Drain<'a, 'bump, T> {}

impl<'a, 'bump, T> FusedIterator for Drain<'a, 'bump, T> {}

/// A splicing iterator for `Vec`.
///
/// This struct is created by the [`splice()`] method on [`Vec`]. See its
/// documentation for more.
/// /// [`splice()`]: struct.Vec.html#method.splice /// [`Vec`]: struct.Vec.html #[derive(Debug)] pub struct Splice<'a, 'bump, I: Iterator + 'a + 'bump> { drain: Drain<'a, 'bump, I::Item>, replace_with: I, } impl<'a, 'bump, I: Iterator> Iterator for Splice<'a, 'bump, I> { type Item = I::Item; fn next(&mut self) -> Option { self.drain.next() } fn size_hint(&self) -> (usize, Option) { self.drain.size_hint() } } impl<'a, 'bump, I: Iterator> DoubleEndedIterator for Splice<'a, 'bump, I> { fn next_back(&mut self) -> Option { self.drain.next_back() } } impl<'a, 'bump, I: Iterator> ExactSizeIterator for Splice<'a, 'bump, I> {} impl<'a, 'bump, I: Iterator> Drop for Splice<'a, 'bump, I> { fn drop(&mut self) { self.drain.by_ref().for_each(drop); unsafe { if self.drain.tail_len == 0 { self.drain.vec.as_mut().extend(self.replace_with.by_ref()); return; } // First fill the range left by drain(). if !self.drain.fill(&mut self.replace_with) { return; } // There may be more elements. Use the lower bound as an estimate. // FIXME: Is the upper bound a better guess? Or something else? let (lower_bound, _upper_bound) = self.replace_with.size_hint(); if lower_bound > 0 { self.drain.move_tail(lower_bound); if !self.drain.fill(&mut self.replace_with) { return; } } // Collect any remaining elements. // This is a zero-length vector which does not allocate if `lower_bound` was exact. let mut collected = Vec::new_in(self.drain.vec.as_ref().buf.bump()); collected.extend(self.replace_with.by_ref()); let mut collected = collected.into_iter(); // Now we have an exact count. if collected.len() > 0 { self.drain.move_tail(collected.len()); let filled = self.drain.fill(&mut collected); debug_assert!(filled); debug_assert_eq!(collected.len(), 0); } } // Let `Drain::drop` move the tail back if necessary and restore `vec.len`. 
} } /// Private helper methods for `Splice::drop` impl<'a, 'bump, T> Drain<'a, 'bump, T> { /// The range from `self.vec.len` to `self.tail_start` contains elements /// that have been moved out. /// Fill that range as much as possible with new elements from the `replace_with` iterator. /// Return whether we filled the entire range. (`replace_with.next()` didn’t return `None`.) unsafe fn fill>(&mut self, replace_with: &mut I) -> bool { let vec = self.vec.as_mut(); let range_start = vec.len; let range_end = self.tail_start; let range_slice = slice::from_raw_parts_mut(vec.as_mut_ptr().add(range_start), range_end - range_start); for place in range_slice { if let Some(new_item) = replace_with.next() { ptr::write(place, new_item); vec.len += 1; } else { return false; } } true } /// Make room for inserting more elements before the tail. unsafe fn move_tail(&mut self, extra_capacity: usize) { let vec = self.vec.as_mut(); let used_capacity = self.tail_start + self.tail_len; vec.buf.reserve(used_capacity, extra_capacity); let new_tail_start = self.tail_start + extra_capacity; let src = vec.as_ptr().add(self.tail_start); let dst = vec.as_mut_ptr().add(new_tail_start); ptr::copy(src, dst, self.tail_len); self.tail_start = new_tail_start; } } /// An iterator produced by calling `drain_filter` on Vec. 
#[derive(Debug)] pub struct DrainFilter<'a, 'bump: 'a, T: 'a + 'bump, F> where F: FnMut(&mut T) -> bool, { vec: &'a mut Vec<'bump, T>, idx: usize, del: usize, old_len: usize, pred: F, } impl<'a, 'bump, T, F> Iterator for DrainFilter<'a, 'bump, T, F> where F: FnMut(&mut T) -> bool, { type Item = T; fn next(&mut self) -> Option { unsafe { while self.idx != self.old_len { let i = self.idx; self.idx += 1; let v = slice::from_raw_parts_mut(self.vec.as_mut_ptr(), self.old_len); if (self.pred)(&mut v[i]) { self.del += 1; return Some(ptr::read(&v[i])); } else if self.del > 0 { let del = self.del; let src: *const T = &v[i]; let dst: *mut T = &mut v[i - del]; // This is safe because self.vec has length 0 // thus its elements will not have Drop::drop // called on them in the event of a panic. ptr::copy_nonoverlapping(src, dst, 1); } } None } } fn size_hint(&self) -> (usize, Option) { (0, Some(self.old_len - self.idx)) } } impl<'a, 'bump, T, F> Drop for DrainFilter<'a, 'bump, T, F> where F: FnMut(&mut T) -> bool, { fn drop(&mut self) { self.for_each(drop); unsafe { self.vec.set_len(self.old_len - self.del); } } } bumpalo-3.7.0/src/lib.rs000064400000000000000000002060630000000000000131610ustar 00000000000000/*! **A fast bump allocation arena for Rust.** [![](https://docs.rs/bumpalo/badge.svg)](https://docs.rs/bumpalo/) [![](https://img.shields.io/crates/v/bumpalo.svg)](https://crates.io/crates/bumpalo) [![](https://img.shields.io/crates/d/bumpalo.svg)](https://crates.io/crates/bumpalo) [![Build Status](https://github.com/fitzgen/bumpalo/workflows/Rust/badge.svg)](https://github.com/fitzgen/bumpalo/actions?query=workflow%3ARust) ![](https://github.com/fitzgen/bumpalo/raw/master/bumpalo.png) ## Bump Allocation Bump allocation is a fast, but limited approach to allocation. We have a chunk of memory, and we maintain a pointer within that memory. 
Whenever we allocate an object, we do a quick test that we have enough capacity left in our chunk to allocate the object and then update the pointer by the object's size. *That's it!* The disadvantage of bump allocation is that there is no general way to deallocate individual objects or reclaim the memory region for a no-longer-in-use object. These trade offs make bump allocation well-suited for *phase-oriented* allocations. That is, a group of objects that will all be allocated during the same program phase, used, and then can all be deallocated together as a group. ## Deallocation en Masse, but No `Drop` To deallocate all the objects in the arena at once, we can simply reset the bump pointer back to the start of the arena's memory chunk. This makes mass deallocation *extremely* fast, but allocated objects' `Drop` implementations are not invoked. > **However:** [`bumpalo::boxed::Box`][crate::boxed::Box] can be used to wrap > `T` values allocated in the `Bump` arena, and calls `T`'s `Drop` > implementation when the `Box` wrapper goes out of scope. This is similar to > how [`std::boxed::Box`] works, except without deallocating its backing memory. [`std::boxed::Box`]: https://doc.rust-lang.org/std/boxed/struct.Box.html ## What happens when the memory chunk is full? This implementation will allocate a new memory chunk from the global allocator and then start bump allocating into this new memory chunk. ## Example ``` use bumpalo::Bump; use std::u64; struct Doggo { cuteness: u64, age: u8, scritches_required: bool, } // Create a new arena to bump allocate into. let bump = Bump::new(); // Allocate values into the arena. let scooter = bump.alloc(Doggo { cuteness: u64::max_value(), age: 8, scritches_required: true, }); // Exclusive, mutable references to the just-allocated value are returned. 
assert!(scooter.scritches_required); scooter.age += 1; ``` ## Collections When the `"collections"` cargo feature is enabled, a fork of some of the `std` library's collections are available in the `collections` module. These collection types are modified to allocate their space inside `bumpalo::Bump` arenas. ```rust # #[cfg(feature = "collections")] # { use bumpalo::{Bump, collections::Vec}; // Create a new bump arena. let bump = Bump::new(); // Create a vector of integers whose storage is backed by the bump arena. The // vector cannot outlive its backing arena, and this property is enforced with // Rust's lifetime rules. let mut v = Vec::new_in(&bump); // Push a bunch of integers onto `v`! for i in 0..100 { v.push(i); } # } ``` Eventually [all `std` collection types will be parameterized by an allocator](https://github.com/rust-lang/rust/issues/42774) and we can remove this `collections` module and use the `std` versions. For unstable, nightly-only support for custom allocators in `std`, see the `allocator_api` section below. ## `bumpalo::boxed::Box` When the `"boxed"` cargo feature is enabled, a fork of `std::boxed::Box` library is available in the `boxed` module. This `Box` type is modified to allocate its space inside `bumpalo::Bump` arenas. **A `Box` runs `T`'s drop implementation when the `Box` is dropped.** You can use this to work around the fact that `Bump` does not drop values allocated in its space itself. ```rust # #[cfg(feature = "boxed")] # { use bumpalo::{Bump, boxed::Box}; use std::sync::atomic::{AtomicUsize, Ordering}; static NUM_DROPPED: AtomicUsize = AtomicUsize::new(0); struct CountDrops; impl Drop for CountDrops { fn drop(&mut self) { NUM_DROPPED.fetch_add(1, Ordering::SeqCst); } } // Create a new bump arena. let bump = Bump::new(); // Create a `CountDrops` inside the bump arena. let mut c = Box::new_in(CountDrops, &bump); // No `CountDrops` have been dropped yet. assert_eq!(NUM_DROPPED.load(Ordering::SeqCst), 0); // Drop our `Box`. 
drop(c); // Its `Drop` implementation was run, and so `NUM_DROPS` has been incremented. assert_eq!(NUM_DROPPED.load(Ordering::SeqCst), 1); # } ``` ## `#![no_std]` Support Bumpalo is a `no_std` crate. It depends only on the `alloc` and `core` crates. ## Thread support The `Bump` is `!Send`, which makes it hard to use in certain situations around threads ‒ for example in `rayon`. The [`bumpalo-herd`](https://crates.io/crates/bumpalo-herd) crate provides a pool of `Bump` allocators for use in such situations. ## Nightly Rust `feature(allocator_api)` Support The unstable, nightly-only Rust `allocator_api` feature defines an `Allocator` trait and exposes custom allocators for `std` types. Bumpalo has a matching `allocator_api` cargo feature to enable implementing `Allocator` and using `Bump` with `std` collections. Note that, as `feature(allocator_api)` is unstable and only in nightly Rust, Bumpalo's matching `allocator_api` cargo feature should be considered unstable, and will not follow the semver conventions that the rest of the crate does. First, enable the `allocator_api` feature in your `Cargo.toml`: ```toml [dependencies] bumpalo = { version = "3.4.0", features = ["allocator_api"] } ``` Next, enable the `allocator_api` nightly Rust feature in your `src/lib.rs` or `src/main.rs`: ```rust # #[cfg(feature = "allocator_api")] # { #![feature(allocator_api)] # } ``` Finally, use `std` collections with `Bump`, so that their internal heap allocations are made within the given bump arena: ``` # #![cfg_attr(feature = "allocator_api", feature(allocator_api))] # #[cfg(feature = "allocator_api")] # { #![feature(allocator_api)] use bumpalo::Bump; // Create a new bump arena. let bump = Bump::new(); // Create a `Vec` whose elements are allocated within the bump arena. let mut v = Vec::new_in(&bump); v.push(0); v.push(1); v.push(2); # } ``` ### Minimum Supported Rust Version (MSRV) This crate is guaranteed to compile on stable Rust 1.44 and up. 
It might compile with older versions but that may change in any new patch release.

We reserve the right to increment the MSRV on minor releases, however we will
strive to only do it deliberately and for good reasons.

 */

#![deny(missing_debug_implementations)]
#![deny(missing_docs)]
#![no_std]
#![cfg_attr(
    feature = "allocator_api",
    feature(allocator_api, nonnull_slice_from_raw_parts)
)]

#[doc(hidden)]
pub extern crate alloc as core_alloc;

#[cfg(feature = "boxed")]
pub mod boxed;
#[cfg(feature = "collections")]
pub mod collections;

mod alloc;

use core::cell::Cell;
use core::fmt::Display;
use core::iter;
use core::marker::PhantomData;
use core::mem;
use core::ptr::{self, NonNull};
use core::slice;
use core::str;
use core_alloc::alloc::{alloc, dealloc, Layout};

#[cfg(feature = "allocator_api")]
use core_alloc::alloc::{AllocError, Allocator};

/// An error returned from [`Bump::try_alloc_try_with`].
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum AllocOrInitError<E> {
    /// Indicates that the initial allocation failed.
    Alloc(alloc::AllocErr),
    /// Indicates that the initializer failed with the contained error after
    /// allocation.
    ///
    /// It is possible but not guaranteed that the allocated memory has been
    /// released back to the allocator at this point.
    Init(E),
}

impl<E> From<alloc::AllocErr> for AllocOrInitError<E> {
    fn from(e: alloc::AllocErr) -> Self {
        Self::Alloc(e)
    }
}

impl<E: Display> Display for AllocOrInitError<E> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        match self {
            AllocOrInitError::Alloc(err) => err.fmt(f),
            AllocOrInitError::Init(err) => write!(f, "initialization failed: {}", err),
        }
    }
}

/// An arena to bump allocate into.
///
/// ## No `Drop`s
///
/// Objects that are bump-allocated will never have their `Drop` implementation
/// called &mdash; unless you do it manually yourself. This makes it relatively
/// easy to leak memory or other resources.
///
/// If you have a type which internally manages
///
/// * an allocation from the global heap (e.g.
`Vec`), /// * open file descriptors (e.g. `std::fs::File`), or /// * any other resource that must be cleaned up (e.g. an `mmap`) /// /// and relies on its `Drop` implementation to clean up the internal resource, /// then if you allocate that type with a `Bump`, you need to find a new way to /// clean up after it yourself. /// /// Potential solutions are: /// /// * Using [`bumpalo::boxed::Box::new_in`] instead of [`Bump::alloc`], that /// will drop wrapped values similarly to [`std::boxed::Box`]. Note that this /// requires enabling the `"boxed"` Cargo feature for this crate. **This is /// often the easiest solution.** /// /// * Calling [`drop_in_place`][drop_in_place] or using /// [`std::mem::ManuallyDrop`][manuallydrop] to manually drop these types. /// /// * Using [`bumpalo::collections::Vec`] instead of [`std::vec::Vec`]. /// /// * Avoiding allocating these problematic types within a `Bump`. /// /// Note that not calling `Drop` is memory safe! Destructors are never /// guaranteed to run in Rust, you can't rely on them for enforcing memory /// safety. /// /// [drop_in_place]: https://doc.rust-lang.org/std/ptr/fn.drop_in_place.html /// [manuallydrop]: https://doc.rust-lang.org/std/mem/struct.ManuallyDrop.html /// [`bumpalo::collections::Vec`]: ./collections/struct.Vec.html /// [`std::vec::Vec`]: https://doc.rust-lang.org/std/vec/struct.Vec.html /// [`bumpalo::boxed::Box::new_in`]: ./boxed/struct.Box.html#method.new_in /// [`Bump::alloc`]: ./struct.Bump.html#method.alloc /// [`std::boxed::Box`]: https://doc.rust-lang.org/std/boxed/struct.Box.html /// /// ## Example /// /// ``` /// use bumpalo::Bump; /// /// // Create a new bump arena. /// let bump = Bump::new(); /// /// // Allocate values into the arena. /// let forty_two = bump.alloc(42); /// assert_eq!(*forty_two, 42); /// /// // Mutable references are returned from allocation. 
/// let mut s = bump.alloc("bumpalo"); /// *s = "the bump allocator; and also is a buffalo"; /// ``` /// /// ## Allocation Methods Come in Many Flavors /// /// There are various allocation methods on `Bump`, the simplest being /// [`alloc`][Bump::alloc]. The others exist to satisfy some combination of /// fallible allocation and initialization. The allocation methods are /// summarized in the following table: /// /// /// /// /// /// /// /// /// /// /// /// /// /// /// /// /// /// /// /// /// /// /// /// /// /// ///
Infallible AllocationFallible Allocation
By Valuealloctry_alloc
Infallible Initializer Functionalloc_withtry_alloc_with
Fallible Initializer Functionalloc_try_withtry_alloc_try_with
/// /// ### Fallible Allocation: The `try_alloc_` Method Prefix /// /// These allocation methods let you recover from out-of-memory (OOM) /// scenarioes, rather than raising a panic on OOM. /// /// ``` /// use bumpalo::Bump; /// /// let bump = Bump::new(); /// /// match bump.try_alloc(MyStruct { /// // ... /// }) { /// Ok(my_struct) => { /// // Allocation succeeded. /// } /// Err(e) => { /// // Out of memory. /// } /// } /// /// struct MyStruct { /// // ... /// } /// ``` /// /// ### Initializer Functions: The `_with` Method Suffix /// /// Calling one of the generic `…alloc(x)` methods is essentially equivalent to /// the matching [`…alloc_with(|| x)`](?search=alloc_with). However if you use /// `…alloc_with`, then the closure will not be invoked until after allocating /// space for storing `x` on the heap. /// /// This can be useful in certain edge-cases related to compiler optimizations. /// When evaluating for example `bump.alloc(x)`, semantically `x` is first put /// on the stack and then moved onto the heap. In some cases, the compiler is /// able to optimize this into constructing `x` directly on the heap, however /// in many cases it does not. /// /// The `*alloc_with` functions try to help the compiler be smarter. In most /// cases doing for example `bump.try_alloc_with(|| x)` on release mode will be /// enough to help the compiler realize that this optimization is valid and /// to construct `x` directly onto the heap. /// /// #### Warning /// /// These functions critically depend on compiler optimizations to achieve their /// desired effect. This means that it is not an effective tool when compiling /// without optimizations on. /// /// Even when optimizations are on, these functions do not **guarantee** that /// the value is constructed on the heap. To the best of our knowledge no such /// guarantee can be made in stable Rust as of 1.44. 
/// /// ### Fallible Initialization: The `_try_with` Method Suffix /// /// The generic [`…alloc_try_with(|| x)`](?search=_try_with) methods behave /// like the purely `_with` suffixed methods explained above. However, they /// allow for fallible initialization by accepting a closure that returns a /// [`Result`] and will attempt to undo the initial allocation if this closure /// returns [`Err`]. /// /// #### Warning /// /// If the inner closure returns [`Ok`], space for the entire [`Result`] remains /// allocated inside `self`. This can be a problem especially if the [`Err`] /// variant is larger, but even otherwise there may be overhead for the /// [`Result`]'s discriminant. /// ///

Undoing the allocation in the Err case /// always fails if f successfully made any additional allocations /// in self. /// /// For example, the following will always leak also space for the [`Result`] /// into this `Bump`, even though the inner reference isn't kept and the [`Err`] /// payload is returned semantically by value: /// /// ```rust /// let bump = bumpalo::Bump::new(); /// /// let r: Result<&mut [u8; 1000], ()> = bump.alloc_try_with(|| { /// let _ = bump.alloc(0_u8); /// Err(()) /// }); /// /// assert!(r.is_err()); /// ``` /// ///

/// /// Since [`Err`] payloads are first placed on the heap and then moved to the /// stack, `bump.…alloc_try_with(|| x)?` is likely to execute more slowly than /// the matching `bump.…alloc(x?)` in case of initialization failure. If this /// happens frequently, using the plain un-suffixed method may perform better. #[derive(Debug)] pub struct Bump { // The current chunk we are bump allocating within. current_chunk_footer: Cell>, } #[repr(C)] #[derive(Debug)] struct ChunkFooter { // Pointer to the start of this chunk allocation. This footer is always at // the end of the chunk. data: NonNull, // The layout of this chunk's allocation. layout: Layout, // Link to the previous chunk, if any. prev: Cell>>, // Bump allocation finger that is always in the range `self.data..=self`. ptr: Cell>, } impl Default for Bump { fn default() -> Bump { Bump::new() } } impl Drop for Bump { fn drop(&mut self) { unsafe { dealloc_chunk_list(Some(self.current_chunk_footer.get())); } } } #[inline] unsafe fn dealloc_chunk_list(mut footer: Option>) { while let Some(f) = footer { footer = f.as_ref().prev.get(); dealloc(f.as_ref().data.as_ptr(), f.as_ref().layout); } } // `Bump`s are safe to send between threads because nothing aliases its owned // chunks until you start allocating from it. But by the time you allocate from // it, the returned references to allocations borrow the `Bump` and therefore // prevent sending the `Bump` across threads until the borrows end. unsafe impl Send for Bump {} #[inline] pub(crate) fn round_up_to(n: usize, divisor: usize) -> Option { debug_assert!(divisor > 0); debug_assert!(divisor.is_power_of_two()); Some(n.checked_add(divisor - 1)? & !(divisor - 1)) } // After this point, we try to hit page boundaries instead of powers of 2 const PAGE_STRATEGY_CUTOFF: usize = 0x1000; // We only support alignments of up to 16 bytes for iter_allocated_chunks. 
const SUPPORTED_ITER_ALIGNMENT: usize = 16; const CHUNK_ALIGN: usize = SUPPORTED_ITER_ALIGNMENT; const FOOTER_SIZE: usize = mem::size_of::(); // Assert that ChunkFooter is at most the supported alignment. This will give a compile time error if it is not the case const _FOOTER_ALIGN_ASSERTION: bool = mem::align_of::() <= CHUNK_ALIGN; const _: [(); _FOOTER_ALIGN_ASSERTION as usize] = [()]; // Maximum typical overhead per allocation imposed by allocators. const MALLOC_OVERHEAD: usize = 16; // This is the overhead from malloc, footer and alignment. For instance, if // we want to request a chunk of memory that has at least X bytes usable for // allocations (where X is aligned to CHUNK_ALIGN), then we expect that the // after adding a footer, malloc overhead and alignment, the chunk of memory // the allocator actually sets asside for us is X+OVERHEAD rounded up to the // nearest suitable size boundary. const OVERHEAD: usize = (MALLOC_OVERHEAD + FOOTER_SIZE + (CHUNK_ALIGN - 1)) & !(CHUNK_ALIGN - 1); // Choose a relatively small default initial chunk size, since we double chunk // sizes as we grow bump arenas to amortize costs of hitting the global // allocator. const FIRST_ALLOCATION_GOAL: usize = 1 << 9; // The actual size of the first allocation is going to be a bit smaller // than the goal. We need to make room for the footer, and we also need // take the alignment into account. const DEFAULT_CHUNK_SIZE_WITHOUT_FOOTER: usize = FIRST_ALLOCATION_GOAL - OVERHEAD; /// Wrapper around `Layout::from_size_align` that adds debug assertions. #[inline] unsafe fn layout_from_size_align(size: usize, align: usize) -> Layout { if cfg!(debug_assertions) { Layout::from_size_align(size, align).unwrap() } else { Layout::from_size_align_unchecked(size, align) } } #[inline(never)] fn allocation_size_overflow() -> T { panic!("requested allocation size overflowed") } impl Bump { /// Construct a new arena to bump allocate into. 
/// /// ## Example /// /// ``` /// let bump = bumpalo::Bump::new(); /// # let _ = bump; /// ``` pub fn new() -> Bump { Self::with_capacity(0) } /// Attempt to construct a new arena to bump allocate into. /// /// ## Example /// /// ``` /// let bump = bumpalo::Bump::try_new(); /// # let _ = bump.unwrap(); /// ``` pub fn try_new() -> Result { Bump::try_with_capacity(0) } /// Construct a new arena with the specified byte capacity to bump allocate into. /// /// ## Example /// /// ``` /// let bump = bumpalo::Bump::with_capacity(100); /// # let _ = bump; /// ``` pub fn with_capacity(capacity: usize) -> Bump { Bump::try_with_capacity(capacity).unwrap_or_else(|_| oom()) } /// Attempt to construct a new arena with the specified byte capacity to bump allocate into. /// /// ## Example /// /// ``` /// let bump = bumpalo::Bump::try_with_capacity(100); /// # let _ = bump.unwrap(); /// ``` pub fn try_with_capacity(capacity: usize) -> Result { let chunk_footer = Self::new_chunk( None, Some(unsafe { layout_from_size_align(capacity, 1) }), None, ) .ok_or(alloc::AllocErr {})?; Ok(Bump { current_chunk_footer: Cell::new(chunk_footer), }) } /// Allocate a new chunk and return its initialized footer. /// /// If given, `layouts` is a tuple of the current chunk size and the /// layout of the allocation request that triggered us to fall back to /// allocating a new chunk of memory. 
fn new_chunk( new_size_without_footer: Option, requested_layout: Option, prev: Option>, ) -> Option> { unsafe { let mut new_size_without_footer = new_size_without_footer.unwrap_or(DEFAULT_CHUNK_SIZE_WITHOUT_FOOTER); // We want to have CHUNK_ALIGN or better alignment let mut align = CHUNK_ALIGN; // If we already know we need to fulfill some request, // make sure we allocate at least enough to satisfy it if let Some(requested_layout) = requested_layout { align = align.max(requested_layout.align()); let requested_size = round_up_to(requested_layout.size(), align) .unwrap_or_else(allocation_size_overflow); new_size_without_footer = new_size_without_footer.max(requested_size); } // We want our allocations to play nice with the memory allocator, // and waste as little memory as possible. // For small allocations, this means that the entire allocation // including the chunk footer and mallocs internal overhead is // as close to a power of two as we can go without going over. // For larger allocations, we only need to get close to a page // boundary without going over. if new_size_without_footer < PAGE_STRATEGY_CUTOFF { new_size_without_footer = (new_size_without_footer + OVERHEAD).next_power_of_two() - OVERHEAD; } else { new_size_without_footer = round_up_to(new_size_without_footer + OVERHEAD, 0x1000)? - OVERHEAD; } debug_assert_eq!(align % CHUNK_ALIGN, 0); debug_assert_eq!(new_size_without_footer % CHUNK_ALIGN, 0); let size = new_size_without_footer .checked_add(FOOTER_SIZE) .unwrap_or_else(allocation_size_overflow); let layout = layout_from_size_align(size, align); debug_assert!(requested_layout.map_or(true, |layout| size >= layout.size())); let data = alloc(layout); let data = NonNull::new(data)?; // The `ChunkFooter` is at the end of the chunk. 
let footer_ptr = data.as_ptr() as usize + new_size_without_footer; debug_assert_eq!((data.as_ptr() as usize) % align, 0); debug_assert_eq!(footer_ptr % CHUNK_ALIGN, 0); let footer_ptr = footer_ptr as *mut ChunkFooter; // The bump pointer is initialized to the end of the range we will // bump out of. let ptr = Cell::new(NonNull::new_unchecked(footer_ptr as *mut u8)); ptr::write( footer_ptr, ChunkFooter { data, layout, prev: Cell::new(prev), ptr, }, ); Some(NonNull::new_unchecked(footer_ptr)) } } /// Reset this bump allocator. /// /// Performs mass deallocation on everything allocated in this arena by /// resetting the pointer into the underlying chunk of memory to the start /// of the chunk. Does not run any `Drop` implementations on deallocated /// objects; see [the `Bump` type's top-level /// documentation](./struct.Bump.html) for details. /// /// If this arena has allocated multiple chunks to bump allocate into, then /// the excess chunks are returned to the global allocator. /// /// ## Example /// /// ``` /// let mut bump = bumpalo::Bump::new(); /// /// // Allocate a bunch of things. /// { /// for i in 0..100 { /// bump.alloc(i); /// } /// } /// /// // Reset the arena. /// bump.reset(); /// /// // Allocate some new things in the space previously occupied by the /// // original things. /// for j in 200..400 { /// bump.alloc(j); /// } ///``` pub fn reset(&mut self) { // Takes `&mut self` so `self` must be unique and there can't be any // borrows active that would get invalidated by resetting. unsafe { let cur_chunk = self.current_chunk_footer.get(); // Deallocate all chunks except the current one let prev_chunk = cur_chunk.as_ref().prev.replace(None); dealloc_chunk_list(prev_chunk); // Reset the bump finger to the end of the chunk. 
cur_chunk.as_ref().ptr.set(cur_chunk.cast()); debug_assert!( self.current_chunk_footer .get() .as_ref() .prev .get() .is_none(), "We should only have a single chunk" ); debug_assert_eq!( self.current_chunk_footer.get().as_ref().ptr.get(), self.current_chunk_footer.get().cast(), "Our chunk's bump finger should be reset to the start of its allocation" ); } } /// Allocate an object in this `Bump` and return an exclusive reference to /// it. /// /// ## Panics /// /// Panics if reserving space for `T` fails. /// /// ## Example /// /// ``` /// let bump = bumpalo::Bump::new(); /// let x = bump.alloc("hello"); /// assert_eq!(*x, "hello"); /// ``` #[inline(always)] #[allow(clippy::mut_from_ref)] pub fn alloc(&self, val: T) -> &mut T { self.alloc_with(|| val) } /// Try to allocate an object in this `Bump` and return an exclusive /// reference to it. /// /// ## Errors /// /// Errors if reserving space for `T` fails. /// /// ## Example /// /// ``` /// let bump = bumpalo::Bump::new(); /// let x = bump.try_alloc("hello"); /// assert_eq!(x, Ok(&mut"hello")); /// ``` #[inline(always)] #[allow(clippy::mut_from_ref)] pub fn try_alloc(&self, val: T) -> Result<&mut T, alloc::AllocErr> { self.try_alloc_with(|| val) } /// Pre-allocate space for an object in this `Bump`, initializes it using /// the closure, then returns an exclusive reference to it. /// /// See [The `_with` Method Suffix](#the-_with-method-suffix) for a /// discussion on the differences between the `_with` suffixed methods and /// those methods without it, their performance characteristics, and when /// you might or might not choose a `_with` suffixed method. /// /// ## Panics /// /// Panics if reserving space for `T` fails. 
/// /// ## Example /// /// ``` /// let bump = bumpalo::Bump::new(); /// let x = bump.alloc_with(|| "hello"); /// assert_eq!(*x, "hello"); /// ``` #[inline(always)] #[allow(clippy::mut_from_ref)] pub fn alloc_with(&self, f: F) -> &mut T where F: FnOnce() -> T, { #[inline(always)] unsafe fn inner_writer(ptr: *mut T, f: F) where F: FnOnce() -> T, { // This function is translated as: // - allocate space for a T on the stack // - call f() with the return value being put onto this stack space // - memcpy from the stack to the heap // // Ideally we want LLVM to always realize that doing a stack // allocation is unnecessary and optimize the code so it writes // directly into the heap instead. It seems we get it to realize // this most consistently if we put this critical line into it's // own function instead of inlining it into the surrounding code. ptr::write(ptr, f()) } let layout = Layout::new::(); unsafe { let p = self.alloc_layout(layout); let p = p.as_ptr() as *mut T; inner_writer(p, f); &mut *p } } /// Tries to pre-allocate space for an object in this `Bump`, initializes /// it using the closure, then returns an exclusive reference to it. /// /// See [The `_with` Method Suffix](#the-_with-method-suffix) for a /// discussion on the differences between the `_with` suffixed methods and /// those methods without it, their performance characteristics, and when /// you might or might not choose a `_with` suffixed method. /// /// ## Errors /// /// Errors if reserving space for `T` fails. 
/// /// ## Example /// /// ``` /// let bump = bumpalo::Bump::new(); /// let x = bump.try_alloc_with(|| "hello"); /// assert_eq!(x, Ok(&mut "hello")); /// ``` #[inline(always)] #[allow(clippy::mut_from_ref)] pub fn try_alloc_with(&self, f: F) -> Result<&mut T, alloc::AllocErr> where F: FnOnce() -> T, { #[inline(always)] unsafe fn inner_writer(ptr: *mut T, f: F) where F: FnOnce() -> T, { // This function is translated as: // - allocate space for a T on the stack // - call f() with the return value being put onto this stack space // - memcpy from the stack to the heap // // Ideally we want LLVM to always realize that doing a stack // allocation is unnecessary and optimize the code so it writes // directly into the heap instead. It seems we get it to realize // this most consistently if we put this critical line into it's // own function instead of inlining it into the surrounding code. ptr::write(ptr, f()) } //SAFETY: Self-contained: // `p` is allocated for `T` and then a `T` is written. let layout = Layout::new::(); let p = self.try_alloc_layout(layout)?; let p = p.as_ptr() as *mut T; unsafe { inner_writer(p, f); Ok(&mut *p) } } /// Pre-allocates space for a [`Result`] in this `Bump`, initializes it using /// the closure, then returns an exclusive reference to its `T` if [`Ok`]. /// /// Iff the allocation fails, the closure is not run. /// /// Iff [`Err`], an allocator rewind is *attempted* and the `E` instance is /// moved out of the allocator to be consumed or dropped as normal. /// /// See [The `_with` Method Suffix](#the-_with-method-suffix) for a /// discussion on the differences between the `_with` suffixed methods and /// those methods without it, their performance characteristics, and when /// you might or might not choose a `_with` suffixed method. /// /// For caveats specific to fallible initialization, see /// [The `_try_with` Method Suffix](#the-_try_with-method-suffix). 
/// /// ## Errors /// /// Iff the allocation succeeds but `f` fails, that error is forwarded by value. /// /// ## Panics /// /// Panics if reserving space for `Result` fails. /// /// ## Example /// /// ``` /// let bump = bumpalo::Bump::new(); /// let x = bump.alloc_try_with(|| Ok("hello"))?; /// assert_eq!(*x, "hello"); /// # Result::<_, ()>::Ok(()) /// ``` #[inline(always)] #[allow(clippy::mut_from_ref)] pub fn alloc_try_with(&self, f: F) -> Result<&mut T, E> where F: FnOnce() -> Result, { let rewind_footer = self.current_chunk_footer.get(); let rewind_ptr = unsafe { rewind_footer.as_ref() }.ptr.get(); let mut inner_result_ptr = NonNull::from(self.alloc_with(f)); let inner_result_address = inner_result_ptr.as_ptr() as usize; match unsafe { inner_result_ptr.as_mut() } { Ok(t) => Ok(unsafe { //SAFETY: // The `&mut Result` returned by `alloc_with` may be // lifetime-limited by `E`, but the derived `&mut T` still has // the same validity as in `alloc_with` since the error variant // is already ruled out here. // We could conditionally truncate the allocation here, but // since it grows backwards, it seems unlikely that we'd get // any more than the `Result`'s discriminant this way, if // anything at all. &mut *(t as *mut _) }), Err(e) => unsafe { // If this result was the last allocation in this arena, we can // reclaim its space. In fact, sometimes we can do even better // than simply calling `dealloc` on the result pointer: we can // reclaim any alignment padding we might have added (which // `dealloc` cannot do) if we didn't allocate a new chunk for // this result. if self.is_last_allocation(NonNull::new_unchecked(inner_result_address as *mut _)) { let current_footer_p = self.current_chunk_footer.get(); let current_ptr = ¤t_footer_p.as_ref().ptr; if current_footer_p == rewind_footer { // It's still the same chunk, so reset the bump pointer // to its original value upon entry to this method // (reclaiming any alignment padding we may have // added). 
current_ptr.set(rewind_ptr); } else { // We allocated a new chunk for this result. // // We know the result is the only allocation in this // chunk: Any additional allocations since the start of // this method could only have happened when running // the initializer function, which is called *after* // reserving space for this result. Therefore, since we // already determined via the check above that this // result was the last allocation, there must not have // been any other allocations, and this result is the // only allocation in this chunk. // // Because this is the only allocation in this chunk, // we can reset the chunk's bump finger to the start of // the chunk. current_ptr.set(current_footer_p.as_ref().data); } } //SAFETY: // As we received `E` semantically by value from `f`, we can // just copy that value here as long as we avoid a double-drop // (which can't happen as any specific references to the `E`'s // data in `self` are destroyed when this function returns). // // The order between this and the deallocation doesn't matter // because `Self: !Sync`. Err(ptr::read(e as *const _)) }, } } /// Tries to pre-allocates space for a [`Result`] in this `Bump`, /// initializes it using the closure, then returns an exclusive reference /// to its `T` if all [`Ok`]. /// /// Iff the allocation fails, the closure is not run. /// /// Iff the closure returns [`Err`], an allocator rewind is *attempted* and /// the `E` instance is moved out of the allocator to be consumed or dropped /// as normal. /// /// See [The `_with` Method Suffix](#the-_with-method-suffix) for a /// discussion on the differences between the `_with` suffixed methods and /// those methods without it, their performance characteristics, and when /// you might or might not choose a `_with` suffixed method. /// /// For caveats specific to fallible initialization, see /// [The `_try_with` Method Suffix](#the-_try_with-method-suffix). 
/// /// ## Errors /// /// Errors with the [`Alloc`](`AllocOrInitError::Alloc`) variant iff /// reserving space for `Result` fails. /// /// Iff the allocation succeeds but `f` fails, that error is forwarded by /// value inside the [`Init`](`AllocOrInitError::Init`) variant. /// /// ## Example /// /// ``` /// let bump = bumpalo::Bump::new(); /// let x = bump.try_alloc_try_with(|| Ok("hello"))?; /// assert_eq!(*x, "hello"); /// # Result::<_, bumpalo::AllocOrInitError<()>>::Ok(()) /// ``` #[inline(always)] #[allow(clippy::mut_from_ref)] pub fn try_alloc_try_with(&self, f: F) -> Result<&mut T, AllocOrInitError> where F: FnOnce() -> Result, { let rewind_footer = self.current_chunk_footer.get(); let rewind_ptr = unsafe { rewind_footer.as_ref() }.ptr.get(); let mut inner_result_ptr = NonNull::from(self.try_alloc_with(f)?); let inner_result_address = inner_result_ptr.as_ptr() as usize; match unsafe { inner_result_ptr.as_mut() } { Ok(t) => Ok(unsafe { //SAFETY: // The `&mut Result` returned by `alloc_with` may be // lifetime-limited by `E`, but the derived `&mut T` still has // the same validity as in `alloc_with` since the error variant // is already ruled out here. // We could conditionally truncate the allocation here, but // since it grows backwards, it seems unlikely that we'd get // any more than the `Result`'s discriminant this way, if // anything at all. &mut *(t as *mut _) }), Err(e) => unsafe { // If this result was the last allocation in this arena, we can // reclaim its space. In fact, sometimes we can do even better // than simply calling `dealloc` on the result pointer: we can // reclaim any alignment padding we might have added (which // `dealloc` cannot do) if we didn't allocate a new chunk for // this result. 
if self.is_last_allocation(NonNull::new_unchecked(inner_result_address as *mut _)) { let current_footer_p = self.current_chunk_footer.get(); let current_ptr = ¤t_footer_p.as_ref().ptr; if current_footer_p == rewind_footer { // It's still the same chunk, so reset the bump pointer // to its original value upon entry to this method // (reclaiming any alignment padding we may have // added). current_ptr.set(rewind_ptr); } else { // We allocated a new chunk for this result. // // We know the result is the only allocation in this // chunk: Any additional allocations since the start of // this method could only have happened when running // the initializer function, which is called *after* // reserving space for this result. Therefore, since we // already determined via the check above that this // result was the last allocation, there must not have // been any other allocations, and this result is the // only allocation in this chunk. // // Because this is the only allocation in this chunk, // we can reset the chunk's bump finger to the start of // the chunk. current_ptr.set(current_footer_p.as_ref().data); } } //SAFETY: // As we received `E` semantically by value from `f`, we can // just copy that value here as long as we avoid a double-drop // (which can't happen as any specific references to the `E`'s // data in `self` are destroyed when this function returns). // // The order between this and the deallocation doesn't matter // because `Self: !Sync`. Err(AllocOrInitError::Init(ptr::read(e as *const _))) }, } } /// `Copy` a slice into this `Bump` and return an exclusive reference to /// the copy. /// /// ## Panics /// /// Panics if reserving space for the slice fails. 
/// /// ## Example /// /// ``` /// let bump = bumpalo::Bump::new(); /// let x = bump.alloc_slice_copy(&[1, 2, 3]); /// assert_eq!(x, &[1, 2, 3]); /// ``` #[inline(always)] #[allow(clippy::mut_from_ref)] pub fn alloc_slice_copy(&self, src: &[T]) -> &mut [T] where T: Copy, { let layout = Layout::for_value(src); let dst = self.alloc_layout(layout).cast::(); unsafe { ptr::copy_nonoverlapping(src.as_ptr(), dst.as_ptr(), src.len()); slice::from_raw_parts_mut(dst.as_ptr(), src.len()) } } /// `Clone` a slice into this `Bump` and return an exclusive reference to /// the clone. Prefer `alloc_slice_copy` if `T` is `Copy`. /// /// ## Panics /// /// Panics if reserving space for the slice fails. /// /// ## Example /// /// ``` /// #[derive(Clone, Debug, Eq, PartialEq)] /// struct Sheep { /// name: String, /// } /// /// let originals = vec![ /// Sheep { name: "Alice".into() }, /// Sheep { name: "Bob".into() }, /// Sheep { name: "Cathy".into() }, /// ]; /// /// let bump = bumpalo::Bump::new(); /// let clones = bump.alloc_slice_clone(&originals); /// assert_eq!(originals, clones); /// ``` #[inline(always)] #[allow(clippy::mut_from_ref)] pub fn alloc_slice_clone(&self, src: &[T]) -> &mut [T] where T: Clone, { let layout = Layout::for_value(src); let dst = self.alloc_layout(layout).cast::(); unsafe { for (i, val) in src.iter().cloned().enumerate() { ptr::write(dst.as_ptr().add(i), val); } slice::from_raw_parts_mut(dst.as_ptr(), src.len()) } } /// `Copy` a string slice into this `Bump` and return an exclusive reference to it. /// /// ## Panics /// /// Panics if reserving space for the string fails. 
/// /// ## Example /// /// ``` /// let bump = bumpalo::Bump::new(); /// let hello = bump.alloc_str("hello world"); /// assert_eq!("hello world", hello); /// ``` #[inline(always)] #[allow(clippy::mut_from_ref)] pub fn alloc_str(&self, src: &str) -> &mut str { let buffer = self.alloc_slice_copy(src.as_bytes()); unsafe { // This is OK, because it already came in as str, so it is guaranteed to be utf8 str::from_utf8_unchecked_mut(buffer) } } /// Allocates a new slice of size `len` into this `Bump` and returns an /// exclusive reference to the copy. /// /// The elements of the slice are initialized using the supplied closure. /// The closure argument is the position in the slice. /// /// ## Panics /// /// Panics if reserving space for the slice fails. /// /// ## Example /// /// ``` /// let bump = bumpalo::Bump::new(); /// let x = bump.alloc_slice_fill_with(5, |i| 5*(i+1)); /// assert_eq!(x, &[5, 10, 15, 20, 25]); /// ``` #[inline(always)] #[allow(clippy::mut_from_ref)] pub fn alloc_slice_fill_with(&self, len: usize, mut f: F) -> &mut [T] where F: FnMut(usize) -> T, { let layout = Layout::array::(len).unwrap_or_else(|_| oom()); let dst = self.alloc_layout(layout).cast::(); unsafe { for i in 0..len { ptr::write(dst.as_ptr().add(i), f(i)); } let result = slice::from_raw_parts_mut(dst.as_ptr(), len); debug_assert_eq!(Layout::for_value(result), layout); result } } /// Allocates a new slice of size `len` into this `Bump` and returns an /// exclusive reference to the copy. /// /// All elements of the slice are initialized to `value`. /// /// ## Panics /// /// Panics if reserving space for the slice fails. 
/// /// ## Example /// /// ``` /// let bump = bumpalo::Bump::new(); /// let x = bump.alloc_slice_fill_copy(5, 42); /// assert_eq!(x, &[42, 42, 42, 42, 42]); /// ``` #[inline(always)] #[allow(clippy::mut_from_ref)] pub fn alloc_slice_fill_copy(&self, len: usize, value: T) -> &mut [T] { self.alloc_slice_fill_with(len, |_| value) } /// Allocates a new slice of size `len` slice into this `Bump` and return an /// exclusive reference to the copy. /// /// All elements of the slice are initialized to `value.clone()`. /// /// ## Panics /// /// Panics if reserving space for the slice fails. /// /// ## Example /// /// ``` /// let bump = bumpalo::Bump::new(); /// let s: String = "Hello Bump!".to_string(); /// let x: &[String] = bump.alloc_slice_fill_clone(2, &s); /// assert_eq!(x.len(), 2); /// assert_eq!(&x[0], &s); /// assert_eq!(&x[1], &s); /// ``` #[inline(always)] #[allow(clippy::mut_from_ref)] pub fn alloc_slice_fill_clone(&self, len: usize, value: &T) -> &mut [T] { self.alloc_slice_fill_with(len, |_| value.clone()) } /// Allocates a new slice of size `len` slice into this `Bump` and return an /// exclusive reference to the copy. /// /// The elements are initialized using the supplied iterator. /// /// ## Panics /// /// Panics if reserving space for the slice fails, or if the supplied /// iterator returns fewer elements than it promised. /// /// ## Example /// /// ``` /// let bump = bumpalo::Bump::new(); /// let x: &[i32] = bump.alloc_slice_fill_iter([2, 3, 5].iter().cloned().map(|i| i * i)); /// assert_eq!(x, [4, 9, 25]); /// ``` #[inline(always)] #[allow(clippy::mut_from_ref)] pub fn alloc_slice_fill_iter(&self, iter: I) -> &mut [T] where I: IntoIterator, I::IntoIter: ExactSizeIterator, { let mut iter = iter.into_iter(); self.alloc_slice_fill_with(iter.len(), |_| { iter.next().expect("Iterator supplied too few elements") }) } /// Allocates a new slice of size `len` slice into this `Bump` and return an /// exclusive reference to the copy. 
/// /// All elements of the slice are initialized to `T::default()`. /// /// ## Panics /// /// Panics if reserving space for the slice fails. /// /// ## Example /// /// ``` /// let bump = bumpalo::Bump::new(); /// let x = bump.alloc_slice_fill_default::(5); /// assert_eq!(x, &[0, 0, 0, 0, 0]); /// ``` #[inline(always)] #[allow(clippy::mut_from_ref)] pub fn alloc_slice_fill_default(&self, len: usize) -> &mut [T] { self.alloc_slice_fill_with(len, |_| T::default()) } /// Allocate space for an object with the given `Layout`. /// /// The returned pointer points at uninitialized memory, and should be /// initialized with /// [`std::ptr::write`](https://doc.rust-lang.org/std/ptr/fn.write.html). /// /// # Panics /// /// Panics if reserving space matching `layout` fails. #[inline(always)] pub fn alloc_layout(&self, layout: Layout) -> NonNull { self.try_alloc_layout(layout).unwrap_or_else(|_| oom()) } /// Attempts to allocate space for an object with the given `Layout` or else returns /// an `Err`. /// /// The returned pointer points at uninitialized memory, and should be /// initialized with /// [`std::ptr::write`](https://doc.rust-lang.org/std/ptr/fn.write.html). /// /// # Errors /// /// Errors if reserving space matching `layout` fails. #[inline(always)] pub fn try_alloc_layout(&self, layout: Layout) -> Result, alloc::AllocErr> { if let Some(p) = self.try_alloc_layout_fast(layout) { Ok(p) } else { self.alloc_layout_slow(layout).ok_or(alloc::AllocErr {}) } } #[inline(always)] fn try_alloc_layout_fast(&self, layout: Layout) -> Option> { // We don't need to check for ZSTs here since they will automatically // be handled properly: the pointer will be bumped by zero bytes, // modulo alignment. This keeps the fast path optimized for non-ZSTs, // which are much more common. 
unsafe { let footer = self.current_chunk_footer.get(); let footer = footer.as_ref(); let ptr = footer.ptr.get().as_ptr() as usize; let start = footer.data.as_ptr() as usize; debug_assert!(start <= ptr); debug_assert!(ptr <= footer as *const _ as usize); let ptr = ptr.checked_sub(layout.size())?; let aligned_ptr = ptr & !(layout.align() - 1); if aligned_ptr >= start { let aligned_ptr = NonNull::new_unchecked(aligned_ptr as *mut u8); footer.ptr.set(aligned_ptr); Some(aligned_ptr) } else { None } } } /// Gets the remaining capacity in the current chunk (in bytes). /// /// ## Example /// /// ``` /// use bumpalo::Bump; /// /// let bump = Bump::with_capacity(100); /// /// let capacity = bump.chunk_capacity(); /// assert!(capacity >= 100); /// ``` pub fn chunk_capacity(&self) -> usize { let current_footer = self.current_chunk_footer.get(); let current_footer = unsafe { current_footer.as_ref() }; current_footer as *const _ as usize - current_footer.data.as_ptr() as usize } /// Slow path allocation for when we need to allocate a new chunk from the /// parent bump set because there isn't enough room in our current chunk. #[inline(never)] fn alloc_layout_slow(&self, layout: Layout) -> Option> { unsafe { let size = layout.size(); // Get a new chunk from the global allocator. let current_footer = self.current_chunk_footer.get(); let current_layout = current_footer.as_ref().layout; // By default, we want our new chunk to be about twice as big // as the previous chunk. If the global allocator refuses it, // we try to divide it by half until it works or the requested // size is smaller than the default footer size. let min_new_chunk_size = layout.size().max(DEFAULT_CHUNK_SIZE_WITHOUT_FOOTER); let mut base_size = (current_layout.size() - FOOTER_SIZE) .checked_mul(2)? 
.max(min_new_chunk_size); let sizes = iter::from_fn(|| { if base_size >= min_new_chunk_size { let size = base_size; base_size = base_size / 2; Some(size) } else { None } }); let new_footer = sizes .filter_map(|size| Bump::new_chunk(Some(size), Some(layout), Some(current_footer))) .next()?; debug_assert_eq!( new_footer.as_ref().data.as_ptr() as usize % layout.align(), 0 ); // Set the new chunk as our new current chunk. self.current_chunk_footer.set(new_footer); let new_footer = new_footer.as_ref(); // Move the bump ptr finger down to allocate room for `val`. We know // this can't overflow because we successfully allocated a chunk of // at least the requested size. let ptr = new_footer.ptr.get().as_ptr() as usize - size; // Round the pointer down to the requested alignment. let ptr = ptr & !(layout.align() - 1); debug_assert!( ptr <= new_footer as *const _ as usize, "{:#x} <= {:#x}", ptr, new_footer as *const _ as usize ); let ptr = NonNull::new_unchecked(ptr as *mut u8); new_footer.ptr.set(ptr); // Return a pointer to the freshly allocated region in this chunk. Some(ptr) } } /// Returns an iterator over each chunk of allocated memory that /// this arena has bump allocated into. /// /// The chunks are returned ordered by allocation time, with the most /// recently allocated chunk being returned first, and the least recently /// allocated chunk being returned last. /// /// The values inside each chunk are also ordered by allocation time, with /// the most recent allocation being earlier in the slice, and the least /// recent allocation being towards the end of the slice. /// /// ## Safety /// /// Because this method takes `&mut self`, we know that the bump arena /// reference is unique and therefore there aren't any active references to /// any of the objects we've allocated in it either. This potential aliasing /// of exclusive references is one common footgun for unsafe code that we /// don't need to worry about here. 
/// /// However, there could be regions of uninitialized memory used as padding /// between allocations, which is why this iterator has items of type /// `[MaybeUninit]`, instead of simply `[u8]`. /// /// The only way to guarantee that there is no padding between allocations /// or within allocated objects is if all of these properties hold: /// /// 1. Every object allocated in this arena has the same alignment, /// and that alignment is at most 16. /// 2. Every object's size is a multiple of its alignment. /// 3. None of the objects allocated in this arena contain any internal /// padding. /// /// If you want to use this `iter_allocated_chunks` method, it is *your* /// responsibility to ensure that these properties hold before calling /// `MaybeUninit::assume_init` or otherwise reading the returned values. /// /// Finally, you must also ensure that any values allocated into the bump /// arena have not had their `Drop` implementations called on them, /// e.g. after dropping a [`bumpalo::boxed::Box`][crate::boxed::Box]. /// /// ## Example /// /// ``` /// let mut bump = bumpalo::Bump::new(); /// /// // Allocate a bunch of `i32`s in this bump arena, potentially causing /// // additional memory chunks to be reserved. /// for i in 0..10000 { /// bump.alloc(i); /// } /// /// // Iterate over each chunk we've bump allocated into. This is safe /// // because we have only allocated `i32`s in this arena, which fulfills /// // the above requirements. /// for ch in bump.iter_allocated_chunks() { /// println!("Used a chunk that is {} bytes long", ch.len()); /// println!("The first byte is {:?}", unsafe { /// ch.get(0).unwrap().assume_init() /// }); /// } /// /// // Within a chunk, allocations are ordered from most recent to least /// // recent. If we allocated 'a', then 'b', then 'c', when we iterate /// // through the chunk's data, we get them in the order 'c', then 'b', /// // then 'a'. 
/// /// bump.reset(); /// bump.alloc(b'a'); /// bump.alloc(b'b'); /// bump.alloc(b'c'); /// /// assert_eq!(bump.iter_allocated_chunks().count(), 1); /// let chunk = bump.iter_allocated_chunks().nth(0).unwrap(); /// assert_eq!(chunk.len(), 3); /// /// // Safe because we've only allocated `u8`s in this arena, which /// // fulfills the above requirements. /// unsafe { /// assert_eq!(chunk[0].assume_init(), b'c'); /// assert_eq!(chunk[1].assume_init(), b'b'); /// assert_eq!(chunk[2].assume_init(), b'a'); /// } /// ``` pub fn iter_allocated_chunks(&mut self) -> ChunkIter<'_> { ChunkIter { footer: Some(self.current_chunk_footer.get()), bump: PhantomData, } } /// Calculates the number of bytes currently allocated across all chunks in /// this bump arena. /// /// If you allocate types of different alignments or types with /// larger-than-typical alignment in the same arena, some padding /// bytes might get allocated in the bump arena. Note that those padding /// bytes will add to this method's resulting sum, so you cannot rely /// on it only counting the sum of the sizes of the things /// you've allocated in the arena. 
/// /// ## Example /// /// ``` /// let bump = bumpalo::Bump::new(); /// let _x = bump.alloc_slice_fill_default::(5); /// let bytes = bump.allocated_bytes(); /// assert!(bytes >= core::mem::size_of::() * 5); /// ``` pub fn allocated_bytes(&self) -> usize { let mut footer = Some(self.current_chunk_footer.get()); let mut bytes = 0; while let Some(f) = footer { let foot = unsafe { f.as_ref() }; let ptr = foot.ptr.get().as_ptr() as usize; debug_assert!(ptr <= foot as *const _ as usize); bytes += foot as *const _ as usize - ptr; footer = foot.prev.get(); } bytes } #[inline] unsafe fn is_last_allocation(&self, ptr: NonNull) -> bool { let footer = self.current_chunk_footer.get(); let footer = footer.as_ref(); footer.ptr.get() == ptr } #[inline] unsafe fn dealloc(&self, ptr: NonNull, layout: Layout) { // If the pointer is the last allocation we made, we can reuse the bytes, // otherwise they are simply leaked -- at least until somebody calls reset(). if self.is_last_allocation(ptr) { let ptr = NonNull::new_unchecked(ptr.as_ptr().add(layout.size())); self.current_chunk_footer.get().as_ref().ptr.set(ptr); } } #[inline] unsafe fn shrink( &self, ptr: NonNull, layout: Layout, new_size: usize, ) -> Result, alloc::AllocErr> { let old_size = layout.size(); if self.is_last_allocation(ptr) // Only reclaim the excess space (which requires a copy) if it // is worth it: we are actually going to recover "enough" space // and we can do a non-overlapping copy. && new_size <= old_size / 2 { let delta = old_size - new_size; let footer = self.current_chunk_footer.get(); let footer = footer.as_ref(); footer .ptr .set(NonNull::new_unchecked(footer.ptr.get().as_ptr().add(delta))); let new_ptr = footer.ptr.get(); // NB: we know it is non-overlapping because of the size check // in the `if` condition. 
ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), new_size); return Ok(new_ptr); } else { return Ok(ptr); } } #[inline] unsafe fn grow( &self, ptr: NonNull, layout: Layout, new_size: usize, ) -> Result, alloc::AllocErr> { let old_size = layout.size(); if self.is_last_allocation(ptr) { // Try to allocate the delta size within this same block so we can // reuse the currently allocated space. let delta = new_size - old_size; if let Some(p) = self.try_alloc_layout_fast(layout_from_size_align(delta, layout.align())) { ptr::copy(ptr.as_ptr(), p.as_ptr(), old_size); return Ok(p); } } // Fallback: do a fresh allocation and copy the existing data into it. let new_layout = layout_from_size_align(new_size, layout.align()); let new_ptr = self.try_alloc_layout(new_layout)?; ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), old_size); Ok(new_ptr) } } /// An iterator over each chunk of allocated memory that /// an arena has bump allocated into. /// /// The chunks are returned ordered by allocation time, with the most recently /// allocated chunk being returned first. /// /// The values inside each chunk is also ordered by allocation time, with the most /// recent allocation being earlier in the slice. /// /// This struct is created by the [`iter_allocated_chunks`] method on /// [`Bump`]. See that function for a safety description regarding reading from the returned items. 
/// /// [`Bump`]: ./struct.Bump.html /// [`iter_allocated_chunks`]: ./struct.Bump.html#method.iter_allocated_chunks #[derive(Debug)] pub struct ChunkIter<'a> { footer: Option>, bump: PhantomData<&'a mut Bump>, } impl<'a> Iterator for ChunkIter<'a> { type Item = &'a [mem::MaybeUninit]; fn next(&mut self) -> Option<&'a [mem::MaybeUninit]> { unsafe { let foot = self.footer?; let foot = foot.as_ref(); let data = foot.data.as_ptr() as usize; let ptr = foot.ptr.get().as_ptr() as usize; debug_assert!(data <= ptr); debug_assert!(ptr <= foot as *const _ as usize); let len = foot as *const _ as usize - ptr; let slice = slice::from_raw_parts(ptr as *const mem::MaybeUninit, len); self.footer = foot.prev.get(); Some(slice) } } } impl<'a> iter::FusedIterator for ChunkIter<'a> {} #[inline(never)] #[cold] fn oom() -> ! { panic!("out of memory") } unsafe impl<'a> alloc::Alloc for &'a Bump { #[inline(always)] unsafe fn alloc(&mut self, layout: Layout) -> Result, alloc::AllocErr> { self.try_alloc_layout(layout) } #[inline] unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout) { Bump::dealloc(self, ptr, layout) } #[inline] unsafe fn realloc( &mut self, ptr: NonNull, layout: Layout, new_size: usize, ) -> Result, alloc::AllocErr> { let old_size = layout.size(); if old_size == 0 { return self.try_alloc_layout(layout); } if new_size <= old_size { self.shrink(ptr, layout, new_size) } else { self.grow(ptr, layout, new_size) } } } #[cfg(feature = "allocator_api")] unsafe impl<'a> Allocator for &'a Bump { fn allocate(&self, layout: Layout) -> Result, AllocError> { self.try_alloc_layout(layout) .map(|p| NonNull::slice_from_raw_parts(p, layout.size())) .map_err(|_| AllocError) } unsafe fn deallocate(&self, ptr: NonNull, layout: Layout) { Bump::dealloc(self, ptr, layout) } unsafe fn shrink( &self, ptr: NonNull, old_layout: Layout, new_layout: Layout, ) -> Result, AllocError> { let new_size = new_layout.size(); Bump::shrink(self, ptr, old_layout, new_size) .map(|p| 
NonNull::slice_from_raw_parts(p, new_size)) .map_err(|_| AllocError) } unsafe fn grow( &self, ptr: NonNull, old_layout: Layout, new_layout: Layout, ) -> Result, AllocError> { let new_size = new_layout.size(); Bump::grow(self, ptr, old_layout, new_size) .map(|p| NonNull::slice_from_raw_parts(p, new_size)) .map_err(|_| AllocError) } unsafe fn grow_zeroed( &self, ptr: NonNull, old_layout: Layout, new_layout: Layout, ) -> Result, AllocError> { let mut ptr = self.grow(ptr, old_layout, new_layout)?; ptr.as_mut()[old_layout.size()..].fill(0); Ok(ptr) } } #[cfg(test)] mod tests { use super::*; #[test] fn chunk_footer_is_five_words() { assert_eq!(mem::size_of::(), mem::size_of::() * 5); } #[test] #[allow(clippy::cognitive_complexity)] fn test_realloc() { use crate::alloc::Alloc; unsafe { const CAPACITY: usize = 1024 - OVERHEAD; let mut b = Bump::with_capacity(CAPACITY); // `realloc` doesn't shrink allocations that aren't "worth it". let layout = Layout::from_size_align(100, 1).unwrap(); let p = b.alloc_layout(layout); let q = (&b).realloc(p, layout, 51).unwrap(); assert_eq!(p, q); b.reset(); // `realloc` will shrink allocations that are "worth it". let layout = Layout::from_size_align(100, 1).unwrap(); let p = b.alloc_layout(layout); let q = (&b).realloc(p, layout, 50).unwrap(); assert!(p != q); b.reset(); // `realloc` will reuse the last allocation when growing. let layout = Layout::from_size_align(10, 1).unwrap(); let p = b.alloc_layout(layout); let q = (&b).realloc(p, layout, 11).unwrap(); assert_eq!(q.as_ptr() as usize, p.as_ptr() as usize - 1); b.reset(); // `realloc` will allocate a new chunk when growing the last // allocation, if need be. 
let layout = Layout::from_size_align(1, 1).unwrap(); let p = b.alloc_layout(layout); let q = (&b).realloc(p, layout, CAPACITY + 1).unwrap(); assert!(q.as_ptr() as usize != p.as_ptr() as usize - CAPACITY); b = Bump::with_capacity(CAPACITY); // `realloc` will allocate and copy when reallocating anything that // wasn't the last allocation. let layout = Layout::from_size_align(1, 1).unwrap(); let p = b.alloc_layout(layout); let _ = b.alloc_layout(layout); let q = (&b).realloc(p, layout, 2).unwrap(); assert!(q.as_ptr() as usize != p.as_ptr() as usize - 1); b.reset(); } } #[test] fn invalid_read() { use alloc::Alloc; let mut b = &Bump::new(); unsafe { let l1 = Layout::from_size_align(12000, 4).unwrap(); let p1 = Alloc::alloc(&mut b, l1).unwrap(); let l2 = Layout::from_size_align(1000, 4).unwrap(); Alloc::alloc(&mut b, l2).unwrap(); let p1 = b.realloc(p1, l1, 24000).unwrap(); let l3 = Layout::from_size_align(24000, 4).unwrap(); b.realloc(p1, l3, 48000).unwrap(); } } } bumpalo-3.7.0/tests/alloc_fill.rs000064400000000000000000000016010000000000000150550ustar 00000000000000use bumpalo::Bump; use std::alloc::Layout; #[test] fn alloc_slice_fill_zero() { let b = Bump::new(); let layout = Layout::new::(); let ptr1 = b.alloc_layout(layout); struct MyZeroSizedType; b.alloc_slice_copy::(&[]); b.alloc_slice_clone::(&[]); b.alloc_slice_fill_with::(0, |_| panic!("should not happen")); b.alloc_slice_fill_copy(0, 42u64); b.alloc_slice_fill_clone(0, &"hello".to_string()); b.alloc_slice_fill_default::(0); let ptr2 = b.alloc(MyZeroSizedType); assert_eq!(ptr1.as_ptr() as usize & !7, ptr2 as *mut _ as usize); let ptr3 = b.alloc_layout(layout); assert_eq!(ptr2 as *mut _ as usize, ptr3.as_ptr() as usize + 1); } #[test] #[should_panic(expected = "out of memory")] fn alloc_slice_overflow() { let b = Bump::new(); b.alloc_slice_fill_default::(usize::max_value()); } bumpalo-3.7.0/tests/alloc_try_with.rs000064400000000000000000000047120000000000000160060ustar 00000000000000// All of these 
alloc_try_with tests will fail with "fatal runtime error: stack overflow" unless // LLVM manages to optimize the stack writes away. // // We only run them when debug_assertions are not set, as we expect them to fail outside release // mode. use bumpalo::Bump; #[test] #[cfg_attr(debug_assertions, ignore)] fn alloc_try_with_large_array() -> Result<(), ()> { let b = Bump::new(); b.alloc_try_with(|| Ok([4u8; 10_000_000]))?; Ok(()) } #[test] #[cfg_attr(debug_assertions, ignore)] fn alloc_try_with_large_array_err() { let b = Bump::new(); assert!(b .alloc_try_with(|| Result::<[u8; 10_000_000], _>::Err(())) .is_err()); } #[allow(dead_code)] struct LargeStruct { small: usize, big1: [u8; 20_000_000], big2: [u8; 20_000_000], big3: [u8; 20_000_000], } #[test] #[cfg_attr(debug_assertions, ignore)] fn alloc_try_with_large_struct() -> Result<(), ()> { let b = Bump::new(); b.alloc_try_with(|| { Ok(LargeStruct { small: 1, big1: [2; 20_000_000], big2: [3; 20_000_000], big3: [4; 20_000_000], }) })?; Ok(()) } #[test] #[cfg_attr(debug_assertions, ignore)] fn alloc_try_with_large_struct_err() { let b = Bump::new(); assert!(b .alloc_try_with(|| Result::::Err(())) .is_err()); } #[test] #[cfg_attr(debug_assertions, ignore)] fn alloc_try_with_large_tuple() -> Result<(), ()> { let b = Bump::new(); b.alloc_try_with(|| { Ok(( 1u32, LargeStruct { small: 2, big1: [3; 20_000_000], big2: [4; 20_000_000], big3: [5; 20_000_000], }, )) })?; Ok(()) } #[test] #[cfg_attr(debug_assertions, ignore)] fn alloc_try_with_large_tuple_err() { let b = Bump::new(); assert!(b .alloc_try_with(|| { Result::<(u32, LargeStruct), _>::Err(()) }) .is_err()); } #[allow(clippy::large_enum_variant)] enum LargeEnum { Small, #[allow(dead_code)] Large([u8; 10_000_000]), } #[test] #[cfg_attr(debug_assertions, ignore)] fn alloc_try_with_large_enum() -> Result<(), ()> { let b = Bump::new(); b.alloc_try_with(|| Ok(LargeEnum::Small))?; Ok(()) } #[test] #[cfg_attr(debug_assertions, ignore)] fn alloc_try_with_large_enum_err() { let b 
= Bump::new(); assert!(b .alloc_try_with(|| Result::::Err(())) .is_err()); } bumpalo-3.7.0/tests/alloc_with.rs000064400000000000000000000025770000000000000151170ustar 00000000000000// All of these alloc_with tests will fail with "fatal runtime error: stack overflow" unless LLVM // manages to optimize the stack writes away. // // We only run them when debug_assertions are not set, as we expect them to fail outside release // mode. use bumpalo::Bump; #[test] #[cfg_attr(debug_assertions, ignore)] fn alloc_with_large_array() { let b = Bump::new(); b.alloc_with(|| [4u8; 10_000_000]); } #[allow(dead_code)] struct LargeStruct { small: usize, big1: [u8; 20_000_000], big2: [u8; 20_000_000], big3: [u8; 20_000_000], } #[test] #[cfg_attr(debug_assertions, ignore)] fn alloc_with_large_struct() { let b = Bump::new(); b.alloc_with(|| LargeStruct { small: 1, big1: [2; 20_000_000], big2: [3; 20_000_000], big3: [4; 20_000_000], }); } #[test] #[cfg_attr(debug_assertions, ignore)] fn alloc_with_large_tuple() { let b = Bump::new(); b.alloc_with(|| { ( 1u32, LargeStruct { small: 2, big1: [3; 20_000_000], big2: [4; 20_000_000], big3: [5; 20_000_000], }, ) }); } enum LargeEnum { Small, #[allow(dead_code)] Large([u8; 10_000_000]), } #[test] #[cfg_attr(debug_assertions, ignore)] fn alloc_with_large_enum() { let b = Bump::new(); b.alloc_with(|| LargeEnum::Small); } bumpalo-3.7.0/tests/allocator_api.rs000075500000000000000000000063310000000000000155760ustar 00000000000000#![feature(allocator_api)] #![cfg(feature = "allocator_api")] use bumpalo::Bump; use std::alloc::{AllocError, Allocator, Layout}; use std::ptr::NonNull; use std::sync::atomic::{AtomicUsize, Ordering::Relaxed}; #[derive(Debug)] struct AllocatorDebug { bump: Bump, grows: AtomicUsize, shrinks: AtomicUsize, allocs: AtomicUsize, deallocs: AtomicUsize, } impl AllocatorDebug { fn new(bump: Bump) -> AllocatorDebug { AllocatorDebug { bump, grows: AtomicUsize::new(0), shrinks: AtomicUsize::new(0), allocs: AtomicUsize::new(0), deallocs: 
AtomicUsize::new(0), } } } unsafe impl Allocator for AllocatorDebug { fn allocate(&self, layout: Layout) -> Result, AllocError> { self.allocs.fetch_add(1, Relaxed); let ref bump = self.bump; bump.allocate(layout) } unsafe fn deallocate(&self, ptr: NonNull, layout: Layout) { self.deallocs.fetch_add(1, Relaxed); let ref bump = self.bump; bump.deallocate(ptr, layout) } unsafe fn shrink( &self, ptr: NonNull, old_layout: Layout, new_layout: Layout, ) -> Result, AllocError> { self.shrinks.fetch_add(1, Relaxed); let ref bump = self.bump; bump.shrink(ptr, old_layout, new_layout) } unsafe fn grow( &self, ptr: NonNull, old_layout: Layout, new_layout: Layout, ) -> Result, AllocError> { self.grows.fetch_add(1, Relaxed); let ref bump = self.bump; bump.grow(ptr, old_layout, new_layout) } } #[test] fn allocator_api_push_a_bunch_of_items() { let b = AllocatorDebug::new(Bump::new()); let mut v = Vec::with_capacity_in(1024, &b); assert_eq!(b.allocs.load(Relaxed), 1); for x in 0..1024 { v.push(x); } // Ensure we trigger a grow assert_eq!(b.grows.load(Relaxed), 0); for x in 1024..2048 { v.push(x); } assert_ne!(b.grows.load(Relaxed), 0); // Ensure we trigger a shrink v.truncate(1024); v.shrink_to_fit(); assert_eq!(b.shrinks.load(Relaxed), 1); // Ensure we trigger a deallocation assert_eq!(b.deallocs.load(Relaxed), 0); drop(v); assert_eq!(b.deallocs.load(Relaxed), 1); } #[test] fn allocator_grow_zeroed() { // Create a new bump arena. let ref bump = Bump::new(); // Make an initial allocation. let first_layout = Layout::from_size_align(4, 4).expect("create a layout"); let mut p = bump .allocate_zeroed(first_layout) .expect("allocate a first chunk"); let allocated = bump.allocated_bytes(); unsafe { p.as_mut().fill(42) }; let p = p.cast(); // Grow the last allocation. This should just reserve a few more bytes // within the current chunk, not allocate a whole new memory block within a // new chunk. 
let second_layout = Layout::from_size_align(8, 4).expect("create a expanded layout"); let p = unsafe { bump.grow_zeroed(p, first_layout, second_layout) } .expect("should grow_zeroed okay"); assert!(bump.allocated_bytes() <= allocated * 2); assert_eq!(unsafe { p.as_ref() }, [42, 42, 42, 42, 0, 0, 0, 0]); } bumpalo-3.7.0/tests/quickchecks.rs000064400000000000000000000156720000000000000152670ustar 00000000000000use bumpalo::Bump; use quickcheck::{quickcheck, Arbitrary, Gen}; use std::mem; #[derive(Clone, Debug, PartialEq)] struct BigValue { data: [u64; 32], } impl BigValue { fn new(x: u64) -> BigValue { BigValue { data: [ x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, ], } } } impl Arbitrary for BigValue { fn arbitrary(g: &mut G) -> BigValue { BigValue::new(u64::arbitrary(g)) } } #[derive(Clone, Debug)] enum Elems { OneT(T), TwoT(T, T), FourT(T, T, T, T), OneU(U), TwoU(U, U), FourU(U, U, U, U), } impl Arbitrary for Elems where T: Arbitrary + Clone, U: Arbitrary + Clone, { fn arbitrary(g: &mut G) -> Elems { let x: u8 = u8::arbitrary(g); match x % 6 { 0 => Elems::OneT(T::arbitrary(g)), 1 => Elems::TwoT(T::arbitrary(g), T::arbitrary(g)), 2 => Elems::FourT( T::arbitrary(g), T::arbitrary(g), T::arbitrary(g), T::arbitrary(g), ), 3 => Elems::OneU(U::arbitrary(g)), 4 => Elems::TwoU(U::arbitrary(g), U::arbitrary(g)), 5 => Elems::FourU( U::arbitrary(g), U::arbitrary(g), U::arbitrary(g), U::arbitrary(g), ), _ => unreachable!(), } } fn shrink(&self) -> Box> { match self { Elems::OneT(_) => Box::new(vec![].into_iter()), Elems::TwoT(a, b) => { Box::new(vec![Elems::OneT(a.clone()), Elems::OneT(b.clone())].into_iter()) } Elems::FourT(a, b, c, d) => Box::new( vec![ Elems::TwoT(a.clone(), b.clone()), Elems::TwoT(a.clone(), c.clone()), Elems::TwoT(a.clone(), d.clone()), Elems::TwoT(b.clone(), c.clone()), Elems::TwoT(b.clone(), d.clone()), Elems::TwoT(c.clone(), d.clone()), ] .into_iter(), ), Elems::OneU(_) => Box::new(vec![].into_iter()), 
Elems::TwoU(a, b) => { Box::new(vec![Elems::OneU(a.clone()), Elems::OneU(b.clone())].into_iter()) } Elems::FourU(a, b, c, d) => Box::new( vec![ Elems::TwoU(a.clone(), b.clone()), Elems::TwoU(a.clone(), c.clone()), Elems::TwoU(a.clone(), d.clone()), Elems::TwoU(b.clone(), c.clone()), Elems::TwoU(b.clone(), d.clone()), Elems::TwoU(c.clone(), d.clone()), ] .into_iter(), ), } } } fn overlap((a1, a2): (usize, usize), (b1, b2): (usize, usize)) -> bool { assert!(a1 < a2); assert!(b1 < b2); a1 < b2 && b1 < a2 } fn range(t: &T) -> (usize, usize) { let start = t as *const _ as usize; let end = start + mem::size_of::(); (start, end) } quickcheck! { fn can_allocate_big_values(values: Vec) -> () { let bump = Bump::new(); let mut alloced = vec![]; for vals in values.iter().cloned() { alloced.push(bump.alloc(vals)); } for (vals, alloc) in values.iter().zip(alloced.into_iter()) { assert_eq!(vals, alloc); } } fn big_allocations_never_overlap(values: Vec) -> () { let bump = Bump::new(); let mut alloced = vec![]; for v in values { let a = bump.alloc(v); let start = a as *const _ as usize; let end = unsafe { (a as *const BigValue).offset(1) as usize }; let range = (start, end); for r in &alloced { assert!(!overlap(*r, range)); } alloced.push(range); } } fn can_allocate_heterogeneous_things_and_they_dont_overlap(things: Vec>) -> () { let bump = Bump::new(); let mut ranges = vec![]; for t in things { let r = match t { Elems::OneT(a) => { range(bump.alloc(a)) }, Elems::TwoT(a, b) => { range(bump.alloc([a, b])) }, Elems::FourT(a, b, c, d) => { range(bump.alloc([a, b, c, d])) }, Elems::OneU(a) => { range(bump.alloc(a)) }, Elems::TwoU(a, b) => { range(bump.alloc([a, b])) }, Elems::FourU(a, b, c, d) => { range(bump.alloc([a, b, c, d])) }, }; for s in &ranges { assert!(!overlap(r, *s)); } ranges.push(r); } } fn test_alignment_chunks(sizes: Vec) -> () { const SUPPORTED_ALIGNMENTS: &[usize] = &[1, 2, 4, 8, 16]; for &alignment in SUPPORTED_ALIGNMENTS { let mut b = Bump::with_capacity(513); let 
mut sizes = sizes.iter().map(|&size| (size % 10) * alignment).collect::>(); for &size in &sizes { let layout = std::alloc::Layout::from_size_align(size, alignment).unwrap(); let ptr = b.alloc_layout(layout).as_ptr() as *const u8 as usize; assert_eq!(ptr % alignment, 0); } for chunk in b.iter_allocated_chunks() { let mut remaining = chunk.len(); while remaining > 0 { let size = sizes.pop().expect("too many bytes in the chunk output"); assert!(remaining >= size, "returned chunk contained padding"); remaining -= size; } } assert_eq!(sizes.into_iter().sum::(), 0); } } fn alloc_slices(allocs: Vec<(u8, usize)>) -> () { let b = Bump::new(); let mut allocated: Vec<(usize, usize)> = vec![]; for (val, len) in allocs { let len = len % 100; let s = b.alloc_slice_fill_copy(len, val); assert_eq!(s.len(), len); assert!(s.iter().all(|v| v == &val)); let range = (s.as_ptr() as usize, unsafe { s.as_ptr().add(s.len()) } as usize); for r in &allocated { let no_overlap = range.1 <= r.0 || r.1 <= range.0; assert!(no_overlap); } allocated.push(range); } } fn alloc_strs(allocs: Vec) -> () { let b = Bump::new(); let allocated: Vec<&str> = allocs.iter().map(|s| b.alloc_str(s) as &_).collect(); for (val, alloc) in allocs.into_iter().zip(allocated) { assert_eq!(val, alloc); } } } bumpalo-3.7.0/tests/string.rs000064400000000000000000000006660000000000000142750ustar 00000000000000#![cfg(feature = "collections")] use bumpalo::{collections::String, format, Bump}; use std::fmt::Write; #[test] fn format_a_bunch_of_strings() { let b = Bump::new(); let mut s = String::from_str_in("hello", &b); for i in 0..1000 { write!(&mut s, " {}", i).unwrap(); } } #[test] fn trailing_comma_in_format_macro() { let b = Bump::new(); let v = format![in &b, "{}{}", 1, 2, ]; assert_eq!(v, "12"); } bumpalo-3.7.0/tests/tests.rs000064400000000000000000000121720000000000000141240ustar 00000000000000use bumpalo::Bump; use std::alloc::Layout; use std::mem; use std::usize; #[test] fn can_iterate_over_allocated_things() { let 
mut bump = Bump::new(); const MAX: u64 = 131_072; let mut chunk_ends = vec![]; let mut last = None; for i in 0..MAX { let this = bump.alloc(i); assert_eq!(*this, i); let this = this as *const _ as usize; if match last { Some(last) if last - mem::size_of::() == this => false, _ => true, } { let chunk_end = this + mem::size_of::(); println!("new chunk ending @ 0x{:x}", chunk_end); assert!( !chunk_ends.contains(&chunk_end), "should not have already allocated this chunk" ); chunk_ends.push(chunk_end); } last = Some(this); } let mut seen = vec![false; MAX as usize]; // Safe because we always allocated objects of the same type in this arena, // and their size >= their align. for ch in bump.iter_allocated_chunks() { let chunk_end = ch.as_ptr() as usize + ch.len(); println!("iter chunk ending @ {:#x}", chunk_end); assert_eq!( chunk_ends.pop().unwrap(), chunk_end, "should iterate over each chunk once, in order they were allocated in" ); let (before, mid, after) = unsafe { ch.align_to::() }; assert!(before.is_empty()); assert!(after.is_empty()); for i in mid { assert!(*i < MAX, "{} < {} (aka {:x} < {:x})", i, MAX, i, MAX); seen[*i as usize] = true; } } assert!(seen.iter().all(|s| *s)); } #[test] #[should_panic(expected = "out of memory")] fn oom_instead_of_bump_pointer_overflow() { let bump = Bump::new(); let x = bump.alloc(0_u8); let p = x as *mut u8 as usize; // A size guaranteed to overflow the bump pointer. let size = usize::MAX - p + 1; let align = 1; let layout = match Layout::from_size_align(size, align) { Err(e) => { // Return on error so that we don't panic and the test fails. eprintln!("Layout::from_size_align errored: {}", e); return; } Ok(l) => l, }; // This should panic. bump.alloc_layout(layout); } #[test] fn force_new_chunk_fits_well() { let b = Bump::new(); // Use the first chunk for something b.alloc_layout(Layout::from_size_align(1, 1).unwrap()); // Next force allocation of some new chunks. 
b.alloc_layout(Layout::from_size_align(100_001, 1).unwrap()); b.alloc_layout(Layout::from_size_align(100_003, 1).unwrap()); } #[test] fn alloc_with_strong_alignment() { let b = Bump::new(); // 64 is probably the strongest alignment we'll see in practice // e.g. AVX-512 types, or cache line padding optimizations b.alloc_layout(Layout::from_size_align(4096, 64).unwrap()); } #[test] fn alloc_slice_copy() { let b = Bump::new(); let src: &[u16] = &[0xFEED, 0xFACE, 0xA7, 0xCAFE]; let dst = b.alloc_slice_copy(src); assert_eq!(src, dst); } #[test] fn alloc_slice_clone() { let b = Bump::new(); let src = vec![vec![0], vec![1, 2], vec![3, 4, 5], vec![6, 7, 8, 9]]; let dst = b.alloc_slice_clone(&src); assert_eq!(src, dst); } #[test] fn small_size_and_large_align() { let b = Bump::new(); let layout = std::alloc::Layout::from_size_align(1, 0x1000).unwrap(); b.alloc_layout(layout); } fn with_capacity_helper(iter: I) where T: Copy + Eq, I: Clone + Iterator + DoubleEndedIterator, { for &initial_size in &[0, 1, 8, 11, 0x1000, 0x12345] { let mut b = Bump::with_capacity(initial_size); for v in iter.clone() { b.alloc(v); } let pushed_values = b.iter_allocated_chunks().flat_map(|c| { let (before, mid, after) = unsafe { c.align_to::() }; assert!(before.is_empty()); assert!(after.is_empty()); mid.iter().copied() }); assert!(pushed_values.eq(iter.clone().rev())); } } #[test] fn with_capacity_test() { with_capacity_helper(0u8..255); with_capacity_helper(0u16..10000); with_capacity_helper(0u32..10000); with_capacity_helper(0u64..10000); with_capacity_helper(0u128..10000); } #[test] fn test_reset() { let mut b = Bump::new(); for i in 0u64..10_000 { b.alloc(i); } assert!(b.iter_allocated_chunks().count() > 1); let last_chunk = b.iter_allocated_chunks().next().unwrap(); let start = last_chunk.as_ptr() as usize; let end = start + last_chunk.len(); b.reset(); assert_eq!( end - mem::size_of::(), b.alloc(0u64) as *const u64 as usize ); assert_eq!(b.iter_allocated_chunks().count(), 1); } #[test] fn 
test_alignment() { for &alignment in &[2, 4, 8, 16, 32, 64] { let b = Bump::with_capacity(513); let layout = std::alloc::Layout::from_size_align(alignment, alignment).unwrap(); for _ in 0..1024 { let ptr = b.alloc_layout(layout).as_ptr(); assert_eq!(ptr as *const u8 as usize % alignment, 0); } } } bumpalo-3.7.0/tests/try_alloc.rs000064400000000000000000000220450000000000000147520ustar 00000000000000use bumpalo::{AllocOrInitError, Bump}; use rand::Rng; use std::alloc::{GlobalAlloc, Layout, System}; use std::sync::atomic::{AtomicBool, Ordering}; /// A custom allocator that wraps the system allocator, but lets us force /// allocation failures for testing. struct Allocator(AtomicBool); impl Allocator { fn is_returning_null(&self) -> bool { self.0.load(Ordering::SeqCst) } fn set_returning_null(&self, returning_null: bool) { self.0.store(returning_null, Ordering::SeqCst); } fn toggle_returning_null(&self) { self.set_returning_null(!self.is_returning_null()); } #[allow(dead_code)] // Silence warnings for non-"collections" builds. 
fn with_successful_allocs(&self, callback: F) -> T where F: FnOnce() -> T, { let old_returning_null = self.is_returning_null(); self.set_returning_null(false); let result = callback(); self.set_returning_null(old_returning_null); result } fn with_alloc_failures(&self, callback: F) -> T where F: FnOnce() -> T, { let old_returning_null = self.is_returning_null(); self.set_returning_null(true); let result = callback(); self.set_returning_null(old_returning_null); result } } unsafe impl GlobalAlloc for Allocator { unsafe fn alloc(&self, layout: Layout) -> *mut u8 { if self.is_returning_null() { core::ptr::null_mut() } else { System.alloc(layout) } } unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) { System.dealloc(ptr, layout); } unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 { if self.is_returning_null() { core::ptr::null_mut() } else { System.realloc(ptr, layout, new_size) } } } #[global_allocator] static GLOBAL_ALLOCATOR: Allocator = Allocator(AtomicBool::new(false)); /// `assert!` may allocate on failure (e.g. for string formatting and boxing /// panic info), so we must re-enable allocations during assertions. macro_rules! assert { ($cond:expr $(, $args:tt)*) => { if !$cond { GLOBAL_ALLOCATOR.set_returning_null(false); panic!(concat!("Assertion failed: ", stringify!($cond))); } }; } /// NB: We provide our own `main` rather than using the default test harness's /// so that we can ensure that tests are executed serially, and no background /// threads get tripped up by us disabling the global allocator, or anything /// like that. fn main() { macro_rules! test { ($name:expr, $test:expr $(,)*) => { ($name, $test as fn()) }; } fn test_static_size_alloc(assert_alloc_ok: fn(bump: &Bump), assert_alloc_err: fn(bump: &Bump)) { // Unlike with `try_alloc_layout`, it's not that easy to test a variety // of size/capacity combinations here. 
// Since nothing in Bump is really random, and we have to start fresh // each time, just checking each case once is enough. for &fail_alloc in &[false, true] { let bump = GLOBAL_ALLOCATOR.with_successful_allocs(|| { // We can't query the remaining free space in the current chunk, // so we have to create a new Bump for each test and fill it to // the brink of a new allocation. let bump = Bump::try_new().unwrap(); // Bump preallocates space in the initial chunk, so we need to // use up this block prior to the actual test let layout = Layout::from_size_align(bump.chunk_capacity(), 1).unwrap(); assert!(bump.try_alloc_layout(layout).is_ok()); bump }); GLOBAL_ALLOCATOR.set_returning_null(fail_alloc); if fail_alloc { assert_alloc_err(&bump) } else { assert_alloc_ok(&bump) } } } let tests = [ test!("Bump::try_new fails when global allocator fails", || { GLOBAL_ALLOCATOR.with_alloc_failures(|| { assert!(Bump::try_new().is_err()); }); }), test!( "test try_alloc_layout with and without global allocation failures", || { const NUM_TESTS: usize = 5000; const MAX_BYTES_ALLOCATED: usize = 65536; let mut bump = Bump::try_new().unwrap(); let mut bytes_allocated = bump.chunk_capacity(); // Bump preallocates space in the initial chunk, so we need to // use up this block prior to the actual test let layout = Layout::from_size_align(bump.chunk_capacity(), 1).unwrap(); assert!(bump.try_alloc_layout(layout).is_ok()); let mut rng = rand::thread_rng(); for _ in 0..NUM_TESTS { if rng.gen() { GLOBAL_ALLOCATOR.toggle_returning_null(); } let layout = Layout::from_size_align(bump.chunk_capacity() + 1, 1).unwrap(); if GLOBAL_ALLOCATOR.is_returning_null() { assert!(bump.try_alloc_layout(layout).is_err()); } else { assert!(bump.try_alloc_layout(layout).is_ok()); bytes_allocated += bump.chunk_capacity(); } if bytes_allocated >= MAX_BYTES_ALLOCATED { bump = GLOBAL_ALLOCATOR.with_successful_allocs(|| Bump::try_new().unwrap()); bytes_allocated = bump.chunk_capacity(); } } }, ), test!( "test try_alloc 
with and without global allocation failures", || { test_static_size_alloc( |bump| assert!(bump.try_alloc(1u8).is_ok()), |bump| assert!(bump.try_alloc(1u8).is_err()), ) }, ), test!( "test try_alloc_with with and without global allocation failures", || { test_static_size_alloc( |bump| assert!(bump.try_alloc_with(|| 1u8).is_ok()), |bump| assert!(bump.try_alloc_with(|| 1u8).is_err()), ) }, ), test!( "test try_alloc_try_with (Ok) with and without global allocation failures", || { test_static_size_alloc( |bump| assert!(bump.try_alloc_try_with::<_, _, ()>(|| Ok(1u8)).is_ok()), |bump| assert!(bump.try_alloc_try_with::<_, _, ()>(|| Ok(1u8)).is_err()), ) }, ), test!( "test try_alloc_try_with (Err) with and without global allocation failures", || { test_static_size_alloc( |bump| { assert!(matches!( bump.try_alloc_try_with::<_, u8, _>(|| Err(())), Err(AllocOrInitError::Init(_)) )) }, |bump| { assert!(matches!( bump.try_alloc_try_with::<_, u8, _>(|| Err(())), Err(AllocOrInitError::Alloc(_)) )) }, ) }, ), #[cfg(feature = "collections")] test!("test Vec::try_reserve and Vec::try_reserve_exact", || { use bumpalo::collections::Vec; let bump = Bump::try_new().unwrap(); GLOBAL_ALLOCATOR.with_alloc_failures(|| { let mut vec = Vec::::new_in(&bump); let chunk_cap = bump.chunk_capacity(); // Will always succeed since this size gets pre-allocated in Bump::try_new() assert!(vec.try_reserve(chunk_cap).is_ok()); assert!(vec.try_reserve_exact(chunk_cap).is_ok()); // Fails to allocate futher since allocator returns null assert!(vec.try_reserve(chunk_cap + 1).is_err()); assert!(vec.try_reserve_exact(chunk_cap + 1).is_err()); }); GLOBAL_ALLOCATOR.with_successful_allocs(|| { let mut vec = Vec::::new_in(&bump); let chunk_cap = bump.chunk_capacity(); // Will always succeed since this size gets pre-allocated in Bump::try_new() assert!(vec.try_reserve(chunk_cap).is_ok()); assert!(vec.try_reserve_exact(chunk_cap).is_ok()); // Succeeds to allocate further assert!(vec.try_reserve(chunk_cap + 
1).is_ok()); assert!(vec.try_reserve_exact(chunk_cap + 1).is_ok()); }); }), ]; for (name, test) in tests.iter() { assert!(!GLOBAL_ALLOCATOR.is_returning_null()); eprintln!("=== {} ===", name); test(); GLOBAL_ALLOCATOR.set_returning_null(false); } } bumpalo-3.7.0/tests/try_alloc_try_with.rs000064400000000000000000000051520000000000000167030ustar 00000000000000// All of these try_alloc_try_with tests will fail with "fatal runtime error: stack overflow" unless // LLVM manages to optimize the stack writes away. // // We only run them when debug_assertions are not set, as we expect them to fail outside release // mode. use bumpalo::{AllocOrInitError, Bump}; #[test] #[cfg_attr(debug_assertions, ignore)] fn try_alloc_try_with_large_array() -> Result<(), AllocOrInitError<()>> { let b = Bump::new(); b.try_alloc_try_with(|| Ok([4u8; 10_000_000]))?; Ok(()) } #[test] #[cfg_attr(debug_assertions, ignore)] fn try_alloc_try_with_large_array_err() { let b = Bump::new(); assert!(b .try_alloc_try_with(|| Result::<[u8; 10_000_000], _>::Err(())) .is_err()); } #[allow(dead_code)] struct LargeStruct { small: usize, big1: [u8; 20_000_000], big2: [u8; 20_000_000], big3: [u8; 20_000_000], } #[test] #[cfg_attr(debug_assertions, ignore)] fn try_alloc_try_with_large_struct() -> Result<(), AllocOrInitError<()>> { let b = Bump::new(); b.try_alloc_try_with(|| { Ok(LargeStruct { small: 1, big1: [2; 20_000_000], big2: [3; 20_000_000], big3: [4; 20_000_000], }) })?; Ok(()) } #[test] #[cfg_attr(debug_assertions, ignore)] fn try_alloc_try_with_large_struct_err() { let b = Bump::new(); assert!(b .try_alloc_try_with(|| Result::::Err(())) .is_err()); } #[test] #[cfg_attr(debug_assertions, ignore)] fn try_alloc_try_with_large_tuple() -> Result<(), AllocOrInitError<()>> { let b = Bump::new(); b.try_alloc_try_with(|| { Ok(( 1u32, LargeStruct { small: 2, big1: [3; 20_000_000], big2: [4; 20_000_000], big3: [5; 20_000_000], }, )) })?; Ok(()) } #[test] #[cfg_attr(debug_assertions, ignore)] fn 
try_alloc_try_with_large_tuple_err() { let b = Bump::new(); assert!(b .try_alloc_try_with(|| { Result::<(u32, LargeStruct), _>::Err(()) }) .is_err()); } #[allow(clippy::large_enum_variant)] enum LargeEnum { Small, #[allow(dead_code)] Large([u8; 10_000_000]), } #[test] #[cfg_attr(debug_assertions, ignore)] fn try_alloc_try_with_large_enum() -> Result<(), AllocOrInitError<()>> { let b = Bump::new(); b.try_alloc_try_with(|| Ok(LargeEnum::Small))?; Ok(()) } #[test] #[cfg_attr(debug_assertions, ignore)] fn try_alloc_try_with_large_enum_err() { let b = Bump::new(); assert!(b .try_alloc_try_with(|| Result::::Err(())) .is_err()); } bumpalo-3.7.0/tests/try_alloc_with.rs000064400000000000000000000027210000000000000160040ustar 00000000000000// All of these try_alloc_with tests will fail with "fatal runtime error: stack overflow" unless LLVM // manages to optimize the stack writes away. // // We only run them when debug_assertions are not set, as we expect them to fail outside release // mode. use bumpalo::Bump; #[test] #[cfg_attr(debug_assertions, ignore)] fn try_alloc_with_large_array() { let b = Bump::new(); b.try_alloc_with(|| [4u8; 10_000_000]).unwrap(); } #[allow(dead_code)] struct LargeStruct { small: usize, big1: [u8; 20_000_000], big2: [u8; 20_000_000], big3: [u8; 20_000_000], } #[test] #[cfg_attr(debug_assertions, ignore)] fn try_alloc_with_large_struct() { let b = Bump::new(); b.try_alloc_with(|| LargeStruct { small: 1, big1: [2; 20_000_000], big2: [3; 20_000_000], big3: [4; 20_000_000], }) .unwrap(); } #[test] #[cfg_attr(debug_assertions, ignore)] fn try_alloc_with_large_tuple() { let b = Bump::new(); b.try_alloc_with(|| { ( 1u32, LargeStruct { small: 2, big1: [3; 20_000_000], big2: [4; 20_000_000], big3: [5; 20_000_000], }, ) }) .unwrap(); } enum LargeEnum { Small, #[allow(dead_code)] Large([u8; 10_000_000]), } #[test] #[cfg_attr(debug_assertions, ignore)] fn try_alloc_with_large_enum() { let b = Bump::new(); b.try_alloc_with(|| LargeEnum::Small).unwrap(); } 
bumpalo-3.7.0/tests/vec.rs000064400000000000000000000042510000000000000135360ustar 00000000000000#![cfg(feature = "collections")] use bumpalo::{collections::Vec, vec, Bump}; use std::cell::Cell; #[test] fn push_a_bunch_of_items() { let b = Bump::new(); let mut v = Vec::new_in(&b); for x in 0..10_000 { v.push(x); } } #[test] fn trailing_comma_in_vec_macro() { let b = Bump::new(); let v = vec![in &b; 1, 2, 3,]; assert_eq!(v, [1, 2, 3]); } #[test] fn recursive_vecs() { // The purpose of this test is to see if the data structures with // self references are allowed without causing a compile error // because of the dropck let b = Bump::new(); struct Node<'a> { myself: Cell>>, edges: Cell>>, } let node1: &Node = b.alloc(Node { myself: Cell::new(None), edges: Cell::new(Vec::new_in(&b)), }); let node2: &Node = b.alloc(Node { myself: Cell::new(None), edges: Cell::new(Vec::new_in(&b)), }); node1.myself.set(Some(node1)); node1.edges.set(bumpalo::vec![in &b; node1, node1, node2]); node2.myself.set(Some(node2)); node2.edges.set(bumpalo::vec![in &b; node1, node2]); } #[test] fn test_into_bump_slice_mut() { let b = Bump::new(); let v = bumpalo::vec![in &b; 1, 2, 3]; let slice = v.into_bump_slice_mut(); slice[0] = 3; slice[2] = 1; assert_eq!(slice, [3, 2, 1]); } quickcheck::quickcheck! { fn vec_resizes_causing_reallocs(sizes: std::vec::Vec) -> () { // Exercise `realloc` by doing a bunch of `resize`s followed by // `shrink_to_fit`s. let b = Bump::new(); let mut v = bumpalo::vec![in &b]; for len in sizes { // We don't want to get too big and OOM. const MAX_SIZE: usize = 1 << 15; // But we want allocations to get fairly close to the minimum chunk // size, so that we are exercising both realloc'ing within a chunk // and when we need new chunks. 
const MIN_SIZE: usize = 1 << 7; let len = std::cmp::min(len, MAX_SIZE); let len = std::cmp::max(len, MIN_SIZE); v.resize(len, 0); v.shrink_to_fit(); } } } bumpalo-3.7.0/valgrind.supp000064400000000000000000000012430000000000000137660ustar 00000000000000{ Memcheck:FishyValue malloc(size) fun:malloc obj:/**/target/*/deps/tests-* } { Memcheck:Param statx(buf) fun:syscall fun:statx } { Memcheck:Param statx(file_name) fun:syscall fun:statx } { Memcheck:Param statx(buf) fun:statx fun:statx } { Memcheck:Param statx(file_name) fun:statx fun:statx }